추론실행 shp파일 생성 jar

This commit is contained in:
2026-01-16 16:56:07 +09:00
parent 53a07da4b1
commit c56259ad80
14 changed files with 310 additions and 12 deletions

BIN
jar/makeshp-1.0.0.jar Normal file

Binary file not shown.

View File

@@ -1,10 +1,13 @@
package com.kamco.cd.kamcoback.common.api;
import com.kamco.cd.kamcoback.common.api.HelloDto.Res;
import com.kamco.cd.kamcoback.common.service.ExternalJarRunner;
import com.kamco.cd.kamcoback.common.service.HelloService;
import io.swagger.v3.oas.annotations.Parameter;
import java.nio.file.Path;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@RequiredArgsConstructor
@@ -13,6 +16,7 @@ import org.springframework.web.bind.annotation.RestController;
public class HelloApiController {
private final HelloService helloService;
private final ExternalJarRunner externalJarRunner;
@GetMapping
public HelloDto.Res hello(HelloDto.Req req) {
@@ -22,4 +26,16 @@ public class HelloApiController {
return res;
}
/**
 * Manually triggers the shp-generation jar for the given batches.
 *
 * @param jarPath path (relative to the working directory) of the jar to execute
 * @param batchIds comma-separated batch ids forwarded to the jar
 * @param inferenceId optional 32-char inference id forwarded to the jar
 * @param mapIds optional 5K map sheet ids forwarded to the jar
 * @throws IllegalArgumentException if {@code jarPath} escapes the working directory or is not a jar
 */
@GetMapping("/shp")
public void shp(
    @Parameter(description = "jar 경로", example = "jar/makesample-1.0.0.jar") @RequestParam
        String jarPath,
    @Parameter(description = "batchIds", example = "252,253,257") @RequestParam String batchIds,
    @Parameter(description = "32길이 문자열 값", example = "") @RequestParam(required = false)
        String inferenceId,
    @Parameter(description = "5K 도엽번호", example = "") @RequestParam(required = false)
        String mapIds) {
  // Security: jarPath comes straight from the client and is executed via `java -jar`.
  // Confine it to *.jar files under the application's working directory so this endpoint
  // cannot be abused to run arbitrary artifacts (rejects absolute paths and ../ traversal).
  Path workDir = Path.of("").toAbsolutePath().normalize();
  Path requested = workDir.resolve(jarPath).normalize();
  if (!requested.startsWith(workDir) || !jarPath.endsWith(".jar")) {
    throw new IllegalArgumentException("invalid jarPath: " + jarPath);
  }
  externalJarRunner.run(jarPath, batchIds, inferenceId, mapIds);
}
}

View File

@@ -0,0 +1,68 @@
package com.kamco.cd.kamcoback.common.service;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.stereotype.Component;
@Log4j2
@Component
@RequiredArgsConstructor
public class ExternalJarRunner {
public void run(String jarPath, String batchIds, String inferenceId, String mapIds) {
StringBuilder out = new StringBuilder();
try {
List<String> cmd = new ArrayList<>();
cmd.add("java");
cmd.add("-jar");
cmd.add(jarPath);
if (inferenceId != null && !inferenceId.isBlank()) {
cmd.add("--converter.inference-id=" + inferenceId);
}
if (mapIds != null && !mapIds.isBlank()) {
cmd.add("--converter.map-ids=" + mapIds);
}
if (batchIds != null && !batchIds.isBlank()) {
cmd.add("--converter.batch-ids=" + batchIds);
}
ProcessBuilder pb = new ProcessBuilder(cmd);
pb.redirectErrorStream(true);
Process p = pb.start();
try (BufferedReader br =
new BufferedReader(new InputStreamReader(p.getInputStream(), StandardCharsets.UTF_8))) {
String line;
while ((line = br.readLine()) != null) {
out.append(line).append('\n');
log.info("[jar] {}", line);
}
}
boolean finished = p.waitFor(30, TimeUnit.MINUTES);
if (!finished) {
p.destroyForcibly();
throw new RuntimeException("jar timeout\n" + out);
}
int exit = p.exitValue();
if (exit != 0) {
throw new RuntimeException("jar failed. exitCode=" + exit + "\n" + out);
}
log.info("jar finished successfully");
} catch (Exception e) {
log.error("jar execution error. output=\n{}", out, e);
}
}
}

View File

@@ -0,0 +1,25 @@
package com.kamco.cd.kamcoback.inference.dto;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
public class InferenceResultsTestingDto {
@Getter
@Setter
@AllArgsConstructor
@NoArgsConstructor
public static class ShpDto {
private Long batchId;
private String uid;
private String mapId;
public static ShpDto fromEntity(InferenceResultsTestingEntity e) {
return new ShpDto(e.getBatchId(), e.getUid(), e.getMapId());
}
}
}

View File

@@ -15,13 +15,16 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerSt
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearn5kEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultsTetingRepository;
import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetAnalDataInferenceRepository;
import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetLearn5kRepository;
import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetLearnRepository;
@@ -52,7 +55,7 @@ public class InferenceResultCoreService {
private final MapInkx5kRepository mapInkx5kRepository;
private final MapSheetLearn5kRepository mapSheetLearn5kRepository;
private final InferenceResultRepository inferenceResultRepository;
private final InferenceResultsTetingRepository inferenceResultsTetingRepository;
private final EntityManager entityManager;
private final UserUtil userUtil;
@@ -444,4 +447,16 @@ public class InferenceResultCoreService {
public Long getInferenceLearnIdByUuid(UUID uuid) {
return inferenceResultRepository.getInferenceLearnIdByUuid(uuid);
}
/**
 * Loads the inference results needed to generate shp files for the given batches.
 *
 * @param batchIds batch ids whose results should be exported; {@code null} or empty yields an
 *     empty list
 * @return shp projections of the matching inference result rows (never {@code null})
 */
public List<InferenceResultsTestingDto.ShpDto> getInferenceResults(List<Long> batchIds) {
  // Short-circuit so the repository never receives a null/empty IN-clause.
  if (batchIds == null || batchIds.isEmpty()) {
    return List.of();
  }
  List<InferenceResultsTestingEntity> list =
      inferenceResultsTetingRepository.getInferenceResultList(batchIds);
  return list.stream().map(InferenceResultsTestingDto.ShpDto::fromEntity).toList();
}
}

View File

@@ -0,0 +1,102 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import org.locationtech.jts.geom.Geometry;
/**
 * JPA mapping for the {@code inference_results_testing} table: per-polygon change-detection
 * results produced by an inference batch, later exported to shp files.
 *
 * <p>NOTE(review): {@code seq} is the primary key while {@code id} is mapped as a plain column —
 * confirm that is intentional and not a mix-up with the table's logical key.
 */
@Getter
@Setter
@Entity
@Table(name = "inference_results_testing")
public class InferenceResultsTestingEntity {
// Model confidence for the detected change (semantics per the inference pipeline — TODO confirm).
@Column(name = "probability")
private Double probability;
@Column(name = "before_year")
private Long beforeYear;
@Column(name = "after_year")
private Long afterYear;
// 5K map sheet (도엽) identifier.
@Column(name = "map_id", length = Integer.MAX_VALUE)
private String mapId;
@Column(name = "source_1", length = Integer.MAX_VALUE)
private String source1;
@Column(name = "source_2", length = Integer.MAX_VALUE)
private String source2;
@Column(name = "model_version", length = Integer.MAX_VALUE)
private String modelVersion;
@Column(name = "cls_model_path", length = Integer.MAX_VALUE)
private String clsModelPath;
@Column(name = "cls_model_version", length = Integer.MAX_VALUE)
private String clsModelVersion;
@Column(name = "cd_model_type", length = Integer.MAX_VALUE)
private String cdModelType;
// NOTE(review): plain column, not the entity id — the primary key is `seq` below.
@Column(name = "id")
private Long id;
@Column(name = "model_name", length = Integer.MAX_VALUE)
private String modelName;
// Inference batch this row belongs to (queried via batchId.in(...) by the repository).
@Column(name = "batch_id")
private Long batchId;
@Column(name = "area")
private Double area;
@Column(name = "cd_prob")
private Double cdProb;
// before/after classification code (..C) and probability (..P) pairs.
@Column(name = "before_c", length = Integer.MAX_VALUE)
private String beforeC;
@Column(name = "before_p")
private Double beforeP;
@Column(name = "after_c", length = Integer.MAX_VALUE)
private String afterC;
@Column(name = "after_p")
private Double afterP;
@Column(name = "input1")
private Long input1;
@Column(name = "input2")
private Long input2;
// Primary key; value assigned by the database sequence default, not by the application.
@Id
@NotNull
@ColumnDefault("nextval('inference_results_testing_seq_seq')")
@Column(name = "seq", nullable = false)
private Long seq;
// Set by the database (`now()` default) on insert.
@ColumnDefault("now()")
@Column(name = "created_date")
private ZonedDateTime createdDate;
// 32-char uppercase hex id generated by the database default (uuid without dashes).
@Size(max = 32)
@NotNull
@ColumnDefault("upper(replace((uuid_generate_v4()), '-', ''))")
@Column(name = "uid", nullable = false, length = 32)
private String uid;
// PostGIS geometry of the detected change polygon.
@Column(name = "geometry", columnDefinition = "geometry")
private Geometry geometry;
}

View File

@@ -187,7 +187,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
SELECT
r.uid AS result_uid,
msadi.stage,
r.after_p as cd_prob,
r.probability as cd_prob,
msl.compare_yyyy,
msl.target_yyyy,
CASE

View File

@@ -0,0 +1,26 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import static com.kamco.cd.kamcoback.postgres.entity.QInferenceResultsTestingEntity.inferenceResultsTestingEntity;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class InferenceResultsTestingRepositoryImpl
    implements InferenceResultsTetingRepositoryCustom {

  private final JPAQueryFactory queryFactory;

  /**
   * Loads all inference result rows belonging to the given batches.
   *
   * @param batchIds batch ids to filter on; {@code null} or empty yields an empty list
   * @return matching rows (never {@code null})
   */
  @Override
  public List<InferenceResultsTestingEntity> getInferenceResultList(List<Long> batchIds) {
    // Guard: an empty IN () clause is invalid SQL on PostgreSQL, so short-circuit instead of
    // letting QueryDSL emit it.
    if (batchIds == null || batchIds.isEmpty()) {
      return List.of();
    }
    return queryFactory
        // selectFrom(x) is the idiomatic shorthand for select(x).from(x) when fetching the entity.
        .selectFrom(inferenceResultsTestingEntity)
        .where(inferenceResultsTestingEntity.batchId.in(batchIds))
        .fetch();
  }
}

View File

@@ -0,0 +1,8 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
import org.springframework.data.jpa.repository.JpaRepository;
/**
 * Spring Data repository for {@link InferenceResultsTestingEntity}, combined with the custom
 * QueryDSL lookups from {@link InferenceResultsTetingRepositoryCustom}.
 *
 * <p>NOTE(review): "Teting" in the name looks like a typo for "Testing"; renaming requires a
 * coordinated refactor of all usages, so it is only flagged here.
 */
public interface InferenceResultsTetingRepository
extends JpaRepository<InferenceResultsTestingEntity, Long>,
InferenceResultsTetingRepositoryCustom {}

View File

@@ -0,0 +1,9 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
import java.util.List;
/**
 * Custom (QueryDSL-backed) query methods for {@link InferenceResultsTestingEntity}.
 *
 * <p>NOTE(review): "Teting" in the name looks like a typo for "Testing" — flagged, not renamed.
 */
public interface InferenceResultsTetingRepositoryCustom {
// Returns all inference result rows whose batch_id is in the given list.
List<InferenceResultsTestingEntity> getInferenceResultList(List<Long> batchIds);
}

View File

@@ -4,18 +4,19 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.common.service.ExternalJarRunner;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient.ExternalCallResult;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.InferenceBatchSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto;
import jakarta.transaction.Transactional;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -36,9 +37,9 @@ import org.springframework.stereotype.Service;
public class MapSheetInferenceJobService {
private final InferenceResultCoreService inferenceResultCoreService;
private final InferenceResultShpService inferenceResultShpService;
private final ExternalHttpClient externalHttpClient;
private final ObjectMapper objectMapper;
private final ExternalJarRunner externalJarRunner;
@Value("${inference.batch-url}")
private String batchUrl;
@@ -49,12 +50,11 @@ public class MapSheetInferenceJobService {
@Value("${inference.url}")
private String inferenceUrl;
@Value("${mapsheet.shp.baseurl}")
private String baseDir;
@Value("${inference.jar-path}")
private String jarPath;
/** 추론 진행 배치 1분 */
@Scheduled(fixedDelay = 60_000)
@Transactional
public void runBatch() {
if (isLocalProfile()) {
return;
@@ -213,11 +213,36 @@ public class MapSheetInferenceJobService {
// 추론 종료일때 geom 데이터 저장
inferenceResultCoreService.upsertGeomData(sheet.getId());
// 추론 종료일때 도엽별 실패여부 저장
// TODO jar로 생성하는걸로 변경
// 추론 종료일때 shp 파일 생성
// inferenceResultShpService.createShpFile(sheet.getId());
List<Long> batchIds = new ArrayList<>();
batchIds.add(sheet.getM1BatchId());
batchIds.add(sheet.getM2BatchId());
batchIds.add(sheet.getM3BatchId());
List<InferenceResultsTestingDto.ShpDto> resultList =
inferenceResultCoreService.getInferenceResults(batchIds);
String inferenceId = "";
boolean first = true;
StringBuilder sb = new StringBuilder();
for (InferenceResultsTestingDto.ShpDto dto : resultList) {
if (dto.getMapId() == null) {
continue;
}
if (!sb.isEmpty()) {
sb.append(",");
}
sb.append("\"").append(dto.getMapId()).append("\"");
if (first) {
inferenceId = dto.getUid();
first = false;
}
}
String mapIds = sb.toString();
String batchId = sheet.getM1BatchId() + "," + sheet.getM2BatchId() + "," + sheet.getM3BatchId();
externalJarRunner.run(jarPath, batchId, inferenceId, mapIds);
}
/**
@@ -435,6 +460,7 @@ public class MapSheetInferenceJobService {
.map(Long::valueOf)
.toList();
// 도엽별 실패여부 업데이트
inferenceResultCoreService.saveFail5k(uuid, failedIds, type);
}

View File

@@ -100,3 +100,4 @@ inference:
url: http://10.100.0.11:8000/jobs
batch-url: http://10.100.0.11:8000/batches
geojson-dir: /kamco-nfs/requests/
jar-path: jar/makeshp-1.0.0.jar

View File

@@ -86,3 +86,4 @@ inference:
url: http://10.100.0.11:8000/jobs
batch-url: http://10.100.0.11:8000/batches
geojson-dir: /kamco-nfs/requests/
jar-path: jar/makeshp-1.0.0.jar

View File

@@ -61,4 +61,5 @@ inference:
url: http://10.100.0.11:8000/jobs
batch-url: http://10.100.0.11:8000/batches
geojson-dir: /kamco-nfs/requests/
jar-path: jar/makeshp-1.0.0.jar