diff --git a/src/main/java/com/kamco/cd/kamcoback/common/service/ExternalJarRunner.java b/src/main/java/com/kamco/cd/kamcoback/common/service/ExternalJarRunner.java index 6cfd3f76..d8882a28 100644 --- a/src/main/java/com/kamco/cd/kamcoback/common/service/ExternalJarRunner.java +++ b/src/main/java/com/kamco/cd/kamcoback/common/service/ExternalJarRunner.java @@ -1,23 +1,47 @@ package com.kamco.cd.kamcoback.common.service; +import com.kamco.cd.kamcoback.config.InferenceProperties; import java.io.BufferedReader; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; +import lombok.RequiredArgsConstructor; import lombok.extern.log4j.Log4j2; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; @Log4j2 +//0312 +@RequiredArgsConstructor @Component + public class ExternalJarRunner { @Value("${spring.profiles.active}") private String profile; + //0312 + private final InferenceProperties inferenceProperties; private static final long TIMEOUT_MINUTES = TimeUnit.DAYS.toMinutes(3); + + + // Example: java -jar build/libs/shp-exporter.jar --batch --geoserver.enabled=true --converter.inference-id=qq99999 --converter.batch-ids[0]=111 + //0312 Generates SHP files via the batch exporter JAR (v2). + public void run(String inferenceLearningId, List<?> batchIds){ + // JAR path (for shape-file generation) + String jarPathV2 = inferenceProperties.getJarPathV2(); + List<String> args = new ArrayList<>(); + // No leading spaces: each element is a separate argv entry for ProcessBuilder, + // so " --batch" would reach the child JVM with the space and not be parsed as an option. + args.add("--spring.profiles.active="+profile); + args.add("--batch"); + args.add("--geoserver.enabled=true"); + args.add("--converter.inference-id="+inferenceLearningId); + // Index from 0: using args.size() as the index would start at 4 here and break + // Spring's indexed list binding (see the example command above). + for (int i = 0; i < batchIds.size(); i++) { + args.add("--converter.batch-ids[" + i + "]=" + batchIds.get(i)); + } + execJar(jarPathV2, args); + } + + /** * shp 파일 생성 * @@ -74,14 +98,15 @@ public class ExternalJarRunner { cmd.add("-jar"); cmd.add(jarPath); cmd.addAll(args); - + //0312 + log.info("exec jar command: {}", cmd); ProcessBuilder pb = new 
ProcessBuilder(cmd); pb.redirectErrorStream(true); Process p = pb.start(); try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getInputStream(), StandardCharsets.UTF_8))) { + new BufferedReader(new InputStreamReader(p.getInputStream(), StandardCharsets.UTF_8))) { String line; while ((line = br.readLine()) != null) { out.append(line).append('\n'); @@ -135,3 +160,4 @@ public class ExternalJarRunner { return v; } } + diff --git a/src/main/java/com/kamco/cd/kamcoback/config/InferenceProperties.java b/src/main/java/com/kamco/cd/kamcoback/config/InferenceProperties.java index e5a6a04e..93ccb986 100644 --- a/src/main/java/com/kamco/cd/kamcoback/config/InferenceProperties.java +++ b/src/main/java/com/kamco/cd/kamcoback/config/InferenceProperties.java @@ -16,5 +16,7 @@ public class InferenceProperties { private String batchUrl; private String geojsonDir; private String jarPath; + //0312 + private String jarPathV2; private String inferenceServerName; } diff --git a/src/main/java/com/kamco/cd/kamcoback/scheduler/service/MapSheetInferenceJobService.java b/src/main/java/com/kamco/cd/kamcoback/scheduler/service/MapSheetInferenceJobService.java index 0a4b6519..429efe0b 100644 --- a/src/main/java/com/kamco/cd/kamcoback/scheduler/service/MapSheetInferenceJobService.java +++ b/src/main/java/com/kamco/cd/kamcoback/scheduler/service/MapSheetInferenceJobService.java @@ -255,8 +255,9 @@ public class MapSheetInferenceJobService { // 추론 종료일때 shp 파일 생성 String batchIdStr = batchIds.stream().map(String::valueOf).collect(Collectors.joining(",")); - // shp 파일 비동기 생성 - shpPipelineService.runPipeline(jarPath, datasetDir, batchIdStr, sheet.getUid()); + //0312 shp 파일 비동기 생성 (바꿔주세요) + shpPipelineService.makeShapeFile(sheet.getUid(),batchIds); + //shpPipelineService.runPipeline(jarPath, datasetDir, batchIdStr, sheet.getUid()); } /** diff --git a/src/main/java/com/kamco/cd/kamcoback/scheduler/service/ShpPipelineService.java 
b/src/main/java/com/kamco/cd/kamcoback/scheduler/service/ShpPipelineService.java index d6355927..fd8186df 100644 --- a/src/main/java/com/kamco/cd/kamcoback/scheduler/service/ShpPipelineService.java +++ b/src/main/java/com/kamco/cd/kamcoback/scheduler/service/ShpPipelineService.java @@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.common.service.ExternalJarRunner; import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService; import com.kamco.cd.kamcoback.scheduler.config.ShpKeyLock; import java.nio.file.Paths; +import java.util.List; import lombok.RequiredArgsConstructor; import lombok.extern.log4j.Log4j2; import org.springframework.scheduling.annotation.Async; @@ -18,12 +19,38 @@ public class ShpPipelineService { private final ExternalJarRunner externalJarRunner; private final ShpKeyLock shpKeyLock; + + //0312 Asynchronously generates SHP files for one inference run. + // NOTE(review): confirm an executor bean named "makeShapeFile" exists; the sibling runPipeline uses "shpExecutor". + @Async("makeShapeFile") + public void makeShapeFile(String inferenceId, List<?> batchIds) { + // Early-return when another pipeline already holds the per-inference lock. + // (Previously the pipeline ran inside the !tryLock branch — i.e. only when the + // lock was NOT acquired — and unlocked a lock it never held, while a successful + // tryLock did nothing and leaked the lock.) + if (!shpKeyLock.tryLock(inferenceId)) { + log.info("SHP pipeline already running. inferenceId={}", inferenceId); + return; + } + try { + log.info("SHP pipeline started. inferenceId={}", inferenceId); + externalJarRunner.run(inferenceId, batchIds); + } catch (Exception e) { + log.error("SHP pipeline failed. inferenceId={}", inferenceId, e); + // TODO: add failure-status update logic + } finally { + log.info("SHP pipeline DONE. inferenceId={}", inferenceId); + // Release only here, where the lock was actually acquired. + shpKeyLock.unlock(inferenceId); + } + } + + /** * shp 파일 생성 1. merge 생성 2. 
생성된 merge shp 파일로 geoserver 등록, 3.도엽별로 shp 생성 * - * @param jarPath 실행 jar 파일 경로 - * @param datasetDir shp 파일이 생성될 경로 - * @param batchIds 추론 batch id = 12,13,14 + * @param jarPath 실행 jar 파일 경로 + * @param datasetDir shp 파일이 생성될 경로 + * @param batchIds 추론 batch id = 12,13,14 * @param inferenceId 추론 uid 32자 */ @Async("shpExecutor") @@ -36,24 +63,33 @@ public class ShpPipelineService { } try { + + log.info(""); + log.info("============================================================"); + log.info("SHP pipeline started. inferenceId={}", inferenceId); + log.info("============================================================"); + // uid 기준 merge shp, geojson 파일 생성 externalJarRunner.run(jarPath, batchIds, inferenceId, "", "MERGED"); // uid 기준 shp 파일 geoserver 등록 - String register = - Paths.get(datasetDir, inferenceId, "merge", inferenceId + ".shp").toString(); - log.info("register={}", register); - externalJarRunner.run(jarPath, register, inferenceId); - - // uid 기준 도엽별 shp, geojson 파일 생성 - externalJarRunner.run(jarPath, batchIds, inferenceId, "", "RESOLVE"); - - log.info("SHP pipeline finished. inferenceId={}", inferenceId); +// String register = +// Paths.get(datasetDir, inferenceId, "merge", inferenceId + ".shp").toString(); +// log.info("register={}", register); +// externalJarRunner.run(jarPath, register, inferenceId); +// +// // uid 기준 도엽별 shp, geojson 파일 생성 +// externalJarRunner.run(jarPath, batchIds, inferenceId, "", "RESOLVE"); +// +// log.info("SHP pipeline finished. inferenceId={}", inferenceId); } catch (Exception e) { log.error("SHP pipeline failed. inferenceId={}", inferenceId, e); // TODO 실패 상태 업데이트 로직 추가 } finally { + log.info("============================================================"); + log.info("SHP pipeline DONE. 
inferenceId={}", inferenceId); + log.info("============================================================"); shpKeyLock.unlock(inferenceId); } } diff --git a/src/main/resources/application-dev.yml b/src/main/resources/application-dev.yml index 8dc9f7a0..5f9badd3 100644 --- a/src/main/resources/application-dev.yml +++ b/src/main/resources/application-dev.yml @@ -100,6 +100,7 @@ inference: url: http://192.168.2.183:8000/jobs batch-url: http://192.168.2.183:8000/batches jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar + jar-path-v2: ${inference.nfs}/repo/jar/shp-exporter-v2.jar inference-server-name: server1,server2,server3,server4 output-dir: ${inference.nfs}/model_output/export diff --git a/src/main/resources/application-local.yml b/src/main/resources/application-local.yml index 32ed9a30..e38c12fe 100644 --- a/src/main/resources/application-local.yml +++ b/src/main/resources/application-local.yml @@ -78,6 +78,7 @@ inference: url: http://10.100.0.11:8000/jobs batch-url: http://10.100.0.11:8000/batches jar-path: jar/shp-exporter.jar + jar-path-v2: jar/shp-exporter-v2.jar inference-server-name: server1,server2,server3,server4 output-dir: ${inference.nfs}/model_output/export diff --git a/src/main/resources/application-prod.yml b/src/main/resources/application-prod.yml index 25a59362..3581f6ad 100644 --- a/src/main/resources/application-prod.yml +++ b/src/main/resources/application-prod.yml @@ -95,6 +95,8 @@ inference: url: http://172.16.4.56:8000/jobs batch-url: http://172.16.4.56:8000/batches jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar + # //0312 + jar-path-v2: ${inference.nfs}/repo/jar/shp-exporter-v2.jar inference-server-name: server1,server2,server3,server4 output-dir: ${inference.nfs}/model_output/export diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml index e32450ac..68cd99cb 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -88,3 +88,6 @@ inference: nfs: /kamco-nfs 
geojson-dir: ${inference.nfs}/requests/ # 추론실행을 위한 파일생성경로 jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar + # //0312 + jar-path-v2: ${inference.nfs}/repo/jar/shp-exporter-v2.jar +