Organize the shp file conversion logic

2026-03-12 07:31:54 +09:00
parent 5d417d85ff
commit 828a4c5dca
8 changed files with 88 additions and 16 deletions


@@ -1,23 +1,47 @@
package com.kamco.cd.kamcoback.common.service;
import com.kamco.cd.kamcoback.config.InferenceProperties;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
@Log4j2
//0312
@RequiredArgsConstructor
@Component
public class ExternalJarRunner {
@Value("${spring.profiles.active}")
private String profile;
//0312
private final InferenceProperties inferenceProperties;
private static final long TIMEOUT_MINUTES = TimeUnit.DAYS.toMinutes(3);
// java -jar build/libs/shp-exporter.jar --batch --geoserver.enabled=true --converter.inference-id=qq99999 --converter.batch-ids[0]=111
//0312 generate shp files via a batch run
public void run(String inferenceLearningId, List<Long> batchIds) {
    // JAR path (for shape file generation)
    String jarPathV2 = inferenceProperties.getJarPathV2();
    List<String> args = new ArrayList<>();
    args.add("--spring.profiles.active=" + profile);
    args.add("--batch");
    args.add("--geoserver.enabled=true");
    args.add("--converter.inference-id=" + inferenceLearningId);
    // batch-ids indices must start at 0; args.size() would start at 4 here
    for (int i = 0; i < batchIds.size(); i++) {
        args.add("--converter.batch-ids[" + i + "]=" + batchIds.get(i));
    }
    execJar(jarPathV2, args);
}
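For reference, with the values from the example comment above (inference-id qq99999, batch id 111), plus a hypothetical second batch id 222 and an assumed local profile, run() assembles a command line equivalent to:

java -jar shp-exporter-v2.jar --spring.profiles.active=local --batch --geoserver.enabled=true --converter.inference-id=qq99999 --converter.batch-ids[0]=111 --converter.batch-ids[1]=222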
/**
* Generates shp files
*
@@ -74,14 +98,15 @@ public class ExternalJarRunner {
cmd.add("-jar");
cmd.add(jarPath);
cmd.addAll(args);
//0312
log.info("exec jar command: {}", cmd);
ProcessBuilder pb = new ProcessBuilder(cmd);
pb.redirectErrorStream(true);
Process p = pb.start();
try (BufferedReader br =
        new BufferedReader(new InputStreamReader(p.getInputStream(), StandardCharsets.UTF_8))) {
String line;
while ((line = br.readLine()) != null) {
out.append(line).append('\n');
@@ -135,3 +160,4 @@ public class ExternalJarRunner {
return v;
}
}


@@ -16,5 +16,7 @@ public class InferenceProperties {
private String batchUrl;
private String geojsonDir;
private String jarPath;
//0312
private String jarPathV2;
private String inferenceServerName;
}
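The new jarPathV2 field picks up the jar-path-v2 keys added to the yaml files below. Assuming the class is bound with @ConfigurationProperties(prefix = "inference") (the annotation sits outside this hunk), Spring's relaxed binding maps the kebab-case key to the camelCase field:

// assumed binding (annotation not visible in this hunk)
// inference.jar-path-v2  ->  private String jarPathV2;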


@@ -255,8 +255,9 @@ public class MapSheetInferenceJobService {
// generate shp files when inference has finished
String batchIdStr = batchIds.stream().map(String::valueOf).collect(Collectors.joining(","));
// generate shp files asynchronously
shpPipelineService.runPipeline(jarPath, datasetDir, batchIdStr, sheet.getUid());
//0312 generate shp files asynchronously (switch to this call)
shpPipelineService.makeShapeFile(sheet.getUid(), batchIds);
//shpPipelineService.runPipeline(jarPath, datasetDir, batchIdStr, sheet.getUid());
}
/**


@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.common.service.ExternalJarRunner;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.scheduler.config.ShpKeyLock;
import java.nio.file.Paths;
import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.scheduling.annotation.Async;
@@ -18,12 +19,38 @@ public class ShpPipelineService {
private final ExternalJarRunner externalJarRunner;
private final ShpKeyLock shpKeyLock;
//0312 generate shp files asynchronously
@Async("makeShapeFile")
public void makeShapeFile(String inferenceId, List<Long> batchIds) {
    if (!shpKeyLock.tryLock(inferenceId)) {
        log.info("");
        log.info("============================================================");
        log.info("SHP pipeline already running. inferenceId={}", inferenceId);
        log.info("============================================================");
        return;
    }
    try {
        log.info("SHP pipeline started. inferenceId={}", inferenceId);
        externalJarRunner.run(inferenceId, batchIds);
    } catch (Exception e) {
        log.error("SHP pipeline failed. inferenceId={}", inferenceId, e);
        // TODO add failure-status update logic
    } finally {
        log.info("============================================================");
        log.info("SHP pipeline DONE. inferenceId={}", inferenceId);
        log.info("============================================================");
        log.info("");
        shpKeyLock.unlock(inferenceId);
    }
}
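ShpKeyLock itself is not part of this commit; only tryLock/unlock are visible here. A minimal sketch of a per-key lock that would satisfy this usage, assuming a ConcurrentHashMap-backed key set (hypothetical, not the project's actual implementation):

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.springframework.stereotype.Component;

@Component
public class ShpKeyLock {
    private final Set<String> running = ConcurrentHashMap.newKeySet();

    // returns false when a pipeline for this key is already in flight
    public boolean tryLock(String key) {
        return running.add(key);
    }

    public void unlock(String key) {
        running.remove(key);
    }
}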
/**
 * Generates shp files: 1. create the merged shp, 2. register the merged shp file with GeoServer, 3. create a shp per map sheet
 *
 * @param jarPath path of the jar file to execute
 * @param datasetDir directory where the shp files are created
 * @param batchIds inference batch ids, e.g. 12,13,14
 * @param inferenceId inference uid (32 chars)
 */
@Async("shpExecutor")
@@ -36,24 +63,33 @@ public class ShpPipelineService {
}
try {
log.info("");
log.info("============================================================");
log.info("SHP pipeline started. inferenceId={}", inferenceId);
log.info("============================================================");
// generate merged shp and geojson files per uid
externalJarRunner.run(jarPath, batchIds, inferenceId, "", "MERGED");
// register the shp file with GeoServer per uid
String register =
Paths.get(datasetDir, inferenceId, "merge", inferenceId + ".shp").toString();
log.info("register={}", register);
externalJarRunner.run(jarPath, register, inferenceId);
// generate per-map-sheet shp and geojson files per uid
externalJarRunner.run(jarPath, batchIds, inferenceId, "", "RESOLVE");
log.info("SHP pipeline finished. inferenceId={}", inferenceId);
// String register =
// Paths.get(datasetDir, inferenceId, "merge", inferenceId + ".shp").toString();
// log.info("register={}", register);
// externalJarRunner.run(jarPath, register, inferenceId);
//
// // generate per-map-sheet shp and geojson files per uid
// externalJarRunner.run(jarPath, batchIds, inferenceId, "", "RESOLVE");
//
// log.info("SHP pipeline finished. inferenceId={}", inferenceId);
} catch (Exception e) {
log.error("SHP pipeline failed. inferenceId={}", inferenceId, e);
// TODO add failure-status update logic
} finally {
log.info("============================================================");
log.info("SHP pipeline DONE. inferenceId={}", inferenceId);
log.info("============================================================");
shpKeyLock.unlock(inferenceId);
}
}
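The @Async qualifiers ("makeShapeFile", "shpExecutor") refer to executor beans by name; their definitions are not in this commit. A minimal sketch of the assumed configuration (bean names taken from the qualifiers, pool sizes hypothetical):

import java.util.concurrent.Executor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

@Configuration
@EnableAsync
public class AsyncConfig {

    @Bean(name = "makeShapeFile")
    public Executor makeShapeFileExecutor() {
        ThreadPoolTaskExecutor ex = new ThreadPoolTaskExecutor();
        ex.setCorePoolSize(1); // shp generation is heavyweight; one run at a time
        ex.setThreadNamePrefix("shp-make-");
        ex.initialize();
        return ex;
    }

    @Bean(name = "shpExecutor")
    public Executor shpExecutor() {
        ThreadPoolTaskExecutor ex = new ThreadPoolTaskExecutor();
        ex.setCorePoolSize(1);
        ex.setThreadNamePrefix("shp-pipe-");
        ex.initialize();
        return ex;
    }
}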


@@ -100,6 +100,7 @@ inference:
url: http://192.168.2.183:8000/jobs
batch-url: http://192.168.2.183:8000/batches
jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
jar-path-v2: ${inference.nfs}/repo/jar/shp-exporter-v2.jar
inference-server-name: server1,server2,server3,server4
output-dir: ${inference.nfs}/model_output/export


@@ -78,6 +78,7 @@ inference:
url: http://10.100.0.11:8000/jobs
batch-url: http://10.100.0.11:8000/batches
jar-path: jar/shp-exporter.jar
jar-path-v2: jar/shp-exporter-v2.jar
inference-server-name: server1,server2,server3,server4
output-dir: ${inference.nfs}/model_output/export


@@ -95,6 +95,8 @@ inference:
url: http://172.16.4.56:8000/jobs
batch-url: http://172.16.4.56:8000/batches
jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
# 0312
jar-path-v2: ${inference.nfs}/repo/jar/shp-exporter-v2.jar
inference-server-name: server1,server2,server3,server4
output-dir: ${inference.nfs}/model_output/export


@@ -88,3 +88,6 @@ inference:
nfs: /kamco-nfs
geojson-dir: ${inference.nfs}/requests/ # path where files for running inference are created
jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
# 0312
jar-path-v2: ${inference.nfs}/repo/jar/shp-exporter-v2.jar
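For reference, with nfs: /kamco-nfs as set in this last file, the placeholder resolves to /kamco-nfs/repo/jar/shp-exporter-v2.jar, matching the absolute paths used by the other profiles above.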