diff --git a/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/CsvFileProcessor.java b/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/CsvFileProcessor.java new file mode 100644 index 00000000..fc440c58 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/CsvFileProcessor.java @@ -0,0 +1,33 @@ +package com.kamco.cd.kamcoback.common.utils.zip; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class CsvFileProcessor implements ZipEntryProcessor { + + @Override + public boolean supports(String fileName) { + return fileName.toLowerCase().endsWith(".csv"); + } + + @Override + public void process(String fileName, InputStream is) throws IOException { + try (BufferedReader br = new BufferedReader(new InputStreamReader(is))) { + br.lines() + .forEach( + line -> { + String[] cols = line.split(","); + // CSV 처리 + for (String col : cols) { + log.info(col); // TODO : 추후에 csv 파일 읽어서 작업 필요할 때 정의하기 + } + }); + } + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/JsonStreamingFileProcessor.java b/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/JsonStreamingFileProcessor.java new file mode 100644 index 00000000..40d2e423 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/JsonStreamingFileProcessor.java @@ -0,0 +1,73 @@ +package com.kamco.cd.kamcoback.common.utils.zip; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.io.InputStream; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class JsonStreamingFileProcessor implements ZipEntryProcessor { + + private final JsonFactory jsonFactory; + + public JsonStreamingFileProcessor(ObjectMapper objectMapper) { + // ZipInputStream 보호용 설정 + objectMapper.configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, false); + this.jsonFactory = objectMapper.getFactory(); + } + + @Override + public boolean supports(String fileName) { + return fileName.toLowerCase().endsWith(".json"); + } + + @Override + public void process(String fileName, InputStream is) throws IOException { + + log.info("JSON process start: {}", fileName); + + JsonParser parser = jsonFactory.createParser(is); + + // JSON 구조에 상관없이 token 단위로 순회 + while (parser.nextToken() != null) { + handleToken(parser); + } + + log.info("JSON process end: {}", fileName); + } + + private void handleToken(JsonParser parser) throws IOException { + JsonToken token = parser.currentToken(); + + if (token == JsonToken.FIELD_NAME) { + String fieldName = parser.getCurrentName(); + // TODO: json 파일 읽어야 할 내용 정의되면 항목 확정하기 + switch (fieldName) { + case "type" -> { + parser.nextToken(); + String type = parser.getValueAsString(); + log.info("type: {}", type); + } + case "name" -> { + parser.nextToken(); + String name = parser.getValueAsString(); + log.info("Name: {}", name); + } + case "features" -> { + parser.nextToken(); + String features = parser.readValueAsTree().toString(); + log.info("features: {}", features); + } + default -> { + parser.nextToken(); + parser.skipChildren(); + } + } + } + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/TextFileProcessor.java 
b/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/TextFileProcessor.java new file mode 100644 index 00000000..1f5d0b5f --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/TextFileProcessor.java @@ -0,0 +1,27 @@ +package com.kamco.cd.kamcoback.common.utils.zip; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class TextFileProcessor implements ZipEntryProcessor { + + @Override + public boolean supports(String fileName) { + return fileName.toLowerCase().endsWith(".txt"); + } + + @Override + public void process(String fileName, InputStream is) throws IOException { + BufferedReader br = new BufferedReader(new InputStreamReader(is)); + String line; + while ((line = br.readLine()) != null) { + log.info(line); // TODO : 추후 txt 파일 읽어서 작업할 때 정의하기 + } + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/ZipEntryProcessor.java b/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/ZipEntryProcessor.java new file mode 100644 index 00000000..985b6a8b --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/ZipEntryProcessor.java @@ -0,0 +1,11 @@ +package com.kamco.cd.kamcoback.common.utils.zip; + +import java.io.IOException; +import java.io.InputStream; + +public interface ZipEntryProcessor { + + boolean supports(String fileName); + + void process(String fileName, InputStream is) throws IOException; +} diff --git a/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/ZipUtils.java b/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/ZipUtils.java new file mode 100644 index 00000000..c0e7f5ed --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/common/utils/zip/ZipUtils.java @@ -0,0 +1,49 @@ +package com.kamco.cd.kamcoback.common.utils.zip; + +import java.io.IOException; +import java.io.InputStream; +import java.io.UncheckedIOException; +import java.util.List; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class ZipUtils { + + private final List processors; + + public ZipUtils(List processors) { + this.processors = processors; + } + + public void processZip(InputStream zipStream) throws IOException { + try (ZipInputStream zis = new ZipInputStream(zipStream)) { + + ZipEntry entry; + while ((entry = zis.getNextEntry()) != null) { + + if (entry.isDirectory()) { + continue; + } + + String fileName = entry.getName(); + processors.stream() + .filter(p -> p.supports(fileName)) + .findFirst() + .ifPresent( + processor -> { + try { + processor.process(fileName, zis); + } catch (IOException ioe) { + throw new UncheckedIOException(ioe); + } + }); + + zis.closeEntry(); + } + } + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultShpApiController.java b/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultShpApiController.java index 3027acbc..f2c83c6f 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultShpApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultShpApiController.java @@ -1,8 +1,13 @@ package com.kamco.cd.kamcoback.inference; import com.kamco.cd.kamcoback.config.api.ApiResponseDto; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto; import 
com.kamco.cd.kamcoback.inference.service.InferenceResultShpService; import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; import lombok.RequiredArgsConstructor; import org.springframework.web.bind.annotation.PostMapping; @@ -18,16 +23,39 @@ public class InferenceResultShpApiController { private final InferenceResultShpService inferenceResultShpService; @Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장") + @ApiResponses( + value = { + @ApiResponse( + responseCode = "201", + description = "데이터 저장 성공", + content = + @Content( + mediaType = "application/json", + schema = + @Schema(implementation = InferenceResultShpDto.InferenceCntDto.class))), + @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) @PostMapping("/save") - public ApiResponseDto saveInferenceData() { - inferenceResultShpService.saveInferenceResultData(); - return ApiResponseDto.createOK("OK"); + public ApiResponseDto saveInferenceData() { + return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData()); } @Operation(summary = "shp 파일 생성", description = "shp 파일 생성") + @ApiResponses( + value = { + @ApiResponse( + responseCode = "201", + description = "파일생성 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = InferenceResultShpDto.FileCntDto.class))), + @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) @PostMapping("/create") - public ApiResponseDto createShpFile() { - inferenceResultShpService.createShpFile(); - return ApiResponseDto.createOK("OK"); + public ApiResponseDto createShpFile() { + return ApiResponseDto.createOK(inferenceResultShpService.createShpFile()); } } diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/ShpWriter.java b/src/main/java/com/kamco/cd/kamcoback/inference/ShpWriter.java index 85e03248..850c1188 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/ShpWriter.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/ShpWriter.java @@ -6,8 +6,8 @@ import java.util.List; public interface ShpWriter { // SHP (.shp/.shx/.dbf) - void writeShp(String shpBasePath, List rows); + WriteCnt writeShp(String shpBasePath, List rows); // GeoJSON (.geojson) - void writeGeoJson(String geoJsonPath, List rows); + WriteCnt writeGeoJson(String geoJsonPath, List rows); } diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/WriteCnt.java b/src/main/java/com/kamco/cd/kamcoback/inference/WriteCnt.java new file mode 100644 index 00000000..d81c1a32 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/inference/WriteCnt.java @@ -0,0 +1,17 @@ +package com.kamco.cd.kamcoback.inference; + +public record WriteCnt(int shp, int shx, int dbf, int prj, int geojson) { + + public static WriteCnt zero() { + return new WriteCnt(0, 0, 0, 0, 0); + } + + public WriteCnt plus(WriteCnt o) { + return new WriteCnt( + this.shp + o.shp, + this.shx + o.shx, + this.dbf + o.dbf, + this.prj + o.prj, + this.geojson + o.geojson); + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java 
b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java index 9581bab2..96de4301 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java @@ -1,8 +1,11 @@ package com.kamco.cd.kamcoback.inference.dto; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity; +import io.swagger.v3.oas.annotations.media.Schema; import java.util.UUID; +import lombok.AllArgsConstructor; import lombok.Getter; +import lombok.NoArgsConstructor; import lombok.Setter; import org.locationtech.jts.geom.Geometry; @@ -61,4 +64,39 @@ public class InferenceResultShpDto { return d; } } + + @Setter + @Getter + @AllArgsConstructor + @NoArgsConstructor + public static class InferenceCntDto { + + @Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 데이터 건수", example = "120") + int inferenceCnt; + + @Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 Geom 데이터 건수", example = "120") + int inferenceGeomCnt; + } + + @Setter + @Getter + @AllArgsConstructor + @NoArgsConstructor + public static class FileCntDto { + + @Schema(description = "shp 파일 생성 수 (덮어쓰기 포함)", example = "120") + private int shp; + + @Schema(description = "shx 파일 생성 수 (덮어쓰기 포함)", example = "120") + private int shx; + + @Schema(description = "dbf 파일 생성 수 (덮어쓰기 포함)", example = "120") + private int dbf; + + @Schema(description = "prj 파일 생성 수 (덮어쓰기 포함)", example = "120") + private int prj; + + @Schema(description = "geojson 파일 생성 수 (덮어쓰기 포함)", example = "120") + private int geojson; + } } diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java index d20af592..def45ede 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java @@ -5,6 +5,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.kamco.cd.kamcoback.inference.ShpWriter; +import com.kamco.cd.kamcoback.inference.WriteCnt; import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto; import java.io.File; import java.io.FileOutputStream; @@ -56,12 +57,13 @@ public class GeoToolsShpWriter implements ShpWriter { * * @param shpBasePath 확장자를 제외한 SHP 파일 기본 경로 * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 + * @return 이번 호출로 write(생성/덮어쓰기)가 수행된 파일 개수 */ @Override - public void writeShp(String shpBasePath, List rows) { + public WriteCnt writeShp(String shpBasePath, List rows) { if (rows == null || rows.isEmpty()) { - return; + return WriteCnt.zero(); } // SHP는 Geometry.class를 허용하지 않으므로 @@ -86,7 +88,7 @@ public class GeoToolsShpWriter implements ShpWriter { // FeatureType(schema) 생성 SimpleFeatureType schema = createSchema(geomType, crs); - // ShapefileDataStore 생성 + // ShapefileDataStore 생성 (기존 파일이 있어도 새로 생성/overwrite 동작) dataStore = createDataStore(shpFile, schema); // FeatureCollection 생성 @@ -100,6 +102,9 @@ public class GeoToolsShpWriter implements ShpWriter { log.info("SHP 생성 완료: {} ({} features)", shpFile.getAbsolutePath(), collection.size()); + // 덮어쓰기 포함: 이번 호출이 정상 종료되면 4개 파일 write가 발생했다고 카운트 + return new WriteCnt(1, 1, 1, 1, 0); + } catch (Exception e) { throw new RuntimeException("SHP 생성 실패: " + shpBasePath, e); } finally { @@ -124,12 +129,13 @@ public class 
GeoToolsShpWriter implements ShpWriter { * * @param geoJsonPath 생성할 GeoJSON 파일의 전체 경로 (.geojson 포함) * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 + * @return 이번 호출로 write(생성/덮어쓰기)가 수행된 파일 개수 */ @Override - public void writeGeoJson(String geoJsonPath, List rows) { + public WriteCnt writeGeoJson(String geoJsonPath, List rows) { if (rows == null || rows.isEmpty()) { - return; + return WriteCnt.zero(); } try { @@ -167,10 +173,6 @@ public class GeoToolsShpWriter implements ShpWriter { groupProps.put("input1", first.getInput1()); groupProps.put("input2", first.getInput2()); groupProps.put("map_id", first.getMapId()); - // 학습서버 버전은 추후 추가 - // groupProps.put("m1", "v1.2222.251223121212"); - // groupProps.put("m2", "v2.211.251223121213"); - // groupProps.put("m3", "v3.233.251223121214"); root.set("properties", groupProps); // features 배열 @@ -231,19 +233,14 @@ public class GeoToolsShpWriter implements ShpWriter { log.info("GeoJSON 생성 완료: {} ({} features)", geoJsonFile.getAbsolutePath(), features.size()); + // 덮어쓰기 포함: 이번 호출이 정상 종료되면 geojson 1개 write로 카운트 + return new WriteCnt(0, 0, 0, 0, 1); + } catch (Exception e) { throw new RuntimeException("GeoJSON 생성 실패: " + geoJsonPath, e); } } - /** - * rows 목록에서 첫 번째로 발견되는 non-null Geometry를 반환한다. - * - *
<p>
- SHP 스키마 생성 시 geometry 타입 결정을 위해 사용된다. - * - * @param rows DTO 목록 - * @return 첫 번째 non-null Geometry, 없으면 null - */ private Geometry firstNonNullGeometry(List rows) { for (InferenceResultShpDto.Basic r : rows) { if (r != null && r.getGeometry() != null) { @@ -253,15 +250,6 @@ public class GeoToolsShpWriter implements ShpWriter { return null; } - /** - * SHP 파일에 사용할 SimpleFeatureType(schema)를 생성한다. - * - *
<p>
- geometry 컬럼은 반드시 첫 번째 컬럼이어야 한다. - DBF 컬럼은 SHP 제약(컬럼명 10자, 길이 제한)을 고려한다. - * - * @param geomType geometry의 구체 타입 (Polygon, MultiPolygon 등) - * @param crs 좌표계(EPSG:5186) - * @return SimpleFeatureType - */ private SimpleFeatureType createSchema( Class geomType, CoordinateReferenceSystem crs) { SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder(); @@ -286,15 +274,6 @@ public class GeoToolsShpWriter implements ShpWriter { return b.buildFeatureType(); } - /** - * ShapefileDataStore를 생성하고 스키마를 등록한다. - * - *
<p>
- DBF 파일 인코딩은 EUC-KR로 설정한다. - spatial index(.qix)를 생성한다. - * - * @param shpFile SHP 파일 객체 - * @param schema SimpleFeatureType - * @return 생성된 ShapefileDataStore - */ private ShapefileDataStore createDataStore(File shpFile, SimpleFeatureType schema) throws Exception { @@ -311,15 +290,6 @@ public class GeoToolsShpWriter implements ShpWriter { return dataStore; } - /** - * DTO 목록을 SimpleFeatureCollection으로 변환한다. - * - *
<p>
- DTO 1건당 Feature 1개 생성 - geometry가 null인 데이터는 제외한다. - * - * @param schema FeatureType - * @param rows DTO 목록 - * @return DefaultFeatureCollection - */ private DefaultFeatureCollection buildFeatureCollection( SimpleFeatureType schema, List rows) { DefaultFeatureCollection collection = new DefaultFeatureCollection(); @@ -352,12 +322,6 @@ public class GeoToolsShpWriter implements ShpWriter { return collection; } - /** - * FeatureCollection을 SHP 파일에 실제로 기록한다. - * - * @param dataStore ShapefileDataStore - * @param collection FeatureCollection - */ private void writeFeatures(ShapefileDataStore dataStore, DefaultFeatureCollection collection) throws Exception { @@ -373,12 +337,6 @@ public class GeoToolsShpWriter implements ShpWriter { store.getTransaction().commit(); } - /** - * SHP 좌표계 정보를 담은 .prj 파일을 생성한다. - * - * @param shpBasePath SHP 기본 경로 (확장자 제외) - * @param crs 좌표계(EPSG:5186) - */ private void writePrjFile(String shpBasePath, CoordinateReferenceSystem crs) throws Exception { File prjFile = new File(shpBasePath + ".prj"); @@ -387,11 +345,6 @@ public class GeoToolsShpWriter implements ShpWriter { Files.writeString(prjFile.toPath(), crs.toWKT(), StandardCharsets.UTF_8); } - /** - * 파일이 생성될 디렉토리가 없으면 생성한다. - * - * @param file 생성 대상 파일 - */ private void createDirectories(File file) throws Exception { File parent = file.getParentFile(); if (parent != null) { diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java index 78f7c80c..7a138047 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java @@ -1,9 +1,9 @@ package com.kamco.cd.kamcoback.inference.service; import com.kamco.cd.kamcoback.inference.ShpWriter; +import com.kamco.cd.kamcoback.inference.WriteCnt; import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto; import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService; -import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity; import java.util.List; import lombok.RequiredArgsConstructor; import org.springframework.stereotype.Service; @@ -17,66 +17,76 @@ public class InferenceResultShpService { private final InferenceResultShpCoreService coreService; private final ShpWriter shpWriter; - /** inference_results -> tb_map_sheet_anal_data_inference / geom 업서트 */ + /** inference_results 테이블을 기준으로 분석 결과 테이블과 도형 테이블을 최신 상태로 반영한다. */ @Transactional - public void saveInferenceResultData() { - coreService.buildInferenceData(); + public InferenceResultShpDto.InferenceCntDto saveInferenceResultData() { + return coreService.buildInferenceData(); } /** - * dataUid 단위로 재생성(덮어쓰기) - reset(inference false + geom 전부 false) - geom 엔티티 조회 -> dto 변환 - - * shp/geojson 생성 - 성공 geo_uid만 true - inference true + * 분석 데이터 단위로 SHP / GeoJSON 파일을 생성한다. + * + *
<p>
처리 흐름: 1. 파일 미생성 상태의 분석 데이터 조회 2. 재생성을 위한 상태 초기화 3. 도형 데이터 조회 4. SHP / GeoJSON 파일 생성 5. 파일 + * 생성 완료 상태 반영 + * + *
<p>
중간 실패 시 다음 실행에서 전체 재생성된다. */ @Transactional - public void createShpFile() { + public InferenceResultShpDto.FileCntDto createShpFile() { - // TODO 경로는 설정으로 빼는 게 좋음 String baseDir = System.getProperty("user.home") + "/export"; - int batchSize = 100; // 한번에 처리할 data_uid 개수 - int geomLimit = 500000; // data_uid 당 최대 geom 로딩 수 (메모리/시간 보고 조절) + int batchSize = 100; + int geomLimit = 500_000; + + WriteCnt total = WriteCnt.zero(); List dataUids = coreService.findPendingDataUids(batchSize); for (Long dataUid : dataUids) { - // 1) 덮어쓰기 시작: 리셋 + // 재생성을 위한 생성 상태 초기화 coreService.resetForRegenerate(dataUid); - // 2) 생성 대상 조회(엔티티) - List entities = - coreService.loadGeomEntities(dataUid, geomLimit); - - if (entities.isEmpty()) { - // 실패 상태(false 유지) -> 다음 배치에서 다시 덮어쓰기로 시도 + // 도형 데이터 조회 + List dtoList = coreService.loadGeomDtos(dataUid, geomLimit); + if (dtoList.isEmpty()) { continue; } - // 3) 엔티티 -> DTO - List dtoList = - entities.stream().map(InferenceResultShpDto.Basic::from).toList(); - - // 4) 파일명: stage_mapSheet_compare_target (첫 row 기준) - MapSheetAnalDataInferenceGeomEntity first = entities.get(0); + // 파일명 생성 (stage_mapSheet_compare_target) + InferenceResultShpDto.Basic first = dtoList.get(0); String baseName = String.format( "%d_%d_%d_%d", - first.getStage(), - first.getMapSheetNum(), - first.getCompareYyyy(), - first.getTargetYyyy()); + first.getStage(), first.getMapId(), first.getInput1(), first.getInput2()); - String shpBasePath = baseDir + "/shp/" + baseName; // 확장자 없이 + String shpBasePath = baseDir + "/shp/" + baseName; String geoJsonPath = baseDir + "/geojson/" + baseName + ".geojson"; - // 5) 파일 생성 (예외 발생 시 성공 마킹 안 됨 -> 다음에 덮어쓰기 재시도) - shpWriter.writeShp(shpBasePath, dtoList); - shpWriter.writeGeoJson(geoJsonPath, dtoList); + try { + // 폴더 안 파일을 세지 않고, Writer가 "이번 호출에서 write한 개수"를 반환 + total = total.plus(shpWriter.writeShp(shpBasePath, dtoList)); + total = total.plus(shpWriter.writeGeoJson(geoJsonPath, dtoList)); - // 6) 성공 마킹: geo_uid만 true - List geoUids = - entities.stream().map(MapSheetAnalDataInferenceGeomEntity::getGeoUid).toList(); - coreService.markSuccess(dataUid, geoUids); + // 파일 생성 완료 상태 반영 + List geoUids = dtoList.stream().map(InferenceResultShpDto.Basic::getGeoUid).toList(); + coreService.markSuccess(dataUid, geoUids); + + } catch (Exception e) { + // 실패 시 markSuccess 하지 않음 -> 다음 실행에서 재생성 + // log.warn("파일 생성 실패: dataUid={}, baseName={}", dataUid, baseName, e); + continue; + } } + + InferenceResultShpDto.FileCntDto fileCntDto = new InferenceResultShpDto.FileCntDto(); + fileCntDto.setShp(total.shp()); + fileCntDto.setShx(total.shx()); + fileCntDto.setDbf(total.dbf()); + fileCntDto.setPrj(total.prj()); + fileCntDto.setGeojson(total.geojson()); + + return fileCntDto; } } diff --git a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java index 802a24e1..1dcb01b6 100644 --- a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java @@ -1,5 +1,6 @@ package com.kamco.cd.kamcoback.model; +import com.kamco.cd.kamcoback.common.utils.zip.ZipUtils; import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.model.dto.ModelMngDto; import com.kamco.cd.kamcoback.model.service.ModelMngService; @@ -10,15 +11,21 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; import 
jakarta.transaction.Transactional; +import java.io.IOException; import java.time.LocalDate; import lombok.RequiredArgsConstructor; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; +import org.springframework.http.MediaType; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RequestPart; import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.multipart.MultipartFile; @Tag(name = "모델 관리", description = "모델 관리 API") @RequiredArgsConstructor @@ -29,6 +36,8 @@ public class ModelMngApiController { private final ModelMngService modelMngService; + @Autowired private ZipUtils zipUtils; + @Operation(summary = "모델관리 목록") @GetMapping public ApiResponseDto> findModelMgmtList( @@ -70,4 +79,10 @@ public class ModelMngApiController { String modelVer) { return ApiResponseDto.okObject(modelMngService.removeModel(modelVer)); } + + @Operation(summary = "모델 zip 파일 업로드", description = "모델 zip 파일 업로드") + @PostMapping(value = "/upload/zip", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) + public void upload(@RequestPart MultipartFile zipFilie) throws IOException { + zipUtils.processZip(zipFilie.getInputStream()); + } } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java index 1a8f9052..d4982dcb 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java @@ -1,5 +1,6 @@ package com.kamco.cd.kamcoback.postgres.core; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity; import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository; import java.util.List; @@ -13,22 +14,32 @@ public class InferenceResultShpCoreService { private final InferenceResultRepository repo; - /** inference_results -> (inference, geom) upsert */ + /** + * inference_results 기준으로 - tb_map_sheet_anal_data_inference - + * tb_map_sheet_anal_data_inference_geom 테이블을 최신 상태로 구성한다. + */ @Transactional - public void buildInferenceData() { - repo.upsertGroupsFromInferenceResults(); - repo.upsertGeomsFromInferenceResults(); + public InferenceResultShpDto.InferenceCntDto buildInferenceData() { + int inferenceCnt = repo.upsertGroupsFromInferenceResults(); + int inferenceGeomCnt = repo.upsertGeomsFromInferenceResults(); + + InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto(); + cntDto.setInferenceCnt(inferenceCnt); + cntDto.setInferenceGeomCnt(inferenceGeomCnt); + + return cntDto; } - /** file_created_yn = false/null 인 data_uid 목록 */ + /** 파일 생성이 완료되지 않은 분석 데이터(data_uid) 목록을 조회한다. */ @Transactional(readOnly = true) public List findPendingDataUids(int limit) { return repo.findPendingDataUids(limit); } /** - * 재생성 시작: 덮어쓰기 기준(무조건 처음부터) - inference.file_created_yn = false, file_created_dttm = null - - * geom(file_created_yn) 전부 false 리셋 + * 분석 데이터 재생성을 위해 기존 파일 생성 상태를 초기화한다. + * + *
<p>
- 분석 데이터(file_created_yn)를 미생성 상태로 변경 - 해당 분석 데이터에 속한 모든 도형의 생성 상태를 미생성으로 변경 */ @Transactional public void resetForRegenerate(Long dataUid) { @@ -36,13 +47,25 @@ public class InferenceResultShpCoreService { repo.resetGeomCreatedByDataUid(dataUid); } - /** 생성 대상 geom 엔티티 로드 (file_created_yn=false/null + geom not null) */ + /** + * 지정된 분석 데이터에 속한 도형 정보를 조회한다. + * + *
<p>
- 파일 미생성 상태의 도형만 대상 - geometry가 존재하는 도형만 조회 + */ @Transactional(readOnly = true) - public List loadGeomEntities(Long dataUid, int limit) { - return repo.findGeomEntitiesByDataUid(dataUid, limit); + public List loadGeomDtos(Long dataUid, int limit) { + List entities = + repo.findGeomEntitiesByDataUid(dataUid, limit); + + return entities.stream().map(InferenceResultShpDto.Basic::from).toList(); } - /** 성공 마킹: - 성공 geo_uid만 geom.file_created_yn=true - inference.file_created_yn=true */ + /** + * 파일 생성이 성공한 도형 및 분석 데이터에 대해 생성 완료 상태로 갱신한다. + * + * @param dataUid 분석 데이터 UID + * @param geoUids 파일 생성이 완료된 도형 UID 목록 + */ @Transactional public void markSuccess(Long dataUid, List geoUids) { repo.markGeomCreatedByGeoUids(geoUids); diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java index 05c085de..ae083e77 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java @@ -19,114 +19,131 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC @PersistenceContext private final EntityManager em; - private final QMapSheetAnalDataInferenceEntity i = + /** tb_map_sheet_anal_data_inference */ + private final QMapSheetAnalDataInferenceEntity inferenceEntity = QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity; - private final QMapSheetAnalDataInferenceGeomEntity g = + /** tb_map_sheet_anal_data_inference_geom */ + private final QMapSheetAnalDataInferenceGeomEntity inferenceGeomEntity = QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity; // =============================== // Upsert (Native only) // =============================== + /** + * inference_results 테이블을 기준으로 분석 데이터 단위(stage, compare_yyyy, target_yyyy, map_sheet_num)를 + * 생성/갱신한다. + * + *
<p>
- 최초 생성 시 file_created_yn = false - detecting_cnt는 inference_results 건수 기준 + * + * @return 반영된 행 수 + */ @Override public int upsertGroupsFromInferenceResults() { String sql = """ - INSERT INTO tb_map_sheet_anal_data_inference ( - stage, - compare_yyyy, - target_yyyy, - map_sheet_num, - created_dttm, - updated_dttm, - file_created_yn, - detecting_cnt - ) - SELECT - r.stage, - r.input1 AS compare_yyyy, - r.input2 AS target_yyyy, - r.map_id AS map_sheet_num, - now() AS created_dttm, - now() AS updated_dttm, - false AS file_created_yn, - count(*) AS detecting_cnt - FROM inference_results r - GROUP BY r.stage, r.input1, r.input2, r.map_id - ON CONFLICT (stage, compare_yyyy, target_yyyy, map_sheet_num) - DO UPDATE SET - updated_dttm = now(), - detecting_cnt = EXCLUDED.detecting_cnt - """; + INSERT INTO tb_map_sheet_anal_data_inference ( + stage, + compare_yyyy, + target_yyyy, + map_sheet_num, + created_dttm, + updated_dttm, + file_created_yn, + detecting_cnt + ) + SELECT + r.stage, + r.input1 AS compare_yyyy, + r.input2 AS target_yyyy, + r.map_id AS map_sheet_num, + now() AS created_dttm, + now() AS updated_dttm, + false AS file_created_yn, + count(*) AS detecting_cnt + FROM inference_results r + GROUP BY r.stage, r.input1, r.input2, r.map_id + ON CONFLICT (stage, compare_yyyy, target_yyyy, map_sheet_num) + DO UPDATE SET + updated_dttm = now(), + detecting_cnt = EXCLUDED.detecting_cnt + """; return em.createNativeQuery(sql).executeUpdate(); } + /** + * inference_results 테이블을 기준으로 도형 단위(uuid) 분석 결과를 생성/갱신한다. + * + *
<p>
- uuid 기준 중복 제거(DISTINCT ON) - 최신 updated_dttm 우선 - geometry는 WKB / WKT 모두 처리 - 최초 생성 시 + * file_created_yn = false + * + * @return 반영된 행 수 + */ @Override public int upsertGeomsFromInferenceResults() { - // class_after_prob 컬럼 매핑 오타 주의(여기 제대로 넣음) String sql = """ - INSERT INTO tb_map_sheet_anal_data_inference_geom ( - uuid, stage, cd_prob, compare_yyyy, target_yyyy, map_sheet_num, - class_before_cd, class_before_prob, class_after_cd, class_after_prob, - geom, area, data_uid, created_dttm, updated_dttm, - file_created_yn - ) - SELECT - x.uuid, x.stage, x.cd_prob, x.compare_yyyy, x.target_yyyy, x.map_sheet_num, - x.class_before_cd, x.class_before_prob, x.class_after_cd, x.class_after_prob, - x.geom, x.area, x.data_uid, x.created_dttm, x.updated_dttm, - false AS file_created_yn - FROM ( - SELECT DISTINCT ON (r.uuid) - r.uuid, - r.stage, - r.cd_prob, - r.input1 AS compare_yyyy, - r.input2 AS target_yyyy, - r.map_id AS map_sheet_num, - r.before_class AS class_before_cd, - r.before_probability AS class_before_prob, - r.after_class AS class_after_cd, - r.after_probability AS class_after_prob, - CASE - WHEN r.geometry IS NULL THEN NULL - WHEN left(r.geometry, 2) = '01' - THEN ST_SetSRID(ST_GeomFromWKB(decode(r.geometry, 'hex')), 5186) - ELSE ST_GeomFromText(r.geometry, 5186) - END AS geom, - r.area, - di.data_uid, - r.created_dttm, - r.updated_dttm - FROM inference_results r - JOIN tb_map_sheet_anal_data_inference di - ON di.stage = r.stage - AND di.compare_yyyy = r.input1 - AND di.target_yyyy = r.input2 - AND di.map_sheet_num = r.map_id - ORDER BY r.uuid, r.updated_dttm DESC NULLS LAST, r.uid DESC - ) x - ON CONFLICT (uuid) - DO UPDATE SET - stage = EXCLUDED.stage, - cd_prob = EXCLUDED.cd_prob, - compare_yyyy = EXCLUDED.compare_yyyy, - target_yyyy = EXCLUDED.target_yyyy, - map_sheet_num = EXCLUDED.map_sheet_num, - class_before_cd = EXCLUDED.class_before_cd, - class_before_prob = EXCLUDED.class_before_prob, - class_after_cd = EXCLUDED.class_after_cd, - class_after_prob = EXCLUDED.class_after_prob, - geom = EXCLUDED.geom, - area = EXCLUDED.area, - data_uid = EXCLUDED.data_uid, - updated_dttm = now() - """; + INSERT INTO tb_map_sheet_anal_data_inference_geom ( + uuid, stage, cd_prob, compare_yyyy, target_yyyy, map_sheet_num, + class_before_cd, class_before_prob, class_after_cd, class_after_prob, + geom, area, data_uid, created_dttm, updated_dttm, + file_created_yn + ) + SELECT + x.uuid, x.stage, x.cd_prob, x.compare_yyyy, x.target_yyyy, x.map_sheet_num, + x.class_before_cd, x.class_before_prob, x.class_after_cd, x.class_after_prob, + x.geom, x.area, x.data_uid, x.created_dttm, x.updated_dttm, + false AS file_created_yn + FROM ( + SELECT DISTINCT ON (r.uuid) + r.uuid, + r.stage, + r.cd_prob, + r.input1 AS compare_yyyy, + r.input2 AS target_yyyy, + r.map_id AS map_sheet_num, + r.before_class AS class_before_cd, + r.before_probability AS class_before_prob, + r.after_class AS class_after_cd, + r.after_probability AS class_after_prob, + CASE + WHEN r.geometry IS NULL THEN NULL + WHEN left(r.geometry, 2) = '01' + THEN ST_SetSRID(ST_GeomFromWKB(decode(r.geometry, 'hex')), 5186) + ELSE ST_GeomFromText(r.geometry, 5186) + END AS geom, + r.area, + di.data_uid, + r.created_dttm, + r.updated_dttm + FROM inference_results r + JOIN tb_map_sheet_anal_data_inference di + ON di.stage = r.stage + AND di.compare_yyyy = r.input1 + AND di.target_yyyy = r.input2 + AND di.map_sheet_num = r.map_id + ORDER BY r.uuid, r.updated_dttm DESC NULLS LAST, r.uid DESC + ) x + ON CONFLICT (uuid) + DO UPDATE SET + stage = 
EXCLUDED.stage, + cd_prob = EXCLUDED.cd_prob, + compare_yyyy = EXCLUDED.compare_yyyy, + target_yyyy = EXCLUDED.target_yyyy, + map_sheet_num = EXCLUDED.map_sheet_num, + class_before_cd = EXCLUDED.class_before_cd, + class_before_prob = EXCLUDED.class_before_prob, + class_after_cd = EXCLUDED.class_after_cd, + class_after_prob = EXCLUDED.class_after_prob, + geom = EXCLUDED.geom, + area = EXCLUDED.area, + data_uid = EXCLUDED.data_uid, + updated_dttm = now() + """; return em.createNativeQuery(sql).executeUpdate(); } @@ -135,63 +152,92 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC // Jobs // =============================== + /** + * 파일 생성이 완료되지 않은 분석 데이터(data_uid) 목록을 조회한다. + * + * @param limit 최대 조회 건수 + * @return data_uid 목록 + */ @Override public List findPendingDataUids(int limit) { return queryFactory - .select(i.id) // data_uid - .from(i) - .where(i.fileCreatedYn.isFalse().or(i.fileCreatedYn.isNull())) - .orderBy(i.id.asc()) + .select(inferenceEntity.id) + .from(inferenceEntity) + .where(inferenceEntity.fileCreatedYn.isFalse().or(inferenceEntity.fileCreatedYn.isNull())) + .orderBy(inferenceEntity.id.asc()) .limit(limit) .fetch(); } // =============================== - // Reset / Mark (전부 ZonedDateTime) + // Reset / Mark // =============================== + /** + * 분석 데이터의 파일 생성 상태를 재생성 가능 상태로 초기화한다. + * + *
<p>
- file_created_yn = false - file_created_dttm = null + * + * @return 갱신된 행 수 + */ @Override public int resetInferenceCreated(Long dataUid) { ZonedDateTime now = ZonedDateTime.now(); return (int) queryFactory - .update(i) - .set(i.fileCreatedYn, false) - .set(i.fileCreatedDttm, (ZonedDateTime) null) - .set(i.updatedDttm, now) - .where(i.id.eq(dataUid)) + .update(inferenceEntity) + .set(inferenceEntity.fileCreatedYn, false) + .set(inferenceEntity.fileCreatedDttm, (ZonedDateTime) null) + .set(inferenceEntity.updatedDttm, now) + .where(inferenceEntity.id.eq(dataUid)) .execute(); } + /** + * 분석 데이터의 파일 생성 완료 상태를 반영한다. + * + * @return 갱신된 행 수 + */ @Override public int markInferenceCreated(Long dataUid) { ZonedDateTime now = ZonedDateTime.now(); return (int) queryFactory - .update(i) - .set(i.fileCreatedYn, true) - .set(i.fileCreatedDttm, now) - .set(i.updatedDttm, now) - .where(i.id.eq(dataUid)) + .update(inferenceEntity) + .set(inferenceEntity.fileCreatedYn, true) + .set(inferenceEntity.fileCreatedDttm, now) + .set(inferenceEntity.updatedDttm, now) + .where(inferenceEntity.id.eq(dataUid)) .execute(); } + /** + * 분석 데이터에 속한 모든 도형의 파일 생성 상태를 초기화한다. + * + * @return 갱신된 행 수 + */ @Override public int resetGeomCreatedByDataUid(Long dataUid) { ZonedDateTime now = ZonedDateTime.now(); return (int) queryFactory - .update(g) - .set(g.fileCreatedYn, false) - .set(g.fileCreatedDttm, (ZonedDateTime) null) - .set(g.updatedDttm, now) // ✅ 엔티티/Q가 ZonedDateTime이면 정상 - .where(g.dataUid.eq(dataUid)) + .update(inferenceGeomEntity) + .set(inferenceGeomEntity.fileCreatedYn, false) + .set(inferenceGeomEntity.fileCreatedDttm, (ZonedDateTime) null) + .set(inferenceGeomEntity.updatedDttm, now) + .where(inferenceGeomEntity.dataUid.eq(dataUid)) .execute(); } + /** + * 파일 생성이 완료된 도형(geo_uid)을 생성 완료 상태로 반영한다. + * + * @param geoUids 생성 완료된 도형 UID 목록 + * @return 갱신된 행 수 + */ @Override public int markGeomCreatedByGeoUids(List geoUids) { if (geoUids == null || geoUids.isEmpty()) { @@ -202,11 +248,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC return (int) queryFactory - .update(g) - .set(g.fileCreatedYn, true) - .set(g.fileCreatedDttm, now) - .set(g.updatedDttm, now) - .where(g.geoUid.in(geoUids)) + .update(inferenceGeomEntity) + .set(inferenceGeomEntity.fileCreatedYn, true) + .set(inferenceGeomEntity.fileCreatedDttm, now) + .set(inferenceGeomEntity.updatedDttm, now) + .where(inferenceGeomEntity.geoUid.in(geoUids)) .execute(); } @@ -214,16 +260,24 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC // Export source (Entity only) // =============================== + /** + * SHP / GeoJSON 파일 생성을 위한 도형 데이터 조회 + * + *
<p>
- 특정 분석 데이터(data_uid)에 속한 도형 - geometry 존재 - 파일 미생성 상태만 대상 + */ @Override public List findGeomEntitiesByDataUid( Long dataUid, int limit) { return queryFactory - .selectFrom(g) + .selectFrom(inferenceGeomEntity) .where( - g.dataUid.eq(dataUid), - g.geom.isNotNull(), - g.fileCreatedYn.isFalse().or(g.fileCreatedYn.isNull())) - .orderBy(g.geoUid.asc()) + inferenceGeomEntity.dataUid.eq(dataUid), + inferenceGeomEntity.geom.isNotNull(), + inferenceGeomEntity + .fileCreatedYn + .isFalse() + .or(inferenceGeomEntity.fileCreatedYn.isNull())) + .orderBy(inferenceGeomEntity.geoUid.asc()) .limit(limit) .fetch(); } diff --git a/src/main/resources/db/migration/dump-kamco_cds-202512231534.tar b/src/main/resources/db/migration/dump-kamco_cds-202512261634.tar similarity index 76% rename from src/main/resources/db/migration/dump-kamco_cds-202512231534.tar rename to src/main/resources/db/migration/dump-kamco_cds-202512261634.tar index 5396379a..e7818316 100644 Binary files a/src/main/resources/db/migration/dump-kamco_cds-202512231534.tar and b/src/main/resources/db/migration/dump-kamco_cds-202512261634.tar differ
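The upload endpoint added to ModelMngApiController hands the multipart stream to ZipUtils, which walks the archive and dispatches each entry to the first ZipEntryProcessor whose supports() matches the file name. Below is a minimal sketch of that dispatch outside Spring; the archive entries and the main class are hypothetical, and it assumes processors leave the shared ZipInputStream open (JsonStreamingFileProcessor disables AUTO_CLOSE_SOURCE for exactly that reason).

import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.common.utils.zip.JsonStreamingFileProcessor;
import com.kamco.cd.kamcoback.common.utils.zip.TextFileProcessor;
import com.kamco.cd.kamcoback.common.utils.zip.ZipUtils;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class ZipPipelineSketch {

    public static void main(String[] args) throws Exception {
        // Build a small archive in memory (hypothetical entry names).
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (ZipOutputStream zos = new ZipOutputStream(buf)) {
            zos.putNextEntry(new ZipEntry("model/readme.txt"));
            zos.write("hello model".getBytes(StandardCharsets.UTF_8));
            zos.closeEntry();
            zos.putNextEntry(new ZipEntry("model/meta.json"));
            zos.write("{\"type\":\"model\",\"name\":\"sample\"}".getBytes(StandardCharsets.UTF_8));
            zos.closeEntry();
        }

        // Same wiring Spring performs: every ZipEntryProcessor bean ends up in ZipUtils.
        ZipUtils zipUtils =
                new ZipUtils(
                        List.of(new TextFileProcessor(), new JsonStreamingFileProcessor(new ObjectMapper())));

        // readme.txt is routed to TextFileProcessor, meta.json to JsonStreamingFileProcessor;
        // directory entries and unmatched extensions are skipped.
        zipUtils.processZip(new ByteArrayInputStream(buf.toByteArray()));
    }
}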
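createShpFile now derives its response from what the writer reports per call instead of counting files on disk: each successful writeShp returns WriteCnt(1, 1, 1, 1, 0), each writeGeoJson returns WriteCnt(0, 0, 0, 0, 1), and the service folds them with WriteCnt.plus before copying the totals into FileCntDto. A short sketch of that accumulation; the two-group loop is hypothetical.

import com.kamco.cd.kamcoback.inference.WriteCnt;

public class WriteCntSketch {

    public static void main(String[] args) {
        WriteCnt total = WriteCnt.zero();

        // Two data_uid groups, each producing one SHP set and one GeoJSON file.
        for (int i = 0; i < 2; i++) {
            total = total.plus(new WriteCnt(1, 1, 1, 1, 0)); // writeShp: .shp/.shx/.dbf/.prj
            total = total.plus(new WriteCnt(0, 0, 0, 0, 1)); // writeGeoJson: .geojson
        }

        // -> WriteCnt[shp=2, shx=2, dbf=2, prj=2, geojson=2],
        //    which the service copies field by field into InferenceResultShpDto.FileCntDto.
        System.out.println(total);
    }
}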
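upsertGeomsFromInferenceResults lets PostGIS decide how to parse inference_results.geometry: values that look like a hex WKB dump (leading "01") go through decode + ST_GeomFromWKB, everything else through ST_GeomFromText, both pinned to SRID 5186. The same branching rule expressed client-side with JTS, purely as an illustration and not code this PR adds:

import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.WKBReader;
import org.locationtech.jts.io.WKTReader;

public class GeometryParseSketch {

    static Geometry parse(String raw) throws Exception {
        if (raw == null) {
            return null;
        }
        if (raw.startsWith("01")) {
            // hex-encoded little-endian WKB, e.g. "0103000020..."
            return new WKBReader().read(WKBReader.hexToBytes(raw));
        }
        // plain WKT, e.g. "POLYGON((0 0, 1 0, 1 1, 0 0))"
        return new WKTReader().read(raw);
    }

    public static void main(String[] args) throws Exception {
        Geometry g = parse("POLYGON((0 0, 1 0, 1 1, 0 0))");
        g.setSRID(5186); // the SQL applies the same SRID via ST_SetSRID / ST_GeomFromText(..., 5186)
        System.out.println(g);
    }
}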