From 07714c5d14e6d1886ef102fc69cad6d88ce0fcde Mon Sep 17 00:00:00 2001 From: teddy Date: Mon, 29 Dec 2025 10:17:09 +0900 Subject: [PATCH 1/6] =?UTF-8?q?shp=20=ED=8C=8C=EC=9D=BC=20=EC=83=9D?= =?UTF-8?q?=EC=84=B1=20=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../inference/service/GeoToolsShpWriter.java | 40 ++++++++++++------- .../service/InferenceResultShpService.java | 15 ++++--- .../inference/service/ShpWriter.java | 2 +- 3 files changed, 35 insertions(+), 22 deletions(-) diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java index 4a0492d6..4588ed0e 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java @@ -55,7 +55,7 @@ public class GeoToolsShpWriter implements ShpWriter { *

- geometry 타입은 첫 번째 유효 geometry 기준으로 스키마를 생성한다. - 좌표계는 EPSG:5186으로 설정하며, .prj 파일을 직접 생성한다. * * @param shpBasePath 확장자를 제외한 SHP 파일 기본 경로 - * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 + * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 * @return 이번 호출로 write(생성/덮어쓰기)가 수행된 파일 개수 */ @Override @@ -123,11 +123,10 @@ public class GeoToolsShpWriter implements ShpWriter { * 생성된다. - geometry는 GeoTools GeometryJSON을 사용하여 직렬화한다. * *

GeoJSON 구조 예: { "type": "FeatureCollection", "name": "stage_input1_input2_mapId", "crs": { - * "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::5186" } }, "properties": { ... - * }, "features": [ ... ] } + * "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::5186" } }, "properties": { ... }, "features": [ ... ] } * * @param geoJsonPath 생성할 GeoJSON 파일의 전체 경로 (.geojson 포함) - * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 + * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 * @return 이번 호출로 write(생성/덮어쓰기)가 수행된 파일 개수 */ @Override @@ -153,9 +152,9 @@ public class GeoToolsShpWriter implements ShpWriter { // name: stage_input1_input2_mapId String name = - String.format( - "%d_%d_%d_%d", - first.getStage(), first.getInput1(), first.getInput2(), first.getMapId()); + String.format( + "%d_%d_%d_%d", + first.getStage(), first.getInput1(), first.getInput2(), first.getMapId()); root.put("name", name); // CRS (EPSG:5186) @@ -226,7 +225,7 @@ public class GeoToolsShpWriter implements ShpWriter { // 파일 쓰기 try (OutputStreamWriter w = - new OutputStreamWriter(new FileOutputStream(geoJsonFile), GEOJSON_CHARSET)) { + new OutputStreamWriter(new FileOutputStream(geoJsonFile), GEOJSON_CHARSET)) { om.writerWithDefaultPrettyPrinter().writeValue(w, root); } @@ -250,7 +249,7 @@ public class GeoToolsShpWriter implements ShpWriter { } private SimpleFeatureType createSchema( - Class geomType, CoordinateReferenceSystem crs) { + Class geomType, CoordinateReferenceSystem crs) { SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder(); b.setName("inference_result"); b.setCRS(crs); @@ -273,14 +272,25 @@ public class GeoToolsShpWriter implements ShpWriter { return b.buildFeatureType(); } + /** + * .shp .shx .dbf .fix 파일 생성 (껍데기 생성) + * + * @param shpFile + * @param schema + * @return + * @throws Exception + */ private ShapefileDataStore createDataStore(File shpFile, SimpleFeatureType schema) - throws Exception { + throws Exception { Map params = new 
HashMap<>(); params.put("url", shpFile.toURI().toURL()); - params.put("create spatial index", Boolean.TRUE); + + // .fix 파일 생성 Boolean.TRUE, 미생성 Boolean.FALSE + params.put("create spatial index", Boolean.FALSE); ShapefileDataStoreFactory factory = new ShapefileDataStoreFactory(); + ShapefileDataStore dataStore = (ShapefileDataStore) factory.createNewDataStore(params); dataStore.setCharset(DBF_CHARSET); @@ -290,7 +300,7 @@ public class GeoToolsShpWriter implements ShpWriter { } private DefaultFeatureCollection buildFeatureCollection( - SimpleFeatureType schema, List rows) { + SimpleFeatureType schema, List rows) { DefaultFeatureCollection collection = new DefaultFeatureCollection(); SimpleFeatureBuilder builder = new SimpleFeatureBuilder(schema); @@ -308,10 +318,10 @@ public class GeoToolsShpWriter implements ShpWriter { builder.add(dto.getArea() != null ? dto.getArea().doubleValue() : null); builder.add(dto.getBeforeClass()); builder.add( - dto.getBeforeProbability() != null ? dto.getBeforeProbability().doubleValue() : null); + dto.getBeforeProbability() != null ? dto.getBeforeProbability().doubleValue() : null); builder.add(dto.getAfterClass()); builder.add( - dto.getAfterProbability() != null ? dto.getAfterProbability().doubleValue() : null); + dto.getAfterProbability() != null ? 
dto.getAfterProbability().doubleValue() : null); SimpleFeature feature = builder.buildFeature(null); collection.add(feature); @@ -322,7 +332,7 @@ public class GeoToolsShpWriter implements ShpWriter { } private void writeFeatures(ShapefileDataStore dataStore, DefaultFeatureCollection collection) - throws Exception { + throws Exception { String typeName = dataStore.getTypeNames()[0]; SimpleFeatureSource featureSource = dataStore.getFeatureSource(typeName); diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java index 5d795feb..3d977c77 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java @@ -16,7 +16,9 @@ public class InferenceResultShpService { private final InferenceResultShpCoreService coreService; private final ShpWriter shpWriter; - /** inference_results 테이블을 기준으로 분석 결과 테이블과 도형 테이블을 최신 상태로 반영한다. */ + /** + * inference_results 테이블을 기준으로 분석 결과 테이블과 도형 테이블을 최신 상태로 반영한다. 
+ */ @Transactional public InferenceResultShpDto.InferenceCntDto saveInferenceResultData() { return coreService.buildInferenceData(); @@ -33,6 +35,7 @@ public class InferenceResultShpService { @Transactional public InferenceResultShpDto.FileCntDto createShpFile() { + // TODO 파일 경로는 정해지면 수정, properties 사용 String baseDir = System.getProperty("user.home") + "/export"; int batchSize = 100; @@ -47,7 +50,7 @@ public class InferenceResultShpService { // 재생성을 위한 생성 상태 초기화 coreService.resetForRegenerate(dataUid); - // 도형 데이터 조회 + // 추론 데이터 조회 List dtoList = coreService.loadGeomDtos(dataUid, geomLimit); if (dtoList.isEmpty()) { continue; @@ -56,15 +59,15 @@ public class InferenceResultShpService { // 파일명 생성 (stage_mapSheet_compare_target) InferenceResultShpDto.Basic first = dtoList.get(0); String baseName = - String.format( - "%d_%d_%d_%d", - first.getStage(), first.getMapId(), first.getInput1(), first.getInput2()); + String.format( + "%d_%d_%d_%d", + first.getStage(), first.getMapId(), first.getInput1(), first.getInput2()); String shpBasePath = baseDir + "/shp/" + baseName; String geoJsonPath = baseDir + "/geojson/" + baseName + ".geojson"; try { - // 폴더 안 파일을 세지 않고, Writer가 "이번 호출에서 write한 개수"를 반환 + // Writer가 "이번 호출에서 write한 개수"를 반환 total = total.plus(shpWriter.writeShp(shpBasePath, dtoList)); total = total.plus(shpWriter.writeGeoJson(geoJsonPath, dtoList)); diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/ShpWriter.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/ShpWriter.java index 01f6cf69..84a47249 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/service/ShpWriter.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/ShpWriter.java @@ -6,7 +6,7 @@ import java.util.List; public interface ShpWriter { - // SHP (.shp/.shx/.dbf) + // SHP (.shp/.shx/.dbf/.fix) WriteCnt writeShp(String shpBasePath, List rows); // GeoJSON (.geojson) From 624053915892eb6e1864f380fa8438403e8fb0d4 Mon Sep 17 00:00:00 2001 From: teddy Date: 
Mon, 29 Dec 2025 10:43:15 +0900 Subject: [PATCH 2/6] =?UTF-8?q?shp=20=ED=8C=8C=EC=9D=BC=20=EC=83=9D?= =?UTF-8?q?=EC=84=B1=20=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../inference/service/GeoToolsShpWriter.java | 27 ++++++++++--------- .../service/InferenceResultShpService.java | 11 ++++---- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java index 4588ed0e..5d43f91d 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java @@ -55,7 +55,7 @@ public class GeoToolsShpWriter implements ShpWriter { *

- geometry 타입은 첫 번째 유효 geometry 기준으로 스키마를 생성한다. - 좌표계는 EPSG:5186으로 설정하며, .prj 파일을 직접 생성한다. * * @param shpBasePath 확장자를 제외한 SHP 파일 기본 경로 - * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 + * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 * @return 이번 호출로 write(생성/덮어쓰기)가 수행된 파일 개수 */ @Override @@ -123,10 +123,11 @@ public class GeoToolsShpWriter implements ShpWriter { * 생성된다. - geometry는 GeoTools GeometryJSON을 사용하여 직렬화한다. * *

GeoJSON 구조 예: { "type": "FeatureCollection", "name": "stage_input1_input2_mapId", "crs": { - * "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::5186" } }, "properties": { ... }, "features": [ ... ] } + * "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::5186" } }, "properties": { ... + * }, "features": [ ... ] } * * @param geoJsonPath 생성할 GeoJSON 파일의 전체 경로 (.geojson 포함) - * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 + * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 * @return 이번 호출로 write(생성/덮어쓰기)가 수행된 파일 개수 */ @Override @@ -152,9 +153,9 @@ public class GeoToolsShpWriter implements ShpWriter { // name: stage_input1_input2_mapId String name = - String.format( - "%d_%d_%d_%d", - first.getStage(), first.getInput1(), first.getInput2(), first.getMapId()); + String.format( + "%d_%d_%d_%d", + first.getStage(), first.getInput1(), first.getInput2(), first.getMapId()); root.put("name", name); // CRS (EPSG:5186) @@ -225,7 +226,7 @@ public class GeoToolsShpWriter implements ShpWriter { // 파일 쓰기 try (OutputStreamWriter w = - new OutputStreamWriter(new FileOutputStream(geoJsonFile), GEOJSON_CHARSET)) { + new OutputStreamWriter(new FileOutputStream(geoJsonFile), GEOJSON_CHARSET)) { om.writerWithDefaultPrettyPrinter().writeValue(w, root); } @@ -249,7 +250,7 @@ public class GeoToolsShpWriter implements ShpWriter { } private SimpleFeatureType createSchema( - Class geomType, CoordinateReferenceSystem crs) { + Class geomType, CoordinateReferenceSystem crs) { SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder(); b.setName("inference_result"); b.setCRS(crs); @@ -281,7 +282,7 @@ public class GeoToolsShpWriter implements ShpWriter { * @throws Exception */ private ShapefileDataStore createDataStore(File shpFile, SimpleFeatureType schema) - throws Exception { + throws Exception { Map params = new HashMap<>(); params.put("url", shpFile.toURI().toURL()); @@ -300,7 +301,7 @@ public class GeoToolsShpWriter implements ShpWriter { } private 
DefaultFeatureCollection buildFeatureCollection( - SimpleFeatureType schema, List rows) { + SimpleFeatureType schema, List rows) { DefaultFeatureCollection collection = new DefaultFeatureCollection(); SimpleFeatureBuilder builder = new SimpleFeatureBuilder(schema); @@ -318,10 +319,10 @@ public class GeoToolsShpWriter implements ShpWriter { builder.add(dto.getArea() != null ? dto.getArea().doubleValue() : null); builder.add(dto.getBeforeClass()); builder.add( - dto.getBeforeProbability() != null ? dto.getBeforeProbability().doubleValue() : null); + dto.getBeforeProbability() != null ? dto.getBeforeProbability().doubleValue() : null); builder.add(dto.getAfterClass()); builder.add( - dto.getAfterProbability() != null ? dto.getAfterProbability().doubleValue() : null); + dto.getAfterProbability() != null ? dto.getAfterProbability().doubleValue() : null); SimpleFeature feature = builder.buildFeature(null); collection.add(feature); @@ -332,7 +333,7 @@ public class GeoToolsShpWriter implements ShpWriter { } private void writeFeatures(ShapefileDataStore dataStore, DefaultFeatureCollection collection) - throws Exception { + throws Exception { String typeName = dataStore.getTypeNames()[0]; SimpleFeatureSource featureSource = dataStore.getFeatureSource(typeName); diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java index 3d977c77..e257edeb 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java @@ -16,9 +16,7 @@ public class InferenceResultShpService { private final InferenceResultShpCoreService coreService; private final ShpWriter shpWriter; - /** - * inference_results 테이블을 기준으로 분석 결과 테이블과 도형 테이블을 최신 상태로 반영한다. - */ + /** inference_results 테이블을 기준으로 분석 결과 테이블과 도형 테이블을 최신 상태로 반영한다. 
*/ @Transactional public InferenceResultShpDto.InferenceCntDto saveInferenceResultData() { return coreService.buildInferenceData(); @@ -38,6 +36,7 @@ public class InferenceResultShpService { // TODO 파일 경로는 정해지면 수정, properties 사용 String baseDir = System.getProperty("user.home") + "/export"; + // TODO 배치 실행으로 변경 필요 int batchSize = 100; int geomLimit = 500_000; @@ -59,9 +58,9 @@ public class InferenceResultShpService { // 파일명 생성 (stage_mapSheet_compare_target) InferenceResultShpDto.Basic first = dtoList.get(0); String baseName = - String.format( - "%d_%d_%d_%d", - first.getStage(), first.getMapId(), first.getInput1(), first.getInput2()); + String.format( + "%d_%d_%d_%d", + first.getStage(), first.getMapId(), first.getInput1(), first.getInput2()); String shpBasePath = baseDir + "/shp/" + baseName; String geoJsonPath = baseDir + "/geojson/" + baseName + ".geojson"; From 6343a5a7815f4dca218c314530a14de6f0fcfe93 Mon Sep 17 00:00:00 2001 From: "gayoun.park" Date: Mon, 29 Dec 2025 11:09:34 +0900 Subject: [PATCH 3/6] =?UTF-8?q?=EB=8F=84=EC=97=BD=EA=B4=80=EB=A6=AC=20:=20?= =?UTF-8?q?=EC=9E=90=EB=8F=99=2050k=20=EB=93=B1=EB=A1=9D=20=EB=A1=9C?= =?UTF-8?q?=EC=A7=81=20->=20exception=EC=9C=BC=EB=A1=9C=20=EC=88=98?= =?UTF-8?q?=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../postgres/core/MapInkxMngCoreService.java | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/MapInkxMngCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/MapInkxMngCoreService.java index 5a7a69fd..cdcba289 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/MapInkxMngCoreService.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/MapInkxMngCoreService.java @@ -2,7 +2,6 @@ package com.kamco.cd.kamcoback.postgres.core; import com.kamco.cd.kamcoback.config.api.ApiResponseDto.ApiResponseCode; import 
com.kamco.cd.kamcoback.config.api.ApiResponseDto.ResponseObj; -import com.kamco.cd.kamcoback.postgres.entity.MapInkx50kEntity; import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity; import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx50kRepository; import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx5kRepository; @@ -39,21 +38,13 @@ public class MapInkxMngCoreService { } Integer fid50k = mapInkx50kRepository.findByMapidCdParentNo(req.getMapidcdNo()); - if (fid50k == null || fid50k <= 0) { - // parent도 등록 - MapInkx50kEntity parent = - new MapInkx50kEntity(req.getMapidcdNo().substring(0, 5), req.getMapidNm(), "", null); - MapInkx50kEntity result = mapInkx50kRepository.save(parent); - fid50k = result.getFid(); + if (fid50k == null) { + return new ResponseObj(ApiResponseCode.NOT_FOUND_DATA, "1:50,000 도엽의 정보가 없습니다. 관리자에게 문의하세요."); } MapInkx5kEntity entity = new MapInkx5kEntity( - req.getMapidcdNo(), - req.getMapidNm(), - map_polygon, - fid50k == null ? null : fid50k.longValue(), - "USE" // 기본은 USE로 + req.getMapidcdNo(), req.getMapidNm(), map_polygon, fid50k.longValue(), "USE" // 기본은 USE로 ); mapInkx5kRepository.save(entity); From f48c444f6cace84c572e817a5d47cee4449a2400 Mon Sep 17 00:00:00 2001 From: teddy Date: Mon, 29 Dec 2025 14:10:22 +0900 Subject: [PATCH 4/6] =?UTF-8?q?shp=20=ED=8C=8C=EC=9D=BC=20=EC=83=9D?= =?UTF-8?q?=EC=84=B1=20=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../inference/dto/InferenceResultShpDto.java | 3 +++ .../core/InferenceResultShpCoreService.java | 2 ++ .../InferenceResultRepositoryCustom.java | 2 ++ .../InferenceResultRepositoryImpl.java | 27 +++++++++++++++++++ 4 files changed, 34 insertions(+) diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java index 96de4301..d0feb315 100644 --- 
a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java @@ -71,6 +71,9 @@ public class InferenceResultShpDto { @NoArgsConstructor public static class InferenceCntDto { + @Schema(description = "추론 결과(inference_results)를 기준으로 신규 목록 저장 데이터 건수", example = "120") + int sheetAnalDataCnt; + @Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 데이터 건수", example = "120") int inferenceCnt; diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java index d4982dcb..53f41d14 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java @@ -20,10 +20,12 @@ public class InferenceResultShpCoreService { */ @Transactional public InferenceResultShpDto.InferenceCntDto buildInferenceData() { + int sheetAnalDataCnt = repo.upsertGroupsFromMapSheetAnal(); int inferenceCnt = repo.upsertGroupsFromInferenceResults(); int inferenceGeomCnt = repo.upsertGeomsFromInferenceResults(); InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto(); + cntDto.setSheetAnalDataCnt(sheetAnalDataCnt); cntDto.setInferenceCnt(inferenceCnt); cntDto.setInferenceGeomCnt(inferenceGeomCnt); diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java index 3808d7d9..41b72849 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java @@ -5,6 +5,8 @@ import java.util.List; public interface 
InferenceResultRepositoryCustom { + int upsertGroupsFromMapSheetAnal(); + int upsertGroupsFromInferenceResults(); int upsertGeomsFromInferenceResults(); diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java index ae083e77..a1d7afb6 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java @@ -31,6 +31,33 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC // Upsert (Native only) // =============================== + @Override + public int upsertGroupsFromMapSheetAnal() { + String sql = + """ + INSERT INTO tb_map_sheet_anal_inference ( + compare_yyyy, + target_yyyy, + anal_map_sheet, + stage, + anal_title + ) + SELECT + r.input1 AS compare_yyyy, + r.input2 AS target_yyyy, + r.map_id AS anal_map_sheet, + r.stage, + CONCAT(r.stage ,'_', r.input1 ,'_', r.input2 ,'_', r.map_id) as anal_title + FROM inference_results r + GROUP BY r.stage, r.input1, r.input2, r.map_id + ON CONFLICT (compare_yyyy, target_yyyy, anal_map_sheet, stage) + DO UPDATE SET + updated_dttm = now() + """; + + return em.createNativeQuery(sql).executeUpdate(); + } + /** * inference_results 테이블을 기준으로 분석 데이터 단위(stage, compare_yyyy, target_yyyy, map_sheet_num)를 * 생성/갱신한다. 
From 424e14f36ea2f3067a4a184127bfb33abcfa4a27 Mon Sep 17 00:00:00 2001 From: teddy Date: Mon, 29 Dec 2025 14:53:27 +0900 Subject: [PATCH 5/6] =?UTF-8?q?=EC=8A=A4=EC=9B=A8=EA=B1=B0=20=EC=84=A4?= =?UTF-8?q?=EC=A0=95=20=EB=B3=80=EA=B2=BD?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../cd/kamcoback/config/OpenApiConfig.java | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/src/main/java/com/kamco/cd/kamcoback/config/OpenApiConfig.java b/src/main/java/com/kamco/cd/kamcoback/config/OpenApiConfig.java index 8e5051ac..f8b1653b 100644 --- a/src/main/java/com/kamco/cd/kamcoback/config/OpenApiConfig.java +++ b/src/main/java/com/kamco/cd/kamcoback/config/OpenApiConfig.java @@ -46,11 +46,18 @@ public class OpenApiConfig { // profile 별 server url 분기 List servers = new ArrayList<>(); - switch (profile) { - case "prod" -> servers.add(new Server().url(prodUrl).description("운영 서버")); - case "dev" -> servers.add(new Server().url(devUrl).description("개발 서버")); - default -> - servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 개발 서버")); + if ("dev".equals(profile)) { + servers.add(new Server().url(devUrl).description("개발 서버")); + servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버")); + // servers.add(new Server().url(prodUrl).description("운영 서버")); + } else if ("prod".equals(profile)) { + // servers.add(new Server().url(prodUrl).description("운영 서버")); + servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버")); + servers.add(new Server().url(devUrl).description("개발 서버")); + } else { + servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버")); + servers.add(new Server().url(devUrl).description("개발 서버")); + // servers.add(new Server().url(prodUrl).description("운영 서버")); } return new OpenAPI() From 78f988411c9c2b8008039b97e5516f994c296ad3 Mon Sep 17 00:00:00 2001 From: teddy Date: Mon, 29 Dec 
2025 15:42:35 +0900 Subject: [PATCH 6/6] =?UTF-8?q?enum=20=EC=BD=94=EB=93=9C=20=EC=A1=B0?= =?UTF-8?q?=ED=9A=8C=20=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../kamco/cd/kamcoback/common/enums/SyncStateType.java | 3 +++ .../cd/kamcoback/common/utils/enums/CodeHidden.java | 10 ++++++++++ .../kamco/cd/kamcoback/common/utils/enums/Enums.java | 9 +++++++++ 3 files changed, 22 insertions(+) create mode 100644 src/main/java/com/kamco/cd/kamcoback/common/utils/enums/CodeHidden.java diff --git a/src/main/java/com/kamco/cd/kamcoback/common/enums/SyncStateType.java b/src/main/java/com/kamco/cd/kamcoback/common/enums/SyncStateType.java index fc06e79b..f5c9d08f 100644 --- a/src/main/java/com/kamco/cd/kamcoback/common/enums/SyncStateType.java +++ b/src/main/java/com/kamco/cd/kamcoback/common/enums/SyncStateType.java @@ -1,6 +1,7 @@ package com.kamco.cd.kamcoback.common.enums; import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose; +import com.kamco.cd.kamcoback.common.utils.enums.CodeHidden; import com.kamco.cd.kamcoback.common.utils.enums.EnumType; import lombok.AllArgsConstructor; import lombok.Getter; @@ -9,11 +10,13 @@ import lombok.Getter; @Getter @AllArgsConstructor public enum SyncStateType implements EnumType { + @CodeHidden NOTYET("미처리"), NOFILE("파일없음"), NOTPAIR("페어파일누락"), DUPLICATE("파일중복"), TYPEERROR("손상파일"), + @CodeHidden DONE("완료"); private final String desc; diff --git a/src/main/java/com/kamco/cd/kamcoback/common/utils/enums/CodeHidden.java b/src/main/java/com/kamco/cd/kamcoback/common/utils/enums/CodeHidden.java new file mode 100644 index 00000000..ad320cdd --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/common/utils/enums/CodeHidden.java @@ -0,0 +1,10 @@ +package com.kamco.cd.kamcoback.common.utils.enums; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + 
+@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.FIELD) +public @interface CodeHidden {} diff --git a/src/main/java/com/kamco/cd/kamcoback/common/utils/enums/Enums.java b/src/main/java/com/kamco/cd/kamcoback/common/utils/enums/Enums.java index 21d61599..d8d01ed3 100644 --- a/src/main/java/com/kamco/cd/kamcoback/common/utils/enums/Enums.java +++ b/src/main/java/com/kamco/cd/kamcoback/common/utils/enums/Enums.java @@ -35,10 +35,19 @@ public class Enums { return Arrays.stream(enums) .map(e -> (EnumType) e) + .filter(e -> !isHidden(enumClass, (Enum) e)) .map(e -> new CodeDto(e.getId(), e.getText())) .toList(); } + private static boolean isHidden(Class> enumClass, Enum e) { + try { + return enumClass.getField(e.name()).isAnnotationPresent(CodeHidden.class); + } catch (NoSuchFieldException ex) { + return false; + } + } + /** 특정 타입(enum)만 조회 /codes/{type} -> type = RoleType 같은 값 */ public static List getCodes(String type) { Class> enumClass = exposedEnumMap.get(type);