From 2a60ddfd36ff5c4b01f1ef088b76c634094700b3 Mon Sep 17 00:00:00 2001
From: teddy
Date: Fri, 26 Dec 2025 15:43:08 +0900
Subject: [PATCH] Add shp file generation
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../inference/dto/InferenceResultShpDto.java  |   8 +-
 .../service/InferenceResultShpService.java    |  32 +++---
 .../core/InferenceResultShpCoreService.java   |  19 ++--
 .../InferenceResultRepositoryImpl.java        | 100 +++++++++--------
 4 files changed, 76 insertions(+), 83 deletions(-)

diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java
index 16da5e33..9581bab2 100644
--- a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java
+++ b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java
@@ -19,8 +19,8 @@ public class InferenceResultShpDto {
     // ===== group key =====
     private Integer stage;
     private Long mapId;
-    private Integer input1;  // compare_yyyy
-    private Integer input2;  // target_yyyy
+    private Integer input1; // compare_yyyy
+    private Integer input2; // target_yyyy
 
     // ===== inference result =====
     private Float cdProb;
@@ -35,9 +35,7 @@ public class InferenceResultShpDto {
     private Geometry geometry;
     private Float area;
 
-    /**
-     * Entity → DTO conversion
-     */
+    /** Entity → DTO conversion */
     public static Basic from(MapSheetAnalDataInferenceGeomEntity e) {
 
         Basic d = new Basic();
diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java
index c1b6e091..78f7c80c 100644
--- a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java
+++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java
@@ -17,16 +17,15 @@ public class InferenceResultShpService {
     private final InferenceResultShpCoreService coreService;
     private final ShpWriter shpWriter;
 
-    /**
-     * inference_results -> tb_map_sheet_anal_data_inference / geom upsert
-     */
+    /** inference_results -> tb_map_sheet_anal_data_inference / geom upsert */
     @Transactional
    public void saveInferenceResultData() {
        coreService.buildInferenceData();
    }
 
     /**
-     * Regenerate per dataUid (overwrite) - reset (inference false + all geom false) - load geom entities -> convert to DTO - write shp/geojson - successful geo_uids only true - inference true
+     * Regenerate per dataUid (overwrite) - reset (inference false + all geom false) - load geom
+     * entities -> convert to DTO - write shp/geojson - successful geo_uids only true - inference true
      */
     @Transactional
     public void createShpFile() {
@@ -34,8 +33,8 @@ public class InferenceResultShpService {
         // TODO: the export path should come from configuration
         String baseDir = System.getProperty("user.home") + "/export";
 
-        int batchSize = 100;    // number of data_uids handled per run
-        int geomLimit = 500000; // max geoms loaded per data_uid (tune against memory/time)
+        int batchSize = 100; // number of data_uids handled per run
+        int geomLimit = 500000; // max geoms loaded per data_uid (tune against memory/time)
 
         List<Long> dataUids = coreService.findPendingDataUids(batchSize);
 
@@ -46,7 +45,7 @@ public class InferenceResultShpService {
 
             // 2) load the generation targets (entities)
             List<MapSheetAnalDataInferenceGeomEntity> entities =
-                coreService.loadGeomEntities(dataUid, geomLimit);
+                    coreService.loadGeomEntities(dataUid, geomLimit);
 
             if (entities.isEmpty()) {
                 // failure state (stays false) -> retried as an overwrite in the next batch
@@ -55,19 +54,19 @@ public class InferenceResultShpService {
 
             // 3) entity -> DTO
             List<InferenceResultShpDto.Basic> dtoList =
-                entities.stream().map(InferenceResultShpDto.Basic::from).toList();
+                    entities.stream().map(InferenceResultShpDto.Basic::from).toList();
 
             // 4) file name: stage_mapSheet_compare_target (taken from the first row)
             MapSheetAnalDataInferenceGeomEntity first = entities.get(0);
             String baseName =
-                String.format(
-                    "%d_%d_%d_%d",
-                    first.getStage(),
-                    first.getMapSheetNum(),
-                    first.getCompareYyyy(),
-                    first.getTargetYyyy());
+                    String.format(
+                            "%d_%d_%d_%d",
+                            first.getStage(),
+                            first.getMapSheetNum(),
+                            first.getCompareYyyy(),
+                            first.getTargetYyyy());
 
-            String shpBasePath = baseDir + "/shp/" + baseName;  // no extension
+            String shpBasePath = baseDir + "/shp/" + baseName; // no extension
             String geoJsonPath = baseDir + "/geojson/" + baseName + ".geojson";
 
             // 5) write the files (an exception skips the success marking -> overwrite retry next run)
             shpWriter.writeShp(shpBasePath, dtoList);
             shpWriter.writeGeoJson(geoJsonPath, dtoList);
 
             // 6) mark success: only these geo_uids become true
-            List<Long> geoUids = entities.stream().map(MapSheetAnalDataInferenceGeomEntity::getGeoUid).toList();
+            List<Long> geoUids =
+                    entities.stream().map(MapSheetAnalDataInferenceGeomEntity::getGeoUid).toList();
             coreService.markSuccess(dataUid, geoUids);
         }
     }
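Note on the TODO above: one way to take the export path (and the batch knobs next to it) out of the code is a small, type-safe properties holder. A minimal sketch, assuming a hypothetical kamco.export prefix and Spring Boot constructor binding; none of these property names are part of this patch:

    // Illustrative only: binds kamco.export.* so that createShpFile() no longer
    // hardcodes System.getProperty("user.home") + "/export" or the batch sizes.
    package com.kamco.cd.kamcoback.inference.config;

    import org.springframework.boot.context.properties.ConfigurationProperties;

    @ConfigurationProperties(prefix = "kamco.export")
    public record ExportProperties(String baseDir, int batchSize, int geomLimit) {}

Registered via @ConfigurationPropertiesScan (or @EnableConfigurationProperties(ExportProperties.class)) and injected into InferenceResultShpService, this would replace the three hardcoded values above.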
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java
index 4a42189d..1a8f9052 100644
--- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java
@@ -13,25 +13,22 @@ public class InferenceResultShpCoreService {
 
     private final InferenceResultRepository repo;
 
-    /**
-     * inference_results -> (inference, geom) upsert
-     */
+    /** inference_results -> (inference, geom) upsert */
     @Transactional
     public void buildInferenceData() {
         repo.upsertGroupsFromInferenceResults();
         repo.upsertGeomsFromInferenceResults();
     }
 
-    /**
-     * data_uids with file_created_yn = false/null
-     */
+    /** data_uids with file_created_yn = false/null */
     @Transactional(readOnly = true)
     public List<Long> findPendingDataUids(int limit) {
         return repo.findPendingDataUids(limit);
     }
 
     /**
-     * Start of regeneration, with overwrite semantics (always from scratch) - inference.file_created_yn = false, file_created_dttm = null - reset every geom file_created_yn to false
+     * Start of regeneration, with overwrite semantics (always from scratch) - inference.file_created_yn =
+     * false, file_created_dttm = null - reset every geom file_created_yn to false
      */
     @Transactional
     public void resetForRegenerate(Long dataUid) {
@@ -39,17 +36,13 @@ public class InferenceResultShpCoreService {
         repo.resetGeomCreatedByDataUid(dataUid);
     }
 
-    /**
-     * Load the geom entities to generate (file_created_yn=false/null + geom not null)
-     */
+    /** Load the geom entities to generate (file_created_yn=false/null + geom not null) */
     @Transactional(readOnly = true)
     public List<MapSheetAnalDataInferenceGeomEntity> loadGeomEntities(Long dataUid, int limit) {
         return repo.findGeomEntitiesByDataUid(dataUid, limit);
     }
 
-    /**
-     * Mark success: - geom.file_created_yn=true for the successful geo_uids only - inference.file_created_yn=true
-     */
+    /** Mark success: - geom.file_created_yn=true for the successful geo_uids only - inference.file_created_yn=true */
     @Transactional
     public void markSuccess(Long dataUid, List<Long> geoUids) {
         repo.markGeomCreatedByGeoUids(geoUids);
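The javadocs above spell out an idempotent overwrite cycle: nothing is marked true until the files exist, so a failed or interrupted run is simply picked up again by the next one. For illustration, a hypothetical scheduled driver for the two public entry points (the class, cron expression, and wiring are assumptions, not part of this patch):

    package com.kamco.cd.kamcoback.inference.service;

    import lombok.RequiredArgsConstructor;
    import org.springframework.scheduling.annotation.Scheduled;
    import org.springframework.stereotype.Component;

    // Illustrative wiring: upsert the inference rows first, then export whatever
    // is still pending. Re-running is safe because pending rows stay false until
    // markSuccess flips them.
    @Component
    @RequiredArgsConstructor
    public class InferenceResultShpScheduler {

        private final InferenceResultShpService service;

        @Scheduled(cron = "0 */10 * * * *") // hypothetical: every 10 minutes
        public void run() {
            service.saveInferenceResultData();
            service.createShpFile();
        }
    }

This assumes @EnableScheduling is active somewhere in the application configuration.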
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java
index 54175976..05c085de 100644
--- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java
@@ -17,14 +17,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
 
     private final JPAQueryFactory queryFactory;
 
-    @PersistenceContext
-    private final EntityManager em;
+    @PersistenceContext private final EntityManager em;
 
     private final QMapSheetAnalDataInferenceEntity i =
-        QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
+            QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
 
     private final QMapSheetAnalDataInferenceGeomEntity g =
-        QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
+            QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
 
     // ===============================
     // Upsert (Native only)
     // ===============================
@@ -33,7 +32,8 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     @Override
     public int upsertGroupsFromInferenceResults() {
 
-        String sql = """
+        String sql =
+                """
                 INSERT INTO tb_map_sheet_anal_data_inference (
                     stage,
                     compare_yyyy,
@@ -68,7 +68,8 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     public int upsertGeomsFromInferenceResults() {
 
         // watch the class_after_prob column mapping typo (mapped correctly here)
-        String sql = """
+        String sql =
+                """
                 INSERT INTO tb_map_sheet_anal_data_inference_geom (
                     uuid, stage, cd_prob, compare_yyyy, target_yyyy,
                     map_sheet_num, class_before_cd, class_before_prob, class_after_cd, class_after_prob,
@@ -137,12 +138,12 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     @Override
     public List<Long> findPendingDataUids(int limit) {
         return queryFactory
-            .select(i.id) // data_uid
-            .from(i)
-            .where(i.fileCreatedYn.isFalse().or(i.fileCreatedYn.isNull()))
-            .orderBy(i.id.asc())
-            .limit(limit)
-            .fetch();
+                .select(i.id) // data_uid
+                .from(i)
+                .where(i.fileCreatedYn.isFalse().or(i.fileCreatedYn.isNull()))
+                .orderBy(i.id.asc())
+                .limit(limit)
+                .fetch();
     }
 
     // ===============================
@@ -154,13 +155,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
         ZonedDateTime now = ZonedDateTime.now();
 
         return (int)
-            queryFactory
-                .update(i)
-                .set(i.fileCreatedYn, false)
-                .set(i.fileCreatedDttm, (ZonedDateTime) null)
-                .set(i.updatedDttm, now)
-                .where(i.id.eq(dataUid))
-                .execute();
+                queryFactory
+                        .update(i)
+                        .set(i.fileCreatedYn, false)
+                        .set(i.fileCreatedDttm, (ZonedDateTime) null)
+                        .set(i.updatedDttm, now)
+                        .where(i.id.eq(dataUid))
+                        .execute();
     }
 
     @Override
@@ -168,13 +169,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
         ZonedDateTime now = ZonedDateTime.now();
 
         return (int)
-            queryFactory
-                .update(i)
-                .set(i.fileCreatedYn, true)
-                .set(i.fileCreatedDttm, now)
-                .set(i.updatedDttm, now)
-                .where(i.id.eq(dataUid))
-                .execute();
+                queryFactory
+                        .update(i)
+                        .set(i.fileCreatedYn, true)
+                        .set(i.fileCreatedDttm, now)
+                        .set(i.updatedDttm, now)
+                        .where(i.id.eq(dataUid))
+                        .execute();
     }
 
     @Override
@@ -182,13 +183,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
         ZonedDateTime now = ZonedDateTime.now();
 
         return (int)
-            queryFactory
-                .update(g)
-                .set(g.fileCreatedYn, false)
-                .set(g.fileCreatedDttm, (ZonedDateTime) null)
-                .set(g.updatedDttm, now) // ✅ fine as long as the entity/Q-type is ZonedDateTime
-                .where(g.dataUid.eq(dataUid))
-                .execute();
+                queryFactory
+                        .update(g)
+                        .set(g.fileCreatedYn, false)
+                        .set(g.fileCreatedDttm, (ZonedDateTime) null)
+                        .set(g.updatedDttm, now) // ✅ fine as long as the entity/Q-type is ZonedDateTime
+                        .where(g.dataUid.eq(dataUid))
+                        .execute();
     }
 
     @Override
@@ -200,13 +201,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
         ZonedDateTime now = ZonedDateTime.now();
 
         return (int)
-            queryFactory
-                .update(g)
-                .set(g.fileCreatedYn, true)
-                .set(g.fileCreatedDttm, now)
-                .set(g.updatedDttm, now)
-                .where(g.geoUid.in(geoUids))
-                .execute();
+                queryFactory
+                        .update(g)
+                        .set(g.fileCreatedYn, true)
+                        .set(g.fileCreatedDttm, now)
+                        .set(g.updatedDttm, now)
+                        .where(g.geoUid.in(geoUids))
+                        .execute();
     }
 
     // ===============================
@@ -214,15 +215,16 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     // ===============================
 
     @Override
-    public List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(Long dataUid, int limit) {
+    public List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(
+            Long dataUid, int limit) {
         return queryFactory
-            .selectFrom(g)
-            .where(
-                g.dataUid.eq(dataUid),
-                g.geom.isNotNull(),
-                g.fileCreatedYn.isFalse().or(g.fileCreatedYn.isNull()))
-            .orderBy(g.geoUid.asc())
-            .limit(limit)
-            .fetch();
+                .selectFrom(g)
+                .where(
+                        g.dataUid.eq(dataUid),
+                        g.geom.isNotNull(),
+                        g.fileCreatedYn.isFalse().or(g.fileCreatedYn.isNull()))
+                .orderBy(g.geoUid.asc())
+                .limit(limit)
+                .fetch();
     }
 }
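Two small QueryDSL observations on the clauses above, as a sketch rather than a change to the patch. The (ZonedDateTime) null casts are only there to disambiguate the generic set(path, value) overload; UpdateClause.setNull(path) expresses the same thing without the cast. And the false-or-null test is repeated three times, so it could live in one helper. Both are shown against the same i/g Q-instances; the helper name is hypothetical:

    // Equivalent to .set(i.fileCreatedDttm, (ZonedDateTime) null), without the cast:
    queryFactory
            .update(i)
            .set(i.fileCreatedYn, false)
            .setNull(i.fileCreatedDttm)
            .set(i.updatedDttm, ZonedDateTime.now())
            .where(i.id.eq(dataUid))
            .execute();

    // Hypothetical helper for the repeated "not yet exported" predicate
    // (com.querydsl.core.types.dsl.BooleanExpression / BooleanPath):
    private static BooleanExpression notExported(BooleanPath fileCreatedYn) {
        return fileCreatedYn.isFalse().or(fileCreatedYn.isNull());
    }
    // usage: .where(notExported(i.fileCreatedYn))
    //        .where(g.dataUid.eq(dataUid), g.geom.isNotNull(), notExported(g.fileCreatedYn))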