From d5f6d7e91d68c661285567d7ea8ff05f9cc65a9a Mon Sep 17 00:00:00 2001 From: "gayoun.park" Date: Fri, 26 Dec 2025 09:40:14 +0900 Subject: [PATCH 1/3] =?UTF-8?q?=EB=AA=A8=EB=8D=B8=EA=B4=80=EB=A6=AC=20?= =?UTF-8?q?=EB=AA=A9=EB=A1=9D=20DTO=20=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java b/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java index 31005da6..ea9da363 100644 --- a/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java +++ b/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java @@ -93,6 +93,7 @@ public class ModelMngDto { @Getter @Setter @NoArgsConstructor + @AllArgsConstructor public static class ModelList { private Integer rowNum; From 8e6b41c9adcbd5b5983ed74001ae054b19a1b806 Mon Sep 17 00:00:00 2001 From: teddy Date: Fri, 26 Dec 2025 10:22:53 +0900 Subject: [PATCH 2/3] =?UTF-8?q?=ED=8C=8C=EC=9D=BC=EC=83=9D=EC=84=B1=20?= =?UTF-8?q?=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- build.gradle | 3 + .../cd/kamcoback/config/SecurityConfig.java | 2 + .../InferenceResultShpApiController.java | 27 ++ .../cd/kamcoback/inference/ShpWriter.java | 13 + .../inference/dto/InferenceResultShpDto.java | 86 ++++ .../inference/service/GeoToolsShpWriter.java | 435 ++++++++++++++++++ .../service/InferenceResultShpService.java | 58 +++ .../core/InferenceResultCoreService.java | 20 +- .../core/InferenceResultShpCoreService.java | 59 +++ .../entity/InferenceResultEntity.java | 79 ++++ .../MapSheetAnalDataInferenceEntity.java | 164 +++++++ .../MapSheetAnalDataInferenceGeomEntity.java | 150 ++++++ .../Inference/InferenceResultRepository.java | 4 +- .../InferenceResultRepositoryCustom.java | 26 +- .../InferenceResultRepositoryImpl.java | 368 +-------------- ...pSheetAnalDataInferenceGeomRepository.java | 8 + ...AnalDataInferenceGeomRepositoryCustom.java | 3 + ...etAnalDataInferenceGeomRepositoryImpl.java | 11 + .../MapSheetAnalDataInferenceRepository.java | 10 + ...heetAnalDataInferenceRepositoryCustom.java | 3 + ...pSheetAnalDataInferenceRepositoryImpl.java | 13 + .../Inference/MapSheetAnalDataRepository.java | 7 + .../MapSheetAnalDataRepositoryCustom.java | 32 ++ .../MapSheetAnalDataRepositoryImpl.java | 371 +++++++++++++++ 24 files changed, 1571 insertions(+), 381 deletions(-) create mode 100644 src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultShpApiController.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/inference/ShpWriter.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/entity/InferenceResultEntity.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceEntity.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceGeomEntity.java create mode 100644 
src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepository.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryCustom.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryImpl.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepository.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepositoryCustom.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepositoryImpl.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataRepository.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataRepositoryCustom.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataRepositoryImpl.java diff --git a/build.gradle b/build.gradle index 223d47bf..c71c8a94 100644 --- a/build.gradle +++ b/build.gradle @@ -38,6 +38,9 @@ dependencies { //geometry implementation 'com.fasterxml.jackson.core:jackson-databind' + implementation "org.geotools:gt-shapefile:30.0" + implementation "org.geotools:gt-referencing:30.0" + implementation "org.geotools:gt-geojson:30.0" implementation 'org.locationtech.jts.io:jts-io-common:1.20.0' implementation 'org.locationtech.jts:jts-core:1.19.0' implementation 'org.hibernate:hibernate-spatial:6.2.7.Final' diff --git a/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java b/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java index fe1ab0cf..9ba95b4b 100644 --- a/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java +++ b/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java @@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.config; import com.kamco.cd.kamcoback.auth.CustomAuthenticationProvider; import com.kamco.cd.kamcoback.auth.JwtAuthenticationFilter; +import com.kamco.cd.kamcoback.auth.MenuAuthorizationManager; import java.util.List; import lombok.RequiredArgsConstructor; import org.springframework.context.annotation.Bean; @@ -28,6 +29,7 @@ public class SecurityConfig { private final JwtAuthenticationFilter jwtAuthenticationFilter; private final CustomAuthenticationProvider customAuthenticationProvider; + private final MenuAuthorizationManager menuAuthorizationManager; @Bean public SecurityFilterChain securityFilterChain(HttpSecurity http) throws Exception { diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultShpApiController.java b/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultShpApiController.java new file mode 100644 index 00000000..43f99746 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultShpApiController.java @@ -0,0 +1,27 @@ +package com.kamco.cd.kamcoback.inference; + +import com.kamco.cd.kamcoback.config.api.ApiResponseDto; +import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.tags.Tag; +import lombok.RequiredArgsConstructor; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + 
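+// 호출 예 — 포트와 인증 헤더는 배포 환경에 따라 다르다(예시 값):
+//   curl -X POST http://localhost:8080/api/inference/shp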
+@Tag(name = "추론결과 데이터 생성", description = "추론결과 데이터 생성 API") +@RestController +@RequiredArgsConstructor +@RequestMapping("/api/inference/shp") +public class InferenceResultShpApiController { + + private final InferenceResultShpService inferenceResultShpService; + + + @Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장") + @PostMapping + public ApiResponseDto saveInferenceResultData() { + inferenceResultShpService.saveInferenceResultData(); + return ApiResponseDto.createOK(null); + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/ShpWriter.java b/src/main/java/com/kamco/cd/kamcoback/inference/ShpWriter.java new file mode 100644 index 00000000..85e03248 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/inference/ShpWriter.java @@ -0,0 +1,13 @@ +package com.kamco.cd.kamcoback.inference; + +import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto; +import java.util.List; + +public interface ShpWriter { + + // SHP (.shp/.shx/.dbf) + void writeShp(String shpBasePath, List rows); + + // GeoJSON (.geojson) + void writeGeoJson(String geoJsonPath, List rows); +} diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java new file mode 100644 index 00000000..645c4d58 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java @@ -0,0 +1,86 @@ +package com.kamco.cd.kamcoback.inference.dto; + +import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity; +import java.time.ZonedDateTime; +import java.util.UUID; +import lombok.Getter; +import lombok.Setter; +import org.locationtech.jts.geom.Geometry; + +public class InferenceResultShpDto { + + @Getter + @Setter + public static class Basic { + + private Long id; + private UUID uuid; + private Integer stage; + private Float cdProb; + private Integer input1; + private Integer input2; + private Long mapId; + private String beforeClass; + private Float beforeProbability; + private String afterClass; + private Float afterProbability; + private Float area; + private ZonedDateTime createdDttm; + private ZonedDateTime updatedDttm; + private Geometry geometry; + + public Basic( + Long id, + UUID uuid, + Integer stage, + Float cdProb, + Integer input1, + Integer input2, + Long mapId, + String beforeClass, + Float beforeProbability, + String afterClass, + Float afterProbability, + Float area, + ZonedDateTime createdDttm, + ZonedDateTime updatedDttm, + Geometry geometry + ) { + this.id = id; + this.uuid = uuid; + this.stage = stage; + this.cdProb = cdProb; + this.input1 = input1; + this.input2 = input2; + this.mapId = mapId; + this.beforeClass = beforeClass; + this.beforeProbability = beforeProbability; + this.afterClass = afterClass; + this.afterProbability = afterProbability; + this.area = area; + this.createdDttm = createdDttm; + this.updatedDttm = updatedDttm; + this.geometry = geometry; + } + + public static Basic from(InferenceResultEntity e) { + return new Basic( + e.getId(), + e.getUuid(), + e.getStage(), + e.getCdProb(), + e.getInput1(), + e.getInput2(), + e.getMapId(), + e.getBeforeClass(), + e.getBeforeProbability(), + e.getAfterClass(), + e.getAfterProbability(), + e.getArea(), + e.getCreatedDttm(), + e.getUpdatedDttm(), + e.getGeometry() + ); + } + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java new file mode 100644 index 
00000000..184312e7 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java @@ -0,0 +1,435 @@ +package com.kamco.cd.kamcoback.inference.service; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.kamco.cd.kamcoback.inference.ShpWriter; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto; +import java.io.File; +import java.io.FileOutputStream; +import java.io.OutputStreamWriter; +import java.io.Serializable; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import lombok.extern.slf4j.Slf4j; +import org.geotools.api.data.SimpleFeatureSource; +import org.geotools.api.data.SimpleFeatureStore; +import org.geotools.api.data.Transaction; +import org.geotools.api.feature.simple.SimpleFeature; +import org.geotools.api.feature.simple.SimpleFeatureType; +import org.geotools.api.referencing.crs.CoordinateReferenceSystem; +import org.geotools.data.shapefile.ShapefileDataStore; +import org.geotools.data.shapefile.ShapefileDataStoreFactory; +import org.geotools.feature.DefaultFeatureCollection; +import org.geotools.feature.simple.SimpleFeatureBuilder; +import org.geotools.feature.simple.SimpleFeatureTypeBuilder; +import org.geotools.geojson.geom.GeometryJSON; +import org.geotools.referencing.CRS; +import org.locationtech.jts.geom.Geometry; +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class GeoToolsShpWriter implements ShpWriter { + + // DBF 파일 한글 깨짐 방지를 위해 EUC-KR 사용 + private static final Charset DBF_CHARSET = Charset.forName("EUC-KR"); + + // GeoJSON 출력은 UTF-8 + private static final Charset GEOJSON_CHARSET = StandardCharsets.UTF_8; + + // 좌표계: Korea 2000 / Central Belt 2010 + private static final String EPSG_5186 = "EPSG:5186"; + + /** + * SHP 파일(.shp/.shx/.dbf/.prj)을 생성한다. + *

+ * <p>- shpBasePath를 기준으로 파일을 생성한다.
+ *   예) /Users/kim/export/shp/1_map_2021_2022
+ *   → 1_map_2021_2022.shp
+ *   → 1_map_2021_2022.shx
+ *   → 1_map_2021_2022.dbf
+ *   → 1_map_2021_2022.prj
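+ *
+ * <p>사용 예 — 경로와 변수명은 예시 값이다:
+ * <pre>{@code
+ * List<InferenceResultShpDto.Basic> rows = ...; // 동일 그룹의 데이터 목록
+ * shpWriter.writeShp("/Users/kim/export/shp/1_map_2021_2022", rows);
+ * // → 같은 이름의 .shp/.shx/.dbf/.prj 4개 파일이 생성된다.
+ * }</pre>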

+ * <p>- geometry 타입은 첫 번째 유효 geometry 기준으로 스키마를 생성한다.
+ * - 좌표계는 EPSG:5186으로 설정하며, .prj 파일을 직접 생성한다.
+ *
+ * @param shpBasePath 확장자를 제외한 SHP 파일 기본 경로
+ * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록
+ */
+ @Override
+ public void writeShp(String shpBasePath, List<InferenceResultShpDto.Basic> rows) {
+
+   if (rows == null || rows.isEmpty()) {
+     return;
+   }
+
+   // SHP는 Geometry.class를 허용하지 않으므로
+   // 첫 번째 유효 geometry의 "구체 타입"을 기준으로 스키마를 생성한다.
+   Geometry firstGeom = firstNonNullGeometry(rows);
+   if (firstGeom == null) {
+     throw new IllegalArgumentException(
+         "SHP 생성 실패: geometry가 전부 null 입니다. path=" + shpBasePath);
+   }
+
+   @SuppressWarnings("unchecked")
+   Class<? extends Geometry> geomType =
+       (Class<? extends Geometry>) firstGeom.getClass();
+
+   ShapefileDataStore dataStore = null;
+
+   try {
+     File shpFile = new File(shpBasePath + ".shp");
+     createDirectories(shpFile);
+
+     // EPSG:5186 CRS 로딩
+     CoordinateReferenceSystem crs = CRS.decode(EPSG_5186, false);
+
+     // FeatureType(schema) 생성
+     SimpleFeatureType schema = createSchema(geomType, crs);
+
+     // ShapefileDataStore 생성
+     dataStore = createDataStore(shpFile, schema);
+
+     // FeatureCollection 생성
+     DefaultFeatureCollection collection =
+         buildFeatureCollection(schema, rows);
+
+     // 실제 SHP 파일에 feature 쓰기
+     writeFeatures(dataStore, collection);
+
+     // .prj 파일 직접 생성 (EPSG:5186)
+     writePrjFile(shpBasePath, crs);
+
+     log.info(
+         "SHP 생성 완료: {} ({} features)",
+         shpFile.getAbsolutePath(),
+         collection.size()
+     );
+
+   } catch (Exception e) {
+     throw new RuntimeException("SHP 생성 실패: " + shpBasePath, e);
+   } finally {
+     if (dataStore != null) {
+       try {
+         dataStore.dispose();
+       } catch (Exception ignore) {
+       }
+     }
+   }
+ }
+
+ /**
+ * GeoJSON 파일(.geojson)을 생성한다.
+ *

+ * <p>- FeatureCollection 형태로 출력한다.
+ * - 최상단에 name / crs / properties를 포함한다.
+ * - 각 Feature는 polygon 단위로 생성된다.
+ * - geometry는 GeoTools GeometryJSON을 사용하여 직렬화한다.
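+ *
+ * <p>개별 Feature의 형태 예 — 값은 설명용 가상의 예시이다:
+ * <pre>{@code
+ * { "type": "Feature",
+ *   "properties": { "polygon_id": "...", "cd_prob": 0.97,
+ *                   "input1": 2021, "input2": 2022, "map_id": 100,
+ *                   "area": 123.4, "before_c": "A", "before_p": 0.91,
+ *                   "after_c": "B", "after_p": 0.95 },
+ *   "geometry": { "type": "Polygon", "coordinates": [ ... ] } }
+ * }</pre>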

+ * <p>GeoJSON 구조 예:
+ * <pre>{@code
+ * { "type": "FeatureCollection",
+ *   "name": "stage_input1_input2_mapId",
+ *   "crs": { "type": "name",
+ *            "properties": { "name": "urn:ogc:def:crs:EPSG::5186" } },
+ *   "properties": { ... },
+ *   "features": [ ... ] }
+ * }</pre>
+ *
+ * @param geoJsonPath 생성할 GeoJSON 파일의 전체 경로 (.geojson 포함)
+ * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록
+ */
+ @Override
+ public void writeGeoJson(String geoJsonPath, List<InferenceResultShpDto.Basic> rows) {
+
+   if (rows == null || rows.isEmpty()) {
+     return;
+   }
+
+   try {
+     File geoJsonFile = new File(geoJsonPath);
+     createDirectories(geoJsonFile);
+
+     // 그룹 공통 메타 정보는 첫 row 기준
+     InferenceResultShpDto.Basic first = rows.get(0);
+
+     ObjectMapper om = new ObjectMapper();
+     GeometryJSON gj = new GeometryJSON(15);
+
+     // FeatureCollection 루트
+     ObjectNode root = om.createObjectNode();
+     root.put("type", "FeatureCollection");
+
+     // name: stage_input1_input2_mapId
+     String name = String.format(
+         "%d_%d_%d_%d",
+         first.getStage(),
+         first.getInput1(),
+         first.getInput2(),
+         first.getMapId()
+     );
+     root.put("name", name);
+
+     // CRS (EPSG:5186)
+     ObjectNode crs = om.createObjectNode();
+     crs.put("type", "name");
+     ObjectNode crsProps = om.createObjectNode();
+     crsProps.put("name", "urn:ogc:def:crs:EPSG::5186");
+     crs.set("properties", crsProps);
+     root.set("crs", crs);
+
+     // 그룹 공통 properties
+     ObjectNode groupProps = om.createObjectNode();
+     groupProps.put("stage", first.getStage());
+     groupProps.put("input1", first.getInput1());
+     groupProps.put("input2", first.getInput2());
+     groupProps.put("map_id", first.getMapId());
+     // 학습서버 버전은 추후 추가
+//   groupProps.put("m1", "v1.2222.251223121212");
+//   groupProps.put("m2", "v2.211.251223121213");
+//   groupProps.put("m3", "v3.233.251223121214");
+     root.set("properties", groupProps);
+
+     // features 배열
+     ArrayNode features = om.createArrayNode();
+
+     for (InferenceResultShpDto.Basic dto : rows) {
+       if (dto.getGeometry() == null) {
+         continue;
+       }
+
+       ObjectNode feature = om.createObjectNode();
+       feature.put("type", "Feature");
+
+       // feature properties
+       ObjectNode p = om.createObjectNode();
+       p.put("polygon_id",
+           dto.getUuid() != null ? dto.getUuid().toString() : null);
+       if (dto.getCdProb() != null) {
+         p.put("cd_prob", dto.getCdProb());
+       }
+       if (dto.getInput1() != null) {
+         p.put("input1", dto.getInput1());
+       }
+       if (dto.getInput2() != null) {
+         p.put("input2", dto.getInput2());
+       }
+       if (dto.getMapId() != null) {
+         p.put("map_id", dto.getMapId());
+       }
+       if (dto.getArea() != null) {
+         p.put("area", dto.getArea());
+       }
+       p.put("before_c", dto.getBeforeClass());
+       if (dto.getBeforeProbability() != null) {
+         p.put("before_p", dto.getBeforeProbability());
+       }
+       p.put("after_c", dto.getAfterClass());
+       if (dto.getAfterProbability() != null) {
+         p.put("after_p", dto.getAfterProbability());
+       }
+
+       feature.set("properties", p);
+
+       // geometry
+       String geomJson = gj.toString(dto.getGeometry());
+       JsonNode geomNode = om.readTree(geomJson);
+       feature.set("geometry", geomNode);
+
+       features.add(feature);
+     }
+
+     root.set("features", features);
+
+     // 파일 쓰기
+     try (OutputStreamWriter w =
+         new OutputStreamWriter(
+             new FileOutputStream(geoJsonFile),
+             GEOJSON_CHARSET
+         )) {
+       om.writerWithDefaultPrettyPrinter().writeValue(w, root);
+     }
+
+     log.info(
+         "GeoJSON 생성 완료: {} ({} features)",
+         geoJsonFile.getAbsolutePath(),
+         features.size()
+     );
+
+   } catch (Exception e) {
+     throw new RuntimeException("GeoJSON 생성 실패: " + geoJsonPath, e);
+   }
+ }
+
+ /**
+ * rows 목록에서 첫 번째로 발견되는 non-null Geometry를 반환한다.
+ *

+ * <p>- SHP 스키마 생성 시 geometry 타입 결정을 위해 사용된다.
+ *
+ * @param rows DTO 목록
+ * @return 첫 번째 non-null Geometry, 없으면 null
+ */
+ private Geometry firstNonNullGeometry(List<InferenceResultShpDto.Basic> rows) {
+   for (InferenceResultShpDto.Basic r : rows) {
+     if (r != null && r.getGeometry() != null) {
+       return r.getGeometry();
+     }
+   }
+   return null;
+ }
+
+ /**
+ * SHP 파일에 사용할 SimpleFeatureType(schema)를 생성한다.
+ *
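+ * <p>아래 정의에서 만들어지는 컬럼 구성(코드에서 도출한 요약):
+ * <pre>
+ * the_geom(geometry), poly_id(문자 36), cd_prob, input1, input2,
+ * map_id, area, before_c(문자 20), before_p, after_c(문자 20), after_p
+ * </pre>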

+ * <p>- geometry 컬럼은 반드시 첫 번째 컬럼이어야 한다.
+ * - DBF 컬럼은 SHP 제약(컬럼명 10자, 길이 제한)을 고려한다.
+ *
+ * @param geomType geometry의 구체 타입 (Polygon, MultiPolygon 등)
+ * @param crs 좌표계(EPSG:5186)
+ * @return SimpleFeatureType
+ */
+ private SimpleFeatureType createSchema(
+     Class<? extends Geometry> geomType,
+     CoordinateReferenceSystem crs
+ ) {
+   SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();
+   b.setName("inference_result");
+   b.setCRS(crs);
+
+   // geometry는 반드시 첫 컬럼
+   b.add("the_geom", geomType);
+
+   // DBF 컬럼 정의 (10자 제한 고려)
+   b.length(36).add("poly_id", String.class);
+   b.add("cd_prob", Double.class);
+   b.add("input1", Integer.class);
+   b.add("input2", Integer.class);
+   b.add("map_id", Long.class);
+   b.add("area", Double.class);
+   b.length(20).add("before_c", String.class);
+   b.add("before_p", Double.class);
+   b.length(20).add("after_c", String.class);
+   b.add("after_p", Double.class);
+
+   return b.buildFeatureType();
+ }
+
+ /**
+ * ShapefileDataStore를 생성하고 스키마를 등록한다.
+ *
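+ * <p>생성 결과를 다시 열어 스키마를 확인하는 스케치 — 경로는 예시 값이다:
+ * <pre>{@code
+ * ShapefileDataStore ds =
+ *     new ShapefileDataStore(new File("out.shp").toURI().toURL());
+ * ds.setCharset(Charset.forName("EUC-KR")); // 기록 시 사용한 DBF 인코딩과 일치시킨다
+ * System.out.println(ds.getSchema().getTypeName());
+ * ds.dispose();
+ * }</pre>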

+ * <p>- DBF 파일 인코딩은 EUC-KR로 설정한다.
+ * - spatial index(.qix)를 생성한다.
+ *
+ * @param shpFile SHP 파일 객체
+ * @param schema SimpleFeatureType
+ * @return 생성된 ShapefileDataStore
+ */
+ private ShapefileDataStore createDataStore(
+     File shpFile,
+     SimpleFeatureType schema
+ ) throws Exception {
+
+   Map<String, Serializable> params = new HashMap<>();
+   params.put("url", shpFile.toURI().toURL());
+   params.put("create spatial index", Boolean.TRUE);
+
+   ShapefileDataStoreFactory factory = new ShapefileDataStoreFactory();
+   ShapefileDataStore dataStore =
+       (ShapefileDataStore) factory.createNewDataStore(params);
+
+   dataStore.setCharset(DBF_CHARSET);
+   dataStore.createSchema(schema);
+
+   return dataStore;
+ }
+
+ /**
+ * DTO 목록을 SimpleFeatureCollection으로 변환한다.
+ *
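+ * <p>참고: SimpleFeatureBuilder.add()는 위치(순서) 기반으로 속성을 채우므로,
+ * 아래 add() 호출 순서는 createSchema()의 컬럼 정의 순서와 1:1로 일치해야 한다.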

+ * <p>- DTO 1건당 Feature 1개 생성
+ * - geometry가 null인 데이터는 제외한다.
+ *
+ * @param schema FeatureType
+ * @param rows DTO 목록
+ * @return DefaultFeatureCollection
+ */
+ private DefaultFeatureCollection buildFeatureCollection(
+     SimpleFeatureType schema,
+     List<InferenceResultShpDto.Basic> rows
+ ) {
+   DefaultFeatureCollection collection = new DefaultFeatureCollection();
+   SimpleFeatureBuilder builder = new SimpleFeatureBuilder(schema);
+
+   for (InferenceResultShpDto.Basic dto : rows) {
+     if (dto == null || dto.getGeometry() == null) {
+       continue;
+     }
+
+     builder.add(dto.getGeometry());
+     builder.add(dto.getUuid() != null ? dto.getUuid().toString() : null);
+     builder.add(dto.getCdProb() != null ? dto.getCdProb().doubleValue() : null);
+     builder.add(dto.getInput1());
+     builder.add(dto.getInput2());
+     builder.add(dto.getMapId());
+     builder.add(dto.getArea() != null ? dto.getArea().doubleValue() : null);
+     builder.add(dto.getBeforeClass());
+     builder.add(dto.getBeforeProbability() != null
+         ? dto.getBeforeProbability().doubleValue()
+         : null);
+     builder.add(dto.getAfterClass());
+     builder.add(dto.getAfterProbability() != null
+         ? dto.getAfterProbability().doubleValue()
+         : null);
+
+     SimpleFeature feature = builder.buildFeature(null);
+     collection.add(feature);
+     builder.reset();
+   }
+
+   return collection;
+ }
+
+ /**
+ * FeatureCollection을 SHP 파일에 실제로 기록한다.
+ *
+ * @param dataStore ShapefileDataStore
+ * @param collection FeatureCollection
+ */
+ private void writeFeatures(
+     ShapefileDataStore dataStore,
+     DefaultFeatureCollection collection
+ ) throws Exception {
+
+   String typeName = dataStore.getTypeNames()[0];
+   SimpleFeatureSource featureSource =
+       dataStore.getFeatureSource(typeName);
+
+   if (!(featureSource instanceof SimpleFeatureStore store)) {
+     throw new IllegalStateException("FeatureStore 생성 실패");
+   }
+
+   store.setTransaction(Transaction.AUTO_COMMIT);
+   store.addFeatures(collection);
+   store.getTransaction().commit();
+ }
+
+ /**
+ * SHP 좌표계 정보를 담은 .prj 파일을 생성한다.
+ *
+ * @param shpBasePath SHP 기본 경로 (확장자 제외)
+ * @param crs 좌표계(EPSG:5186)
+ */
+ private void writePrjFile(
+     String shpBasePath,
+     CoordinateReferenceSystem crs
+ ) throws Exception {
+
+   File prjFile = new File(shpBasePath + ".prj");
+   createDirectories(prjFile);
+
+   Files.writeString(
+       prjFile.toPath(),
+       crs.toWKT(),
+       StandardCharsets.UTF_8
+   );
+ }
+
+ /**
+ * 파일이 생성될 디렉토리가 없으면 생성한다.
+ *
+ * @param file 생성 대상 파일
+ */
+ private void createDirectories(File file) throws Exception {
+   File parent = file.getParentFile();
+   if (parent != null) {
+     Files.createDirectories(parent.toPath());
+   }
+ }
+}
diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java
new file mode 100644
index 00000000..b7a5ce3e
--- /dev/null
+++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java
@@ -0,0 +1,58 @@
+package com.kamco.cd.kamcoback.inference.service;
+
+import com.kamco.cd.kamcoback.inference.ShpWriter;
+import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
+import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService;
+import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService.ShpKey;
+import java.util.List;
+import lombok.RequiredArgsConstructor;
+import org.springframework.stereotype.Service;
+import org.springframework.transaction.annotation.Transactional;
+
+@Service
+@RequiredArgsConstructor
+@Transactional(readOnly = true)
+public class InferenceResultShpService {
+
+  private final InferenceResultShpCoreService coreService;
+  private final ShpWriter shpWriter;
+
+  public void saveInferenceResultData() {
+
+    coreService.streamGrouped(1000, (key, entities) -> {
+
+      // Entity -> DTO
+      List<InferenceResultShpDto.Basic> dtoList =
+          entities.stream().map(InferenceResultShpDto.Basic::from).toList();
+
+      flushGroup(key, dtoList);
+    });
+  }
+
+  /**
+   * 그룹 단위로 파일명과 경로를 구성해 SHP/GeoJSON 파일을 생성한다.
+   *
+   * @param key 그룹 키(stage, mapId, input1, input2)
+   * @param dtoList 해당 그룹의 데이터 목록
+   */
+  private void flushGroup(ShpKey key, List<InferenceResultShpDto.Basic> dtoList) {
+
+    // TODO 경로 정해지면 수정해야함
+    String baseDir = System.getProperty("user.home") + "/export";
+
+    // 파일명 stage_input1_input2_mapId (GeoJSON의 name과 동일한 순서)
+    String baseName = String.format(
+        "%d_%d_%d_%d",
+        key.stage(), key.input1(), key.input2(), key.mapId()
+    );
+
+    String shpBasePath = baseDir + "/shp/" + baseName; // 확장자 없이
+    String geoJsonPath = baseDir + "/geojson/" + baseName + ".geojson";
+
+    // shp: .shp/.shx/.dbf 생성
+    shpWriter.writeShp(shpBasePath, dtoList);
+
+    // geojson: .geojson 생성
+    shpWriter.writeGeoJson(geoJsonPath, dtoList);
+  }
+}
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java
index c66d5138..8ff0c07c 100644
--- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java
@@ -6,7 +6,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
 import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
-import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
+import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetAnalDataRepository;
 import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx5kRepository;
 import jakarta.persistence.EntityNotFoundException;
 import jakarta.validation.constraints.NotNull;
@@ -20,7 +20,7 @@ import org.springframework.transaction.annotation.Transactional;
 @RequiredArgsConstructor
 public class InferenceResultCoreService {
 
-  private final InferenceResultRepository inferenceResultRepository;
+  private final MapSheetAnalDataRepository mapSheetAnalDataRepository;
   private final MapInkx5kRepository mapInkx5kRepository;
 
   /**
@@ -31,7 +31,7 @@ public class InferenceResultCoreService {
    */
   public Page<InferenceResultDto.AnalResList> getInferenceResultList(
       InferenceResultDto.SearchReq searchReq) {
-    return inferenceResultRepository.getInferenceResultList(searchReq);
+    return mapSheetAnalDataRepository.getInferenceResultList(searchReq);
   }
 
   /**
@@ -42,7 +42,7 @@ public class InferenceResultCoreService {
    */
   public InferenceResultDto.AnalResSummary getInferenceResultSummary(Long id) {
     InferenceResultDto.AnalResSummary summary =
-        inferenceResultRepository
+        mapSheetAnalDataRepository
             .getInferenceResultSummary(id)
             .orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
     return summary;
@@ -55,7 +55,7 @@ public class InferenceResultCoreService {
    * @return
    */
   public List<Dashboard> getDashboard(Long id) {
-    return inferenceResultRepository.getDashboard(id);
+    return mapSheetAnalDataRepository.getDashboard(id);
   }
 
   /**
@@ -66,7 +66,7 @@ public class InferenceResultCoreService {
    */
   public Page<InferenceResultDto.Geom> getInferenceResultGeomList(
       Long id, InferenceResultDto.SearchGeoReq searchGeoReq) {
-    return inferenceResultRepository.getInferenceGeomList(id, searchGeoReq);
+    return mapSheetAnalDataRepository.getInferenceGeomList(id, searchGeoReq);
   }
 
   /**
@@ -80,13 +80,13 @@ public class InferenceResultCoreService {
       @NotNull Long analyId, InferenceResultDto.SearchGeoReq searchReq) {
     // 분석 ID 에 해당하는 dataids를 가져온다.
     List<Long> dataIds =
-        inferenceResultRepository.listAnalyGeom(analyId).stream()
+        mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
             .mapToLong(MapSheetAnalDataEntity::getId)
             .boxed()
            .toList();
     // 해당데이터의 폴리곤데이터를 가져온다
     Page<MapSheetAnalDataGeomEntity> mapSheetAnalDataGeomEntities =
-        inferenceResultRepository.listInferenceResultWithGeom(dataIds, searchReq);
+        mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
     return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataGeomEntity::toEntity);
   }
 
@@ -97,13 +97,13 @@ public class InferenceResultCoreService {
    * @return
    */
   public List<Long> getSheets(Long id) {
-    return inferenceResultRepository.getSheets(id);
+    return mapSheetAnalDataRepository.getSheets(id);
   }
 
   @Transactional(readOnly = true)
   public List<MapInkx5kEntity> listGetScenes5k(Long analyId) {
     List<String> sceneCodes =
-        inferenceResultRepository.listAnalyGeom(analyId).stream()
+        mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
             .mapToLong(MapSheetAnalDataEntity::getMapSheetNum)
             .mapToObj(String::valueOf)
             .toList();
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java
new file mode 100644
index 00000000..ff061b53
--- /dev/null
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java
@@ -0,0 +1,59 @@
+package com.kamco.cd.kamcoback.postgres.core;
+
+import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
+import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.BiConsumer;
+import lombok.RequiredArgsConstructor;
+import org.hibernate.ScrollableResults;
+import org.springframework.stereotype.Service;
+import org.springframework.transaction.annotation.Transactional;
+
+@Service
+@RequiredArgsConstructor
+public class InferenceResultShpCoreService {
+
+  private final InferenceResultRepository inferenceResultRepository;
+
+  public record ShpKey(Integer stage, Long mapId, Integer input1, Integer input2) {
+
+  }
+
+  /**
+   * DB를 스트리밍하면서 그룹이 완성될 때마다 handler로 넘긴다.
+   * handler: (key, groupRows)
+   */
+  @Transactional(readOnly = true)
+  public void streamGrouped(int fetchSize, BiConsumer<ShpKey, List<InferenceResultEntity>> handler) {
+
+    ScrollableResults cursor = inferenceResultRepository.scrollAllOrdered(fetchSize);
+
+    ShpKey currentKey = null;
+    List<InferenceResultEntity> buffer = new ArrayList<>(2000);
+
+    try {
+      while (cursor.next()) {
+        InferenceResultEntity row = (InferenceResultEntity) cursor.get();
+
+        ShpKey key = new ShpKey(row.getStage(), row.getMapId(), row.getInput1(), row.getInput2());
+
+        // 키 변경 -> 이전 그룹 완료
+        if (currentKey != null && !currentKey.equals(key)) {
+          handler.accept(currentKey, buffer);
+          buffer = new ArrayList<>(2000);
+        }
+
+        currentKey = key;
+        buffer.add(row);
+      }
+
+      // 마지막 그룹
+      if (currentKey != null && !buffer.isEmpty()) {
+        handler.accept(currentKey, buffer);
+      }
+
+    } finally {
+      cursor.close();
+    }
+  }
+}
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/InferenceResultEntity.java b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/InferenceResultEntity.java
new file mode 100644
index 00000000..a4c731b6
--- /dev/null
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/InferenceResultEntity.java
@@ -0,0 +1,79 @@
+package com.kamco.cd.kamcoback.postgres.entity;
+
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
+import jakarta.validation.constraints.NotNull;
+import jakarta.validation.constraints.Size;
+import java.time.ZonedDateTime;
+import java.util.UUID;
+import lombok.Getter;
+import lombok.Setter;
+import org.hibernate.annotations.ColumnDefault;
+import org.locationtech.jts.geom.Geometry;
+
+@Getter
+@Setter
+@Entity
+@Table(name = "inference_results")
+public class InferenceResultEntity {
+
+  @Id
+  @GeneratedValue(strategy = GenerationType.IDENTITY)
+  @Column(name = "uid", nullable = false)
+  private Long id;
+
+  @NotNull
+  @ColumnDefault("uuid_generate_v4()")
+  @Column(name = "uuid", nullable = false)
+  private UUID uuid;
+
+  @Column(name = "stage")
+  private Integer stage;
+
+  @Column(name = "cd_prob")
+  private Float cdProb;
+
+  @Column(name = "input1")
+  private Integer input1;
+
+  @Column(name = "input2")
+  private Integer input2;
+
+  @Column(name = "map_id")
+  private Long mapId;
+
+  @Size(max = 20)
+  @Column(name = "before_class", length = 20)
+  private String beforeClass;
+
+  @Column(name = "before_probability")
+  private Float beforeProbability;
+
+  @Size(max = 20)
+  @Column(name = "after_class", length = 20)
+  private String afterClass;
+
+  @Column(name = "after_probability")
+  private Float afterProbability;
+
+  @ColumnDefault("st_area(geometry)")
+  @Column(name = "area")
+  private Float area;
+
+  @NotNull
+  @ColumnDefault("now()")
+  @Column(name = "created_dttm", nullable = false)
+  private ZonedDateTime createdDttm;
+
+  @NotNull
+  @ColumnDefault("now()")
+  @Column(name = "updated_dttm", nullable = false)
+  private ZonedDateTime updatedDttm;
+
+  @Column(name = "geometry", columnDefinition = "geometry not null")
+  private Geometry geometry;
+}
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceEntity.java b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceEntity.java
new file mode 100644
index 00000000..56c1866d
--- /dev/null
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceEntity.java
@@ -0,0 +1,164 @@
+package com.kamco.cd.kamcoback.postgres.entity;
+
+import 
jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Table; +import jakarta.validation.constraints.Size; +import java.time.ZonedDateTime; +import lombok.Getter; +import lombok.Setter; +import org.hibernate.annotations.ColumnDefault; + +@Getter +@Setter +@Entity +@Table(name = "tb_map_sheet_anal_data_inference") +public class MapSheetAnalDataInferenceEntity { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Column(name = "data_uid", nullable = false) + private Long id; + + @Size(max = 128) + @Column(name = "data_name", length = 128) + private String dataName; + + @Size(max = 255) + @Column(name = "data_path") + private String dataPath; + + @Size(max = 128) + @Column(name = "data_type", length = 128) + private String dataType; + + @Size(max = 128) + @Column(name = "data_crs_type", length = 128) + private String dataCrsType; + + @Size(max = 255) + @Column(name = "data_crs_type_name") + private String dataCrsTypeName; + + @ColumnDefault("now()") + @Column(name = "created_dttm") + private ZonedDateTime createdDttm; + + @Column(name = "created_uid") + private Long createdUid; + + @ColumnDefault("now()") + @Column(name = "updated_dttm") + private ZonedDateTime updatedDttm; + + @Column(name = "updated_uid") + private Long updatedUid; + + @Column(name = "compare_yyyy") + private Integer compareYyyy; + + @Column(name = "target_yyyy") + private Integer targetYyyy; + + @Column(name = "data_json", length = Integer.MAX_VALUE) + private String dataJson; + + @Size(max = 20) + @ColumnDefault("'0'") + @Column(name = "data_state", length = 20) + private String dataState; + + @ColumnDefault("now()") + @Column(name = "data_state_dttm") + private ZonedDateTime dataStateDttm; + + @Column(name = "anal_strt_dttm") + private ZonedDateTime analStrtDttm; + + @Column(name = "anal_end_dttm") + private ZonedDateTime analEndDttm; + + @ColumnDefault("0") + @Column(name = "anal_sec") + private Long analSec; + + @Size(max = 20) + @Column(name = "anal_state", length = 20) + private String analState; + + @Column(name = "anal_uid") + private Long analUid; + + @Column(name = "map_sheet_num") + private Long mapSheetNum; + + @ColumnDefault("0") + @Column(name = "detecting_cnt") + private Long detectingCnt; + + @ColumnDefault("0") + @Column(name = "pnu") + private Long pnu; + + @Size(max = 20) + @Column(name = "down_state", length = 20) + private String downState; + + @Column(name = "down_state_dttm") + private ZonedDateTime downStateDttm; + + @Size(max = 20) + @Column(name = "fit_state", length = 20) + private String fitState; + + @Column(name = "fit_state_dttm") + private ZonedDateTime fitStateDttm; + + @Column(name = "labeler_uid") + private Long labelerUid; + + @Size(max = 20) + @ColumnDefault("NULL") + @Column(name = "label_state", length = 20) + private String labelState; + + @Column(name = "label_state_dttm") + private ZonedDateTime labelStateDttm; + + @Column(name = "tester_uid") + private Long testerUid; + + @Size(max = 20) + @Column(name = "test_state", length = 20) + private String testState; + + @Column(name = "test_state_dttm") + private ZonedDateTime testStateDttm; + + @Column(name = "fit_state_cmmnt", length = Integer.MAX_VALUE) + private String fitStateCmmnt; + + @Column(name = "ref_map_sheet_num") + private Long refMapSheetNum; + + @Column(name = "stage") + private Integer stage; + + @Column(name = "file_created_yn") + private Boolean 
fileCreatedYn; + + @Size(max = 100) + @Column(name = "m1", length = 100) + private String m1; + + @Size(max = 100) + @Column(name = "m2", length = 100) + private String m2; + + @Size(max = 100) + @Column(name = "m3", length = 100) + private String m3; +} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceGeomEntity.java b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceGeomEntity.java new file mode 100644 index 00000000..05d8ccaa --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceGeomEntity.java @@ -0,0 +1,150 @@ +package com.kamco.cd.kamcoback.postgres.entity; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Table; +import jakarta.validation.constraints.Size; +import java.time.OffsetDateTime; +import java.util.UUID; +import lombok.Getter; +import lombok.Setter; +import org.hibernate.annotations.ColumnDefault; +import org.locationtech.jts.geom.Geometry; + +@Getter +@Setter +@Entity +@Table(name = "tb_map_sheet_anal_data_inference_geom") +public class MapSheetAnalDataInferenceGeomEntity { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Column(name = "geo_uid") + private Long geoUid; + + @Column(name = "cd_prob") + private Float cdProb; + + @Size(max = 40) + @Column(name = "class_before_cd", length = 40) + private String classBeforeCd; + + @Column(name = "class_before_prob") + private Float classBeforeProb; + + @Size(max = 40) + @Column(name = "class_after_cd", length = 40) + private String classAfterCd; + + @Column(name = "class_after_prob") + private Float classAfterProb; + + @Column(name = "map_sheet_num") + private Long mapSheetNum; + + @Column(name = "compare_yyyy") + private Integer compareYyyy; + + @Column(name = "target_yyyy") + private Integer targetYyyy; + + @Column(name = "area") + private Float area; + + @Size(max = 100) + @Column(name = "geo_type", length = 100) + private String geoType; + + @Column(name = "data_uid") + private Long dataUid; + + @ColumnDefault("now()") + @Column(name = "created_dttm") + private OffsetDateTime createdDttm; + + @Column(name = "created_uid") + private Long createdUid; + + @ColumnDefault("now()") + @Column(name = "updated_dttm") + private OffsetDateTime updatedDttm; + + @Column(name = "updated_uid") + private Long updatedUid; + + @ColumnDefault("0") + @Column(name = "geom_cnt") + private Long geomCnt; + + @ColumnDefault("0") + @Column(name = "pnu") + private Long pnu; + + @Size(max = 20) + @ColumnDefault("'0'") + @Column(name = "fit_state", length = 20) + private String fitState; + + @ColumnDefault("now()") + @Column(name = "fit_state_dttm") + private OffsetDateTime fitStateDttm; + + @Column(name = "labeler_uid") + private Long labelerUid; + + @Size(max = 20) + @ColumnDefault("'0'") + @Column(name = "label_state", length = 20) + private String labelState; + + @ColumnDefault("now()") + @Column(name = "label_state_dttm") + private OffsetDateTime labelStateDttm; + + @Column(name = "tester_uid") + private Long testerUid; + + @Size(max = 20) + @ColumnDefault("'0'") + @Column(name = "test_state", length = 20) + private String testState; + + @ColumnDefault("now()") + @Column(name = "test_state_dttm") + private OffsetDateTime testStateDttm; + + @Column(name = "fit_state_cmmnt", length = Integer.MAX_VALUE) + private String fitStateCmmnt; + + @Column(name = 
"ref_map_sheet_num") + private Long refMapSheetNum; + + @ColumnDefault("uuid_generate_v4()") + @Column(name = "uuid") + private UUID uuid; + + @Column(name = "stage") + private Integer stage; + + @Column(name = "map_5k_id") + private Long map5kId; + + @Column(name = "file_created_yn") + private Boolean fileCreatedYn; + + + @Column(name = "geom", columnDefinition = "geometry") + private Geometry geom; + + + @Column(name = "geom_center", columnDefinition = "geometry") + private Geometry geomCenter; + + + @Column(name = "before_geom", columnDefinition = "geometry") + private Geometry beforeGeom; + +} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepository.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepository.java index 524b462a..e9420572 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepository.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepository.java @@ -1,7 +1,7 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference; -import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity; +import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity; import org.springframework.data.jpa.repository.JpaRepository; public interface InferenceResultRepository - extends JpaRepository, InferenceResultRepositoryCustom {} + extends JpaRepository, InferenceResultRepositoryCustom {} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java index 7b9ac305..306d2021 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java @@ -1,31 +1,9 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference; -import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; -import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard; -import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq; -import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity; -import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity; -import jakarta.validation.constraints.NotNull; -import java.util.List; -import java.util.Optional; -import org.springframework.data.domain.Page; +import org.hibernate.ScrollableResults; public interface InferenceResultRepositoryCustom { - Page getInferenceResultList( - InferenceResultDto.SearchReq searchReq); + ScrollableResults scrollAllOrdered(int fetchSize); - Optional getInferenceResultSummary(Long id); - - Page getInferenceGeomList( - Long id, InferenceResultDto.SearchGeoReq searchGeoReq); - - Page listInferenceResultWithGeom( - List dataIds, SearchGeoReq searchReq); - - List getSheets(Long id); - - List getDashboard(Long id); - - List listAnalyGeom(@NotNull Long id); } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java index 58e7e818..7dc992f5 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java +++ 
b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java @@ -1,27 +1,13 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference; -import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; -import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard; -import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq; -import com.kamco.cd.kamcoback.postgres.entity.*; -import com.querydsl.core.BooleanBuilder; -import com.querydsl.core.types.Order; -import com.querydsl.core.types.OrderSpecifier; -import com.querydsl.core.types.Projections; -import com.querydsl.core.types.dsl.BooleanExpression; -import com.querydsl.core.types.dsl.Expressions; -import com.querydsl.jpa.JPAExpressions; -import com.querydsl.jpa.JPQLQuery; +import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity; +import com.kamco.cd.kamcoback.postgres.entity.QInferenceResultEntity; import com.querydsl.jpa.impl.JPAQuery; import com.querydsl.jpa.impl.JPAQueryFactory; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; import lombok.RequiredArgsConstructor; -import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageImpl; -import org.springframework.data.domain.Pageable; -import org.springframework.data.domain.Sort; +import org.hibernate.ScrollMode; +import org.hibernate.ScrollableResults; +import org.hibernate.query.Query; import org.springframework.stereotype.Repository; @Repository @@ -29,335 +15,27 @@ import org.springframework.stereotype.Repository; public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom { private final JPAQueryFactory queryFactory; - private final QModelMngBakEntity tmm = QModelMngBakEntity.modelMngBakEntity; - private final QModelVerEntity tmv = QModelVerEntity.modelVerEntity; - private final QMapSheetAnalEntity mapSheetAnalEntity = QMapSheetAnalEntity.mapSheetAnalEntity; - private final QMapSheetAnalDataEntity mapSheetAnalDataEntity = - QMapSheetAnalDataEntity.mapSheetAnalDataEntity; - private final QMapSheetAnalDataGeomEntity mapSheetAnalDataGeomEntity = - QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity; - private final QMapSheetAnalSttcEntity mapSheetAnalSttcEntity = - QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity; - /** - * 분석결과 목록 조회 - * - * @param searchReq - * @return - */ - @Override - public Page getInferenceResultList( - InferenceResultDto.SearchReq searchReq) { - Pageable pageable = searchReq.toPageable(); - // "0000" 전체조회 - BooleanBuilder builder = new BooleanBuilder(); - if (searchReq.getStatCode() != null && !"0000".equals(searchReq.getStatCode())) { - builder.and(mapSheetAnalEntity.analState.eq(searchReq.getStatCode())); - } + public ScrollableResults scrollAllOrdered(int fetchSize) { + QInferenceResultEntity e = QInferenceResultEntity.inferenceResultEntity; - // 제목 - if (searchReq.getTitle() != null) { - builder.and(mapSheetAnalEntity.analTitle.like("%" + searchReq.getTitle() + "%")); - } + JPAQuery q = + queryFactory + .selectFrom(e) + .orderBy( + e.stage.asc(), + e.mapId.asc(), + e.input1.asc(), + e.input2.asc(), + e.id.asc() + ); - List content = - queryFactory - .select( - Projections.constructor( - InferenceResultDto.AnalResList.class, - mapSheetAnalEntity.id, - mapSheetAnalEntity.analTitle, - mapSheetAnalEntity.analMapSheet, - mapSheetAnalEntity.detectingCnt, - mapSheetAnalEntity.analStrtDttm, - mapSheetAnalEntity.analEndDttm, - mapSheetAnalEntity.analSec, - mapSheetAnalEntity.analPredSec, - 
mapSheetAnalEntity.analState, - Expressions.stringTemplate( - "fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState), - mapSheetAnalEntity.gukyuinUsed)) - .from(mapSheetAnalEntity) - .where(builder) - .offset(pageable.getOffset()) - .limit(pageable.getPageSize()) - .orderBy(mapSheetAnalEntity.id.desc()) - .fetch(); + // QueryDSL -> Hibernate Query로 unwrap 해서 커서 스트리밍 + Query hQuery = q.createQuery().unwrap(Query.class); - long total = - queryFactory - .select(mapSheetAnalEntity.id) - .from(mapSheetAnalEntity) - .where(builder) - .fetchCount(); - - return new PageImpl<>(content, pageable, total); - } - - /** - * 분석결과 요약정보 - * - * @param id - * @return - */ - @Override - public Optional getInferenceResultSummary(Long id) { - - // 1. 최신 버전 UID를 가져오는 서브쿼리 - JPQLQuery latestVerUidSub = - JPAExpressions.select(tmv.id.max()).from(tmv).where(tmv.modelUid.eq(tmm.id)); - - Optional content = - Optional.ofNullable( - queryFactory - .select( - Projections.constructor( - InferenceResultDto.AnalResSummary.class, - mapSheetAnalEntity.id, - mapSheetAnalEntity.analTitle, - tmm.modelNm.concat(" ").concat(tmv.modelVer).as("modelInfo"), - mapSheetAnalEntity.targetYyyy, - mapSheetAnalEntity.compareYyyy, - mapSheetAnalEntity.analMapSheet, - mapSheetAnalEntity.analStrtDttm, - mapSheetAnalEntity.analEndDttm, - mapSheetAnalEntity.analSec, - mapSheetAnalEntity.analPredSec, - mapSheetAnalEntity.resultUrl, - mapSheetAnalEntity.detectingCnt, - mapSheetAnalEntity.accuracy, - mapSheetAnalEntity.analState, - Expressions.stringTemplate( - "fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState))) - .from(mapSheetAnalEntity) - .leftJoin(tmm) - .on(mapSheetAnalEntity.modelUid.eq(tmm.id)) - .leftJoin(tmv) - .on(tmv.modelUid.eq(tmm.id).and(tmv.id.eq(latestVerUidSub))) - .where(mapSheetAnalEntity.id.eq(id)) - .fetchOne()); - return content; - } - - /** - * 분석결과 상세 class name별 탐지 개수 - * - * @param id - * @return - */ - @Override - public List getDashboard(Long id) { - return queryFactory - .select( - Projections.constructor( - Dashboard.class, - mapSheetAnalSttcEntity.id.classAfterCd, - mapSheetAnalSttcEntity.classAfterCnt.sum())) - .from(mapSheetAnalSttcEntity) - .where(mapSheetAnalSttcEntity.id.analUid.eq(id)) - .groupBy(mapSheetAnalSttcEntity.id.classAfterCd) - .orderBy(mapSheetAnalSttcEntity.id.classAfterCd.asc()) - .fetch(); - } - - @Override - public List listAnalyGeom(Long id) { - QMapSheetAnalDataEntity analy = QMapSheetAnalDataEntity.mapSheetAnalDataEntity; - return queryFactory.selectFrom(analy).where(analy.analUid.eq(id)).fetch(); - } - - /** - * 분석결과 상세 목록 - * - * @param searchReq - * @return - */ - @Override - public Page listInferenceResultWithGeom( - List ids, SearchGeoReq searchReq) { - - // 분석 차수 - QMapSheetAnalDataGeomEntity detectedEntity = - QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity; - Pageable pageable = searchReq.toPageable(); - - // 검색조건 - JPAQuery query = - queryFactory - .selectFrom(detectedEntity) - .where( - detectedEntity.dataUid.in(ids), - eqTargetClass(detectedEntity, searchReq.getTargetClass()), - eqCompareClass(detectedEntity, searchReq.getCompareClass()), - containsMapSheetNum(detectedEntity, searchReq.getMapSheetNum())); - - // count - long total = query.fetchCount(); - - // Pageable에서 정렬 가져오기, 없으면 기본 정렬(createdDttm desc) 사용 - List> orders = getOrderSpecifiers(pageable.getSort()); - if (orders.isEmpty()) { - orders.add(detectedEntity.createdDttm.desc()); - } - - List content = - query - .offset(pageable.getOffset()) - .limit(pageable.getPageSize()) - 
.orderBy(orders.toArray(new OrderSpecifier[0])) - .fetch(); - - return new PageImpl<>(content, pageable, total); - } - - /** - * 분석결과 상세 목록 - * - * @param searchGeoReq - * @return - */ - @Override - public Page getInferenceGeomList(Long id, SearchGeoReq searchGeoReq) { - Pageable pageable = searchGeoReq.toPageable(); - BooleanBuilder builder = new BooleanBuilder(); - - // 추론결과 id - builder.and(mapSheetAnalEntity.id.eq(id)); - - // 기준년도 분류 - if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")) { - builder.and( - mapSheetAnalDataGeomEntity - .classAfterCd - .toLowerCase() - .eq(searchGeoReq.getTargetClass().toLowerCase())); - } - - // 비교년도 분류 - if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")) { - builder.and( - mapSheetAnalDataGeomEntity - .classBeforeCd - .toLowerCase() - .eq(searchGeoReq.getCompareClass().toLowerCase())); - } - - // 분석도엽 - if (searchGeoReq.getMapSheetNum() != null && !searchGeoReq.getMapSheetNum().isEmpty()) { - List mapSheetNum = searchGeoReq.getMapSheetNum(); - builder.and(mapSheetAnalDataGeomEntity.mapSheetNum.in(mapSheetNum)); - } - - List content = - queryFactory - .select( - Projections.constructor( - InferenceResultDto.Geom.class, - mapSheetAnalDataGeomEntity.compareYyyy, - mapSheetAnalDataGeomEntity.targetYyyy, - mapSheetAnalDataGeomEntity.classBeforeCd, - mapSheetAnalDataGeomEntity.classBeforeProb, - mapSheetAnalDataGeomEntity.classAfterCd, - mapSheetAnalDataGeomEntity.classAfterProb, - mapSheetAnalDataGeomEntity.mapSheetNum, - mapSheetAnalDataGeomEntity.geom, - mapSheetAnalDataGeomEntity.geomCenter)) - .from(mapSheetAnalEntity) - .join(mapSheetAnalDataEntity) - .on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id)) - .join(mapSheetAnalDataGeomEntity) - .on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id)) - .where(builder) - .offset(pageable.getOffset()) - .limit(pageable.getPageSize()) - .fetch(); - - long total = - queryFactory - .select(mapSheetAnalDataGeomEntity.id) - .from(mapSheetAnalEntity) - .join(mapSheetAnalDataEntity) - .on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id)) - .join(mapSheetAnalDataGeomEntity) - .on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id)) - .where(builder) - .fetchCount(); - - return new PageImpl<>(content, pageable, total); - } - - /** - * 추론된 5000:1 도엽 목록 - * - * @param id - * @return - */ - @Override - public List getSheets(Long id) { - return queryFactory - .select(mapSheetAnalDataEntity.mapSheetNum) - .from(mapSheetAnalEntity) - .join(mapSheetAnalDataEntity) - .on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id)) - .where(mapSheetAnalEntity.id.eq(id)) - .groupBy(mapSheetAnalDataEntity.mapSheetNum) - .fetch(); - } - - /** Pageable의 Sort를 QueryDSL OrderSpecifier로 변환 */ - @SuppressWarnings({"unchecked", "rawtypes"}) - private List> getOrderSpecifiers(Sort sort) { - List> orders = new ArrayList<>(); - - if (sort.isSorted()) { - QMapSheetAnalDataGeomEntity entity = QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity; - - for (Sort.Order order : sort) { - Order direction = order.isAscending() ? 
Order.ASC : Order.DESC; - String property = order.getProperty(); - - // 유효한 필드만 처리 - switch (property) { - case "classBeforeCd" -> orders.add(new OrderSpecifier(direction, entity.classBeforeCd)); - case "classBeforeProb" -> - orders.add(new OrderSpecifier(direction, entity.classBeforeProb)); - case "classAfterCd" -> orders.add(new OrderSpecifier(direction, entity.classAfterCd)); - case "classAfterProb" -> orders.add(new OrderSpecifier(direction, entity.classAfterProb)); - case "mapSheetNum" -> orders.add(new OrderSpecifier(direction, entity.mapSheetNum)); - case "compareYyyy" -> orders.add(new OrderSpecifier(direction, entity.compareYyyy)); - case "targetYyyy" -> orders.add(new OrderSpecifier(direction, entity.targetYyyy)); - case "area" -> orders.add(new OrderSpecifier(direction, entity.area)); - case "createdDttm" -> orders.add(new OrderSpecifier(direction, entity.createdDttm)); - case "updatedDttm" -> orders.add(new OrderSpecifier(direction, entity.updatedDttm)); - // 유효하지 않은 필드는 무시 - default -> {} - } - } - } - - return orders; - } - - private BooleanExpression eqTargetClass( - QMapSheetAnalDataGeomEntity detectedEntity, String targetClass) { - return targetClass != null && !targetClass.isEmpty() - ? detectedEntity.classAfterCd.toLowerCase().eq(targetClass.toLowerCase()) - : null; - } - - private BooleanExpression eqCompareClass( - QMapSheetAnalDataGeomEntity detectedEntity, String compareClass) { - return compareClass != null && !compareClass.isEmpty() - ? detectedEntity.classBeforeCd.toLowerCase().eq(compareClass.toLowerCase()) - : null; - } - - private BooleanExpression containsMapSheetNum( - QMapSheetAnalDataGeomEntity detectedEntity, List mapSheet) { - if (mapSheet == null || mapSheet.isEmpty()) { - return null; - } - - return detectedEntity.mapSheetNum.in(mapSheet); + return hQuery + .setReadOnly(true) + .setFetchSize(fetchSize) // PostgreSQL 커서/스트리밍에 영향 + .scroll(ScrollMode.FORWARD_ONLY); } } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepository.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepository.java new file mode 100644 index 00000000..8b104ec7 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepository.java @@ -0,0 +1,8 @@ +package com.kamco.cd.kamcoback.postgres.repository.Inference; + +import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity; +import org.springframework.data.jpa.repository.JpaRepository; + +public interface MapSheetAnalDataInferenceGeomRepository + extends JpaRepository, + MapSheetAnalDataInferenceGeomRepositoryCustom {} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryCustom.java new file mode 100644 index 00000000..7ba4b827 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryCustom.java @@ -0,0 +1,3 @@ +package com.kamco.cd.kamcoback.postgres.repository.Inference; + +public interface MapSheetAnalDataInferenceGeomRepositoryCustom {} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryImpl.java new 
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepository.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepository.java new file mode 100644 index 00000000..8b104ec7 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepository.java @@ -0,0 +1,8 @@ +package com.kamco.cd.kamcoback.postgres.repository.Inference; + +import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity; +import org.springframework.data.jpa.repository.JpaRepository; + +public interface MapSheetAnalDataInferenceGeomRepository + extends JpaRepository<MapSheetAnalDataInferenceGeomEntity, Long>, + MapSheetAnalDataInferenceGeomRepositoryCustom {} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryCustom.java new file mode 100644 index 00000000..7ba4b827 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryCustom.java @@ -0,0 +1,3 @@ +package com.kamco.cd.kamcoback.postgres.repository.Inference; + +public interface MapSheetAnalDataInferenceGeomRepositoryCustom {} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryImpl.java new file mode 100644 index 00000000..4437987d --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryImpl.java @@ -0,0 +1,11 @@ +package com.kamco.cd.kamcoback.postgres.repository.Inference; + +import lombok.RequiredArgsConstructor; +import org.springframework.stereotype.Repository; + +@Repository +@RequiredArgsConstructor +public class MapSheetAnalDataInferenceGeomRepositoryImpl + implements MapSheetAnalDataInferenceGeomRepositoryCustom { + +} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepository.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepository.java new file mode 100644 index 00000000..0a8b8417 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepository.java @@ -0,0 +1,10 @@ +package com.kamco.cd.kamcoback.postgres.repository.Inference; + +import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity; +import org.springframework.data.jpa.repository.JpaRepository; + +public interface MapSheetAnalDataInferenceRepository + extends JpaRepository<MapSheetAnalDataInferenceEntity, Long>, + MapSheetAnalDataInferenceRepositoryCustom { + +} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepositoryCustom.java new file mode 100644 index 00000000..edcb6120 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepositoryCustom.java @@ -0,0 +1,3 @@ +package com.kamco.cd.kamcoback.postgres.repository.Inference; + +public interface MapSheetAnalDataInferenceRepositoryCustom {} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepositoryImpl.java new file mode 100644 index 00000000..352a512b --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepositoryImpl.java @@ -0,0 +1,13 @@ +package com.kamco.cd.kamcoback.postgres.repository.Inference; + +import com.querydsl.jpa.impl.JPAQueryFactory; +import lombok.RequiredArgsConstructor; +import org.springframework.stereotype.Repository; + +@Repository +@RequiredArgsConstructor +public class MapSheetAnalDataInferenceRepositoryImpl + implements MapSheetAnalDataInferenceRepositoryCustom { + + private final JPAQueryFactory queryFactory; +} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataRepository.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataRepository.java new file mode 100644 index 00000000..870315d3 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataRepository.java @@ -0,0 +1,7 @@ +package com.kamco.cd.kamcoback.postgres.repository.Inference; + +import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity; +import org.springframework.data.jpa.repository.JpaRepository; + +public interface MapSheetAnalDataRepository + extends JpaRepository<MapSheetAnalEntity, Long>, MapSheetAnalDataRepositoryCustom {} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataRepositoryCustom.java new file mode 100644 index 00000000..3a416b15 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataRepositoryCustom.java @@ -0,0 +1,32 @@ +package com.kamco.cd.kamcoback.postgres.repository.Inference; + +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResList; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResSummary; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq; +import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity; +import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity; +import jakarta.validation.constraints.NotNull; +import java.util.List; +import java.util.Optional; +import org.springframework.data.domain.Page; + +public interface MapSheetAnalDataRepositoryCustom { + + Page<AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq); + + Optional<AnalResSummary> getInferenceResultSummary(Long id); + + Page<InferenceResultDto.Geom> getInferenceGeomList( + Long id, InferenceResultDto.SearchGeoReq searchGeoReq); + + Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom( + List<Long> dataIds, SearchGeoReq searchReq); + + List getSheets(Long id); + + List<Dashboard> getDashboard(Long id); + + List<MapSheetAnalDataEntity> listAnalyGeom(@NotNull Long id); +}
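The composed repository above follows Spring Data's fragment pattern: the main interface inherits CRUD from JpaRepository and the QueryDSL methods from the Custom fragment, so callers inject a single bean. A hypothetical caller (service name and method are illustrative, not part of this patch):

    @Service
    @RequiredArgsConstructor
    public class InferenceResultQueryService {
      private final MapSheetAnalDataRepository repository;

      public Page<AnalResList> search(InferenceResultDto.SearchReq req) {
        return repository.getInferenceResultList(req); // resolved by MapSheetAnalDataRepositoryImpl
      }
    }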
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataRepositoryImpl.java new file mode 100644 index 00000000..05d567a0 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataRepositoryImpl.java @@ -0,0 +1,371 @@ +package com.kamco.cd.kamcoback.postgres.repository.Inference; + +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResList; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResSummary; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq; +import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity; +import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity; +import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataEntity; +import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity; +import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity; +import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity; +import com.kamco.cd.kamcoback.postgres.entity.QModelMngBakEntity; +import com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity; +import com.querydsl.core.BooleanBuilder; +import com.querydsl.core.types.Order; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.core.types.Projections; +import com.querydsl.core.types.dsl.BooleanExpression; +import com.querydsl.core.types.dsl.Expressions; +import com.querydsl.jpa.JPAExpressions; +import com.querydsl.jpa.JPQLQuery; +import com.querydsl.jpa.impl.JPAQuery; +import com.querydsl.jpa.impl.JPAQueryFactory; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import lombok.RequiredArgsConstructor; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.stereotype.Repository; + +@Repository +@RequiredArgsConstructor +public class MapSheetAnalDataRepositoryImpl implements MapSheetAnalDataRepositoryCustom { + + private final JPAQueryFactory queryFactory; + private final QModelMngBakEntity tmm = QModelMngBakEntity.modelMngBakEntity; + private final QModelVerEntity tmv = QModelVerEntity.modelVerEntity; + private final QMapSheetAnalEntity mapSheetAnalEntity = QMapSheetAnalEntity.mapSheetAnalEntity; + private final QMapSheetAnalDataEntity mapSheetAnalDataEntity = + QMapSheetAnalDataEntity.mapSheetAnalDataEntity; + private final QMapSheetAnalDataGeomEntity mapSheetAnalDataGeomEntity = + QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity; + private final QMapSheetAnalSttcEntity mapSheetAnalSttcEntity = + QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity; + + /** + * Retrieves the analysis result list + * + * @param searchReq + * @return + */ + @Override + public Page<AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq) { + Pageable pageable = searchReq.toPageable(); + // "0000" = fetch all states + BooleanBuilder builder = new BooleanBuilder(); + if (searchReq.getStatCode() != null && !"0000".equals(searchReq.getStatCode())) { + builder.and(mapSheetAnalEntity.analState.eq(searchReq.getStatCode())); + } + + // title + if (searchReq.getTitle() != null) { + builder.and(mapSheetAnalEntity.analTitle.like("%" + searchReq.getTitle() + "%")); + } + + List<AnalResList> content = + queryFactory + .select( + Projections.constructor( + InferenceResultDto.AnalResList.class, + mapSheetAnalEntity.id, + mapSheetAnalEntity.analTitle, + mapSheetAnalEntity.analMapSheet, + mapSheetAnalEntity.detectingCnt, + mapSheetAnalEntity.analStrtDttm, + mapSheetAnalEntity.analEndDttm, + mapSheetAnalEntity.analSec, + mapSheetAnalEntity.analPredSec, + mapSheetAnalEntity.analState, + Expressions.stringTemplate( + "fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState), + mapSheetAnalEntity.gukyuinUsed)) + .from(mapSheetAnalEntity) + .where(builder) + .offset(pageable.getOffset()) + .limit(pageable.getPageSize()) + .orderBy(mapSheetAnalEntity.id.desc()) + .fetch(); + + long total = + queryFactory + .select(mapSheetAnalEntity.id) + .from(mapSheetAnalEntity) + .where(builder) + .fetchCount(); + + return new PageImpl<>(content, pageable, total); + } + + /** + * Analysis result summary + * + * @param id + * @return + */ + @Override + public Optional<AnalResSummary> getInferenceResultSummary(Long id) { + + // 1. subquery fetching the latest version UID + JPQLQuery<Long> latestVerUidSub = + JPAExpressions.select(tmv.id.max()).from(tmv).where(tmv.modelUid.eq(tmm.id)); + + Optional<AnalResSummary> content = + Optional.ofNullable( + queryFactory + .select( + Projections.constructor( + InferenceResultDto.AnalResSummary.class, + mapSheetAnalEntity.id, + mapSheetAnalEntity.analTitle, + tmm.modelNm.concat(" ").concat(tmv.modelVer).as("modelInfo"), + mapSheetAnalEntity.targetYyyy, + mapSheetAnalEntity.compareYyyy, + mapSheetAnalEntity.analMapSheet, + mapSheetAnalEntity.analStrtDttm, + mapSheetAnalEntity.analEndDttm, + mapSheetAnalEntity.analSec, + mapSheetAnalEntity.analPredSec, + mapSheetAnalEntity.resultUrl, + mapSheetAnalEntity.detectingCnt, + mapSheetAnalEntity.accuracy, + mapSheetAnalEntity.analState, + Expressions.stringTemplate( + "fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState))) + .from(mapSheetAnalEntity) + .leftJoin(tmm) + .on(mapSheetAnalEntity.modelUid.eq(tmm.id)) + .leftJoin(tmv) + .on(tmv.modelUid.eq(tmm.id).and(tmv.id.eq(latestVerUidSub))) + .where(mapSheetAnalEntity.id.eq(id)) + .fetchOne()); + return content; + } + + /** + * Detection counts per class name for the analysis result detail + * + * @param id + * @return + */ + @Override + public List<Dashboard> getDashboard(Long id) { + return queryFactory + .select( + Projections.constructor( + Dashboard.class, + mapSheetAnalSttcEntity.id.classAfterCd, + mapSheetAnalSttcEntity.classAfterCnt.sum())) + .from(mapSheetAnalSttcEntity) + .where(mapSheetAnalSttcEntity.id.analUid.eq(id)) + .groupBy(mapSheetAnalSttcEntity.id.classAfterCd) + .orderBy(mapSheetAnalSttcEntity.id.classAfterCd.asc()) + .fetch(); + } + + @Override + public List<MapSheetAnalDataEntity> listAnalyGeom(Long id) { + QMapSheetAnalDataEntity analy = QMapSheetAnalDataEntity.mapSheetAnalDataEntity; + return queryFactory.selectFrom(analy).where(analy.analUid.eq(id)).fetch(); + } + + /** + * Detailed analysis result list + * + * @param searchReq + * @return + */ + @Override + public Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom( + List<Long> ids, SearchGeoReq searchReq) { + + // analysis round + QMapSheetAnalDataGeomEntity detectedEntity = + QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity; + Pageable pageable = searchReq.toPageable(); + + // search conditions + JPAQuery<MapSheetAnalDataGeomEntity> query = + queryFactory + .selectFrom(detectedEntity) + .where( + detectedEntity.dataUid.in(ids), + eqTargetClass(detectedEntity, searchReq.getTargetClass()), + eqCompareClass(detectedEntity, searchReq.getCompareClass()), + containsMapSheetNum(detectedEntity, searchReq.getMapSheetNum())); + + // count + long total = query.fetchCount(); + + // take the sort from the Pageable; fall back to the default sort (createdDttm desc) + List<OrderSpecifier<?>> orders = getOrderSpecifiers(pageable.getSort()); + if (orders.isEmpty()) { + orders.add(detectedEntity.createdDttm.desc()); + } + + List<MapSheetAnalDataGeomEntity> content = + query + .offset(pageable.getOffset()) + .limit(pageable.getPageSize()) + .orderBy(orders.toArray(new OrderSpecifier[0])) + .fetch(); + + return new PageImpl<>(content, pageable, total); + } + + /** + * Detailed analysis result list + * + * @param searchGeoReq + * @return + */ + @Override + public Page<InferenceResultDto.Geom> getInferenceGeomList(Long id, SearchGeoReq searchGeoReq) { + Pageable pageable = searchGeoReq.toPageable(); + BooleanBuilder builder = new BooleanBuilder(); + + // inference result id + builder.and(mapSheetAnalEntity.id.eq(id)); + + // target-year classification + if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")) { + builder.and( + mapSheetAnalDataGeomEntity + .classAfterCd + .toLowerCase() + .eq(searchGeoReq.getTargetClass().toLowerCase())); + } + + // comparison-year classification + if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")) { + builder.and( + mapSheetAnalDataGeomEntity + .classBeforeCd + .toLowerCase() + .eq(searchGeoReq.getCompareClass().toLowerCase())); + } + + // analysis map sheets + if (searchGeoReq.getMapSheetNum() != null && !searchGeoReq.getMapSheetNum().isEmpty()) { + List mapSheetNum = searchGeoReq.getMapSheetNum(); + builder.and(mapSheetAnalDataGeomEntity.mapSheetNum.in(mapSheetNum)); + } + + List<InferenceResultDto.Geom> content = + queryFactory + .select( + Projections.constructor( + InferenceResultDto.Geom.class, + mapSheetAnalDataGeomEntity.compareYyyy, + mapSheetAnalDataGeomEntity.targetYyyy, + mapSheetAnalDataGeomEntity.classBeforeCd, + mapSheetAnalDataGeomEntity.classBeforeProb, + mapSheetAnalDataGeomEntity.classAfterCd, + mapSheetAnalDataGeomEntity.classAfterProb, + mapSheetAnalDataGeomEntity.mapSheetNum, + mapSheetAnalDataGeomEntity.geom, + mapSheetAnalDataGeomEntity.geomCenter)) + .from(mapSheetAnalEntity) + .join(mapSheetAnalDataEntity) + .on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id)) + .join(mapSheetAnalDataGeomEntity) + .on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id)) + .where(builder) + .offset(pageable.getOffset()) + .limit(pageable.getPageSize()) + .fetch(); + + long total = + queryFactory + .select(mapSheetAnalDataGeomEntity.id) + .from(mapSheetAnalEntity) + .join(mapSheetAnalDataEntity) + .on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id)) + .join(mapSheetAnalDataGeomEntity) + .on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id)) + .where(builder) + .fetchCount(); + + return new PageImpl<>(content, pageable, total); + } + + /** + * List of inferred 1:5000 map sheets + * + * @param id + * @return + */ + @Override + public List getSheets(Long id) { + return queryFactory + .select(mapSheetAnalDataEntity.mapSheetNum) + .from(mapSheetAnalEntity) + .join(mapSheetAnalDataEntity) + .on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id)) + .where(mapSheetAnalEntity.id.eq(id)) + .groupBy(mapSheetAnalDataEntity.mapSheetNum) + .fetch(); + } + + /** Converts the Sort of a Pageable into QueryDSL OrderSpecifiers */ + @SuppressWarnings({"unchecked", "rawtypes"}) + private List<OrderSpecifier<?>> getOrderSpecifiers(Sort sort) { + List<OrderSpecifier<?>> orders = new ArrayList<>(); + + if (sort.isSorted()) { + QMapSheetAnalDataGeomEntity entity = QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity; + + for (Sort.Order order : sort) { + Order direction = order.isAscending() ? Order.ASC : Order.DESC; + String property = order.getProperty(); + + // handle whitelisted fields only + switch (property) { + case "classBeforeCd" -> orders.add(new OrderSpecifier(direction, entity.classBeforeCd)); + case "classBeforeProb" -> + orders.add(new OrderSpecifier(direction, entity.classBeforeProb)); + case "classAfterCd" -> orders.add(new OrderSpecifier(direction, entity.classAfterCd)); + case "classAfterProb" -> orders.add(new OrderSpecifier(direction, entity.classAfterProb)); + case "mapSheetNum" -> orders.add(new OrderSpecifier(direction, entity.mapSheetNum)); + case "compareYyyy" -> orders.add(new OrderSpecifier(direction, entity.compareYyyy)); + case "targetYyyy" -> orders.add(new OrderSpecifier(direction, entity.targetYyyy)); + case "area" -> orders.add(new OrderSpecifier(direction, entity.area)); + case "createdDttm" -> orders.add(new OrderSpecifier(direction, entity.createdDttm)); + case "updatedDttm" -> orders.add(new OrderSpecifier(direction, entity.updatedDttm)); + // ignore invalid fields + default -> {} + } + } + } + + return orders; + } + + private BooleanExpression eqTargetClass( + QMapSheetAnalDataGeomEntity detectedEntity, String targetClass) { + return targetClass != null && !targetClass.isEmpty() + ? detectedEntity.classAfterCd.toLowerCase().eq(targetClass.toLowerCase()) + : null; + } + + private BooleanExpression eqCompareClass( + QMapSheetAnalDataGeomEntity detectedEntity, String compareClass) { + return compareClass != null && !compareClass.isEmpty() + ? detectedEntity.classBeforeCd.toLowerCase().eq(compareClass.toLowerCase()) + : null; + } + + private BooleanExpression containsMapSheetNum( + QMapSheetAnalDataGeomEntity detectedEntity, List mapSheet) { + if (mapSheet == null || mapSheet.isEmpty()) { + return null; + } + + return detectedEntity.mapSheetNum.in(mapSheet); + } +}
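A hypothetical caller for the whitelisted-sort conversion above; unknown sort keys produce no OrderSpecifier, so the query silently falls back to the default createdDttm desc ordering:

    import org.springframework.data.domain.PageRequest;
    import org.springframework.data.domain.Pageable;
    import org.springframework.data.domain.Sort;

    // "area" is on the whitelist in getOrderSpecifiers(); a typo such as "aera"
    // would be ignored and the default ordering would apply instead.
    Pageable pageable = PageRequest.of(0, 20, Sort.by(Sort.Direction.DESC, "area"));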
From baffb147d943b56ff12913905a045527073c9b9b Mon Sep 17 00:00:00 2001 From: teddy Date: Fri, 26 Dec 2025 10:23:03 +0900 Subject: [PATCH 3/3] =?UTF-8?q?=ED=8C=8C=EC=9D=BC=EC=83=9D=EC=84=B1=20?= =?UTF-8?q?=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../kamcoback/common/utils/FIleChecker.java | 17 +-- .../InferenceResultShpApiController.java | 1 - .../inference/dto/InferenceResultShpDto.java | 62 ++++---- .../inference/service/GeoToolsShpWriter.java | 144 +++++++----------- .../service/InferenceResultShpService.java | 22 +-- .../mapsheet/MapSheetMngApiController.java | 48 +++--- .../mapsheet/service/MapSheetMngService.java | 73 +++++---- .../core/InferenceResultShpCoreService.java | 11 +- .../MapSheetAnalDataInferenceGeomEntity.java | 4 - .../InferenceResultRepositoryCustom.java | 1 - .../InferenceResultRepositoryImpl.java | 18 +-- ...etAnalDataInferenceGeomRepositoryImpl.java | 4 +- .../MapSheetAnalDataInferenceRepository.java | 6 +- .../mapsheet/MapSheetMngRepositoryImpl.java | 18 ++- .../MapSheetMngFileJobController.java | 2 - 15 files changed, 183 insertions(+), 248 deletions(-) diff --git a/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java b/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java index 5f6909d0..ce622964 100644 --- a/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java +++ b/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java @@ -2,7 +2,6 @@ package com.kamco.cd.kamcoback.common.utils; import static java.lang.String.CASE_INSENSITIVE_ORDER; -import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.DmlReturn; import io.swagger.v3.oas.annotations.media.Schema; import java.io.BufferedReader; import java.io.File; @@ -457,15 +456,13 @@ public class FIleChecker { return FIleChecker.getFilesFromAllDepth(dir, targetFileNm, extension, 100, "name", 0, 100); } - public static int getFileCountFromAllDepth( - String dir, String targetFileNm, String extension) { + public static int getFileCountFromAllDepth(String dir, String targetFileNm, String extension) { List basicList = - FIleChecker.getFilesFromAllDepth(dir, targetFileNm, extension); + FIleChecker.getFilesFromAllDepth(dir, targetFileNm, extension); - return (int) basicList.stream() - .filter(dto -> dto.getExtension().toString().equals(extension)) - .count(); + return (int) + basicList.stream().filter(dto -> dto.getExtension().toString().equals(extension)).count(); } public static Long getFileTotSize(List files) { @@ -478,8 +475,7 @@ return fileTotSize; } - public static boolean multipartSaveTo(MultipartFile mfile, String targetPath) - { + public static boolean multipartSaveTo(MultipartFile mfile, String targetPath) { Path tmpSavePath = Paths.get(targetPath); boolean fileUpload = true; @@ -493,8 +489,7 @@ return true; } - public static boolean validationMultipart(MultipartFile mfile) - { + public static boolean validationMultipart(MultipartFile mfile) { // validate the file if (mfile == null || mfile.isEmpty() || mfile.getSize() == 0) { return false; diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultShpApiController.java b/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultShpApiController.java index 43f99746..83fa0038 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultShpApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultShpApiController.java @@ -17,7 +17,6 @@ public class InferenceResultShpApiController { private final InferenceResultShpService inferenceResultShpService; - @Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장") @PostMapping public ApiResponseDto saveInferenceResultData() { diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java index 645c4d58..35161f38 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultShpDto.java @@ -30,22 +30,21 @@ public class InferenceResultShpDto { private Geometry geometry; public Basic( - Long id, - UUID uuid, - Integer stage, - Float cdProb, - Integer input1, - Integer input2, - Long mapId, - String beforeClass, - Float beforeProbability, - String afterClass, - Float afterProbability, - Float area, - ZonedDateTime createdDttm, - ZonedDateTime updatedDttm, - Geometry geometry - ) { + Long id, + UUID uuid, + Integer stage, + Float cdProb, + Integer input1, + Integer input2, + Long mapId, + String beforeClass, + Float beforeProbability, + String afterClass, + Float afterProbability, + Float area, + ZonedDateTime createdDttm, + ZonedDateTime updatedDttm, + Geometry geometry) { this.id = id; this.uuid = uuid; this.stage = stage; @@ -65,22 +64,21 @@ public static Basic from(InferenceResultEntity e) { return new Basic( - e.getId(), - e.getUuid(), - e.getStage(), - e.getCdProb(), - e.getInput1(), - e.getInput2(), - e.getMapId(), - e.getBeforeClass(), - e.getBeforeProbability(), - e.getAfterClass(), - e.getAfterProbability(), - e.getArea(), - e.getCreatedDttm(), - e.getUpdatedDttm(), - e.getGeometry() - ); + e.getId(), + e.getUuid(), + e.getStage(), + e.getCdProb(), + e.getInput1(), + e.getInput2(), + e.getMapId(), + e.getBeforeClass(), + e.getBeforeProbability(), + e.getAfterClass(), + e.getAfterProbability(), + e.getArea(), + e.getCreatedDttm(), + e.getUpdatedDttm(), + e.getGeometry()); } } } diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java index 184312e7..d20af592 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/GeoToolsShpWriter.java @@ -48,13 +48,14 @@ public class GeoToolsShpWriter implements ShpWriter {

- * - shpBasePath를 기준으로 파일을 생성한다. 예) /Users/kim/export/shp/1_map_2021_2022 → 1_map_2021_2022.shp → 1_map_2021_2022.shx → 1_map_2021_2022.dbf → 1_map_2021_2022.prj - *

- * - geometry 타입은 첫 번째 유효 geometry 기준으로 스키마를 생성한다. - 좌표계는 EPSG:5186으로 설정하며, .prj 파일을 직접 생성한다. + * + *

- shpBasePath를 기준으로 파일을 생성한다. 예) /Users/kim/export/shp/1_map_2021_2022 → 1_map_2021_2022.shp + * → 1_map_2021_2022.shx → 1_map_2021_2022.dbf → 1_map_2021_2022.prj + * + *

- geometry 타입은 첫 번째 유효 geometry 기준으로 스키마를 생성한다. - 좌표계는 EPSG:5186으로 설정하며, .prj 파일을 직접 생성한다. * * @param shpBasePath 확장자를 제외한 SHP 파일 기본 경로 - * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 + * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 */ @Override public void writeShp(String shpBasePath, List rows) { @@ -67,13 +68,11 @@ public class GeoToolsShpWriter implements ShpWriter { // 첫 번째 유효 geometry의 "구체 타입"을 기준으로 스키마를 생성한다. Geometry firstGeom = firstNonNullGeometry(rows); if (firstGeom == null) { - throw new IllegalArgumentException( - "SHP 생성 실패: geometry가 전부 null 입니다. path=" + shpBasePath); + throw new IllegalArgumentException("SHP 생성 실패: geometry가 전부 null 입니다. path=" + shpBasePath); } @SuppressWarnings("unchecked") - Class geomType = - (Class) firstGeom.getClass(); + Class geomType = (Class) firstGeom.getClass(); ShapefileDataStore dataStore = null; @@ -91,8 +90,7 @@ public class GeoToolsShpWriter implements ShpWriter { dataStore = createDataStore(shpFile, schema); // FeatureCollection 생성 - DefaultFeatureCollection collection = - buildFeatureCollection(schema, rows); + DefaultFeatureCollection collection = buildFeatureCollection(schema, rows); // 실제 SHP 파일에 feature 쓰기 writeFeatures(dataStore, collection); @@ -100,11 +98,7 @@ public class GeoToolsShpWriter implements ShpWriter { // .prj 파일 직접 생성 (EPSG:5186) writePrjFile(shpBasePath, crs); - log.info( - "SHP 생성 완료: {} ({} features)", - shpFile.getAbsolutePath(), - collection.size() - ); + log.info("SHP 생성 완료: {} ({} features)", shpFile.getAbsolutePath(), collection.size()); } catch (Exception e) { throw new RuntimeException("SHP 생성 실패: " + shpBasePath, e); @@ -120,14 +114,16 @@ public class GeoToolsShpWriter implements ShpWriter { /** * GeoJSON 파일(.geojson)을 생성한다. - *
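A hedged usage sketch for the writer above (the no-arg construction and the row loader are assumptions made for illustration; the base path deliberately carries no extension):

    ShpWriter writer = new GeoToolsShpWriter();
    List<InferenceResultShpDto.Basic> rows = loadGroupRows(); // hypothetical loader
    writer.writeShp("/tmp/export/shp/1_1_2021_2022", rows);
    // expected outputs: 1_1_2021_2022.shp/.shx/.dbf/.prj (plus .qix from the spatial index)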

- * - FeatureCollection 형태로 출력한다. - 최상단에 name / crs / properties를 포함한다. - 각 Feature는 polygon 단위로 생성된다. - geometry는 GeoTools GeometryJSON을 사용하여 직렬화한다. - *

- * GeoJSON 구조 예: { "type": "FeatureCollection", "name": "stage_input1_input2_mapId", "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::5186" } }, - * "properties": { ... }, "features": [ ... ] } + * + *

- FeatureCollection 형태로 출력한다. - 최상단에 name / crs / properties를 포함한다. - 각 Feature는 polygon 단위로 + * 생성된다. - geometry는 GeoTools GeometryJSON을 사용하여 직렬화한다. + * + *

GeoJSON 구조 예: { "type": "FeatureCollection", "name": "stage_input1_input2_mapId", "crs": { + * "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::5186" } }, "properties": { ... + * }, "features": [ ... ] } * * @param geoJsonPath 생성할 GeoJSON 파일의 전체 경로 (.geojson 포함) - * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 + * @param rows 동일 그룹(stage, mapId, input1, input2)의 데이터 목록 */ @Override public void writeGeoJson(String geoJsonPath, List rows) { @@ -151,13 +147,10 @@ public class GeoToolsShpWriter implements ShpWriter { root.put("type", "FeatureCollection"); // name: stage_input1_input2_mapId - String name = String.format( - "%d_%d_%d_%d", - first.getStage(), - first.getInput1(), - first.getInput2(), - first.getMapId() - ); + String name = + String.format( + "%d_%d_%d_%d", + first.getStage(), first.getInput1(), first.getInput2(), first.getMapId()); root.put("name", name); // CRS (EPSG:5186) @@ -175,9 +168,9 @@ public class GeoToolsShpWriter implements ShpWriter { groupProps.put("input2", first.getInput2()); groupProps.put("map_id", first.getMapId()); // 학습서버 버전은 추후 추가 -// groupProps.put("m1", "v1.2222.251223121212"); -// groupProps.put("m2", "v2.211.251223121213"); -// groupProps.put("m3", "v3.233.251223121214"); + // groupProps.put("m1", "v1.2222.251223121212"); + // groupProps.put("m2", "v2.211.251223121213"); + // groupProps.put("m3", "v3.233.251223121214"); root.set("properties", groupProps); // features 배열 @@ -193,8 +186,7 @@ public class GeoToolsShpWriter implements ShpWriter { // feature properties ObjectNode p = om.createObjectNode(); - p.put("polygon_id", - dto.getUuid() != null ? dto.getUuid().toString() : null); + p.put("polygon_id", dto.getUuid() != null ? dto.getUuid().toString() : null); if (dto.getCdProb() != null) { p.put("cd_prob", dto.getCdProb()); } @@ -233,18 +225,11 @@ public class GeoToolsShpWriter implements ShpWriter { // 파일 쓰기 try (OutputStreamWriter w = - new OutputStreamWriter( - new FileOutputStream(geoJsonFile), - GEOJSON_CHARSET - )) { + new OutputStreamWriter(new FileOutputStream(geoJsonFile), GEOJSON_CHARSET)) { om.writerWithDefaultPrettyPrinter().writeValue(w, root); } - log.info( - "GeoJSON 생성 완료: {} ({} features)", - geoJsonFile.getAbsolutePath(), - features.size() - ); + log.info("GeoJSON 생성 완료: {} ({} features)", geoJsonFile.getAbsolutePath(), features.size()); } catch (Exception e) { throw new RuntimeException("GeoJSON 생성 실패: " + geoJsonPath, e); @@ -253,8 +238,8 @@ public class GeoToolsShpWriter implements ShpWriter { /** * rows 목록에서 첫 번째로 발견되는 non-null Geometry를 반환한다. - *
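For the geometry serialization step, a minimal sketch of grafting a JTS geometry into a Jackson feature node via GeoTools GeometryJSON (om, feature and dto are assumed from the surrounding method; readTree may throw a Jackson exception):

    import org.geotools.geojson.geom.GeometryJSON;

    GeometryJSON gj = new GeometryJSON(7); // 7 decimal places of coordinate precision
    String geomJson = gj.toString(dto.getGeometry()); // JTS geometry -> GeoJSON string
    feature.set("geometry", om.readTree(geomJson)); // graft into the feature node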

- * - SHP 스키마 생성 시 geometry 타입 결정을 위해 사용된다. + * + *

- SHP 스키마 생성 시 geometry 타입 결정을 위해 사용된다. * * @param rows DTO 목록 * @return 첫 번째 non-null Geometry, 없으면 null @@ -270,17 +255,15 @@ public class GeoToolsShpWriter implements ShpWriter { /** * SHP 파일에 사용할 SimpleFeatureType(schema)를 생성한다. - *

- * - geometry 컬럼은 반드시 첫 번째 컬럼이어야 한다. - DBF 컬럼은 SHP 제약(컬럼명 10자, 길이 제한)을 고려한다. + * + *

- geometry 컬럼은 반드시 첫 번째 컬럼이어야 한다. - DBF 컬럼은 SHP 제약(컬럼명 10자, 길이 제한)을 고려한다. * * @param geomType geometry의 구체 타입 (Polygon, MultiPolygon 등) - * @param crs 좌표계(EPSG:5186) + * @param crs 좌표계(EPSG:5186) * @return SimpleFeatureType */ private SimpleFeatureType createSchema( - Class geomType, - CoordinateReferenceSystem crs - ) { + Class geomType, CoordinateReferenceSystem crs) { SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder(); b.setName("inference_result"); b.setCRS(crs); @@ -305,25 +288,22 @@ public class GeoToolsShpWriter implements ShpWriter { /** * ShapefileDataStore를 생성하고 스키마를 등록한다. - *
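To make the two DBF constraints concrete, a hedged schema sketch (the attribute names are invented; only the ordering and the naming cap mirror the real builder):

    SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();
    b.setName("inference_result");
    b.add("the_geom", MultiPolygon.class);       // geometry attribute must be added first
    b.length(50).add("bef_class", String.class); // "bef_class" stays within the 10-char DBF name limit
    b.add("aft_prob", Double.class);
    SimpleFeatureType schema = b.buildFeatureType();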

- * - DBF 파일 인코딩은 EUC-KR로 설정한다. - spatial index(.qix)를 생성한다. + * + *

- DBF 파일 인코딩은 EUC-KR로 설정한다. - spatial index(.qix)를 생성한다. * * @param shpFile SHP 파일 객체 - * @param schema SimpleFeatureType + * @param schema SimpleFeatureType * @return 생성된 ShapefileDataStore */ - private ShapefileDataStore createDataStore( - File shpFile, - SimpleFeatureType schema - ) throws Exception { + private ShapefileDataStore createDataStore(File shpFile, SimpleFeatureType schema) + throws Exception { Map params = new HashMap<>(); params.put("url", shpFile.toURI().toURL()); params.put("create spatial index", Boolean.TRUE); ShapefileDataStoreFactory factory = new ShapefileDataStoreFactory(); - ShapefileDataStore dataStore = - (ShapefileDataStore) factory.createNewDataStore(params); + ShapefileDataStore dataStore = (ShapefileDataStore) factory.createNewDataStore(params); dataStore.setCharset(DBF_CHARSET); dataStore.createSchema(schema); @@ -333,17 +313,15 @@ public class GeoToolsShpWriter implements ShpWriter { /** * DTO 목록을 SimpleFeatureCollection으로 변환한다. - *

- * - DTO 1건당 Feature 1개 생성 - geometry가 null인 데이터는 제외한다. + * + *

- DTO 1건당 Feature 1개 생성 - geometry가 null인 데이터는 제외한다. * * @param schema FeatureType - * @param rows DTO 목록 + * @param rows DTO 목록 * @return DefaultFeatureCollection */ private DefaultFeatureCollection buildFeatureCollection( - SimpleFeatureType schema, - List rows - ) { + SimpleFeatureType schema, List rows) { DefaultFeatureCollection collection = new DefaultFeatureCollection(); SimpleFeatureBuilder builder = new SimpleFeatureBuilder(schema); @@ -360,13 +338,11 @@ public class GeoToolsShpWriter implements ShpWriter { builder.add(dto.getMapId()); builder.add(dto.getArea() != null ? dto.getArea().doubleValue() : null); builder.add(dto.getBeforeClass()); - builder.add(dto.getBeforeProbability() != null - ? dto.getBeforeProbability().doubleValue() - : null); + builder.add( + dto.getBeforeProbability() != null ? dto.getBeforeProbability().doubleValue() : null); builder.add(dto.getAfterClass()); - builder.add(dto.getAfterProbability() != null - ? dto.getAfterProbability().doubleValue() - : null); + builder.add( + dto.getAfterProbability() != null ? dto.getAfterProbability().doubleValue() : null); SimpleFeature feature = builder.buildFeature(null); collection.add(feature); @@ -379,17 +355,14 @@ public class GeoToolsShpWriter implements ShpWriter { /** * FeatureCollection을 SHP 파일에 실제로 기록한다. * - * @param dataStore ShapefileDataStore + * @param dataStore ShapefileDataStore * @param collection FeatureCollection */ - private void writeFeatures( - ShapefileDataStore dataStore, - DefaultFeatureCollection collection - ) throws Exception { + private void writeFeatures(ShapefileDataStore dataStore, DefaultFeatureCollection collection) + throws Exception { String typeName = dataStore.getTypeNames()[0]; - SimpleFeatureSource featureSource = - dataStore.getFeatureSource(typeName); + SimpleFeatureSource featureSource = dataStore.getFeatureSource(typeName); if (!(featureSource instanceof SimpleFeatureStore store)) { throw new IllegalStateException("FeatureStore 생성 실패"); @@ -404,21 +377,14 @@ public class GeoToolsShpWriter implements ShpWriter { * SHP 좌표계 정보를 담은 .prj 파일을 생성한다. 
* * @param shpBasePath SHP 기본 경로 (확장자 제외) - * @param crs 좌표계(EPSG:5186) + * @param crs 좌표계(EPSG:5186) */ - private void writePrjFile( - String shpBasePath, - CoordinateReferenceSystem crs - ) throws Exception { + private void writePrjFile(String shpBasePath, CoordinateReferenceSystem crs) throws Exception { File prjFile = new File(shpBasePath + ".prj"); createDirectories(prjFile); - Files.writeString( - prjFile.toPath(), - crs.toWKT(), - StandardCharsets.UTF_8 - ); + Files.writeString(prjFile.toPath(), crs.toWKT(), StandardCharsets.UTF_8); } /** diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java index b7a5ce3e..8c9004b3 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultShpService.java @@ -19,14 +19,16 @@ public class InferenceResultShpService { public void saveInferenceResultData() { - coreService.streamGrouped(1000, (key, entities) -> { + coreService.streamGrouped( + 1000, + (key, entities) -> { - // Entity -> DTO - List dtoList = - entities.stream().map(InferenceResultShpDto.Basic::from).toList(); + // Entity -> DTO + List dtoList = + entities.stream().map(InferenceResultShpDto.Basic::from).toList(); - flushGroup(key, dtoList); - }); + flushGroup(key, dtoList); + }); } /** @@ -41,12 +43,10 @@ public class InferenceResultShpService { String baseDir = System.getProperty("user.home") + "/export"; // 파일명 stage_input1_input2_mapId - String baseName = String.format( - "%d_%d_%d_%d", - key.stage(), key.mapId(), key.input1(), key.input2() - ); + String baseName = + String.format("%d_%d_%d_%d", key.stage(), key.mapId(), key.input1(), key.input2()); - String shpBasePath = baseDir + "/shp/" + baseName; // 확장자 없이 + String shpBasePath = baseDir + "/shp/" + baseName; // 확장자 없이 String geoJsonPath = baseDir + "/geojson/" + baseName + ".geojson"; // shp: .shp/.shx/.dbf 생성 diff --git a/src/main/java/com/kamco/cd/kamcoback/mapsheet/MapSheetMngApiController.java b/src/main/java/com/kamco/cd/kamcoback/mapsheet/MapSheetMngApiController.java index 5a23809d..adc84738 100644 --- a/src/main/java/com/kamco/cd/kamcoback/mapsheet/MapSheetMngApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/mapsheet/MapSheetMngApiController.java @@ -226,21 +226,19 @@ public class MapSheetMngApiController { return ApiResponseDto.ok(mapSheetMngService.deleteByFileUidMngFile(fileUids)); } - - @Operation(summary = "폴더 조회", description = "폴더 조회 (ROOT:/app/original-images 이하로 경로입력)") @ApiResponses( - value = { - @ApiResponse( - responseCode = "200", - description = "조회 성공", - content = - @Content( - mediaType = "application/json", - schema = @Schema(implementation = CommonCodeDto.Basic.class))), - @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), - @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) - }) + value = { + @ApiResponse( + responseCode = "200", + description = "조회 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = CommonCodeDto.Basic.class))), + @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) @PostMapping("/folder-list") public ApiResponseDto getDir(@RequestBody SrchFoldersDto srchDto) { @@ -249,22 +247,20 @@ public class 
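The service above only sees completed groups. A hedged sketch of the grouping loop the core service presumably runs over the ordered cursor (the loop body is not part of this patch, so the shape is an assumption):

    ShpKey currentKey = null;
    List<InferenceResultEntity> buffer = new ArrayList<>();
    while (cursor.next()) {
      InferenceResultEntity row = (InferenceResultEntity) cursor.get();
      ShpKey key = new ShpKey(row.getStage(), row.getMapId(), row.getInput1(), row.getInput2());
      if (currentKey != null && !currentKey.equals(key)) {
        handler.accept(currentKey, buffer); // key changed: the previous group is complete
        buffer = new ArrayList<>();
      }
      currentKey = key;
      buffer.add(row);
    }
    if (currentKey != null) {
      handler.accept(currentKey, buffer); // flush the final group
    }

This only works because scrollAllOrdered() sorts by (stage, mapId, input1, input2, id), so the rows of one group arrive contiguously.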
diff --git a/src/main/java/com/kamco/cd/kamcoback/mapsheet/MapSheetMngApiController.java b/src/main/java/com/kamco/cd/kamcoback/mapsheet/MapSheetMngApiController.java index 5a23809d..adc84738 100644 --- a/src/main/java/com/kamco/cd/kamcoback/mapsheet/MapSheetMngApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/mapsheet/MapSheetMngApiController.java @@ -226,21 +226,19 @@ public class MapSheetMngApiController { return ApiResponseDto.ok(mapSheetMngService.deleteByFileUidMngFile(fileUids)); } - - @Operation(summary = "폴더 조회", description = "폴더 조회 (ROOT:/app/original-images 이하로 경로입력)") @ApiResponses( - value = { - @ApiResponse( - responseCode = "200", - description = "조회 성공", - content = - @Content( - mediaType = "application/json", - schema = @Schema(implementation = CommonCodeDto.Basic.class))), - @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), - @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) - }) + value = { + @ApiResponse( + responseCode = "200", + description = "조회 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = CommonCodeDto.Basic.class))), + @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) @PostMapping("/folder-list") public ApiResponseDto getDir(@RequestBody SrchFoldersDto srchDto) { @@ -249,22 +247,20 @@ public class MapSheetMngApiController { @Operation(summary = "지정폴더내 파일목록 조회", description = "지정폴더내 파일목록 조회") @ApiResponses( - value = { - @ApiResponse( - responseCode = "200", - description = "조회 성공", - content = - @Content( - mediaType = "application/json", - schema = @Schema(implementation = CommonCodeDto.Basic.class))), - @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), - @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) - }) + value = { + @ApiResponse( + responseCode = "200", + description = "조회 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = CommonCodeDto.Basic.class))), + @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) @PostMapping("/file-list") public ApiResponseDto getFiles(@RequestBody SrchFilesDto srchDto) { return ApiResponseDto.createOK(mapSheetMngService.getFilesAll(srchDto)); } - - } diff --git a/src/main/java/com/kamco/cd/kamcoback/mapsheet/service/MapSheetMngService.java b/src/main/java/com/kamco/cd/kamcoback/mapsheet/service/MapSheetMngService.java index d5f2c031..6983e434 100644 --- a/src/main/java/com/kamco/cd/kamcoback/mapsheet/service/MapSheetMngService.java +++ b/src/main/java/com/kamco/cd/kamcoback/mapsheet/service/MapSheetMngService.java @@ -14,7 +14,6 @@ import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngDto; import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngFilesDto; import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService; import jakarta.validation.Valid; -import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -86,7 +85,8 @@ public class MapSheetMngService { } @Transactional - public DmlReturn uploadPair(MultipartFile tfwFile, MultipartFile tifFile, Long hstUid, Boolean overwrite) { + public DmlReturn uploadPair( + MultipartFile tfwFile, MultipartFile tifFile, Long hstUid, Boolean overwrite) { String rootPath = syncRootDir; String tmpPath = syncTmpDir; @@ -104,26 +104,29 @@ return new DmlReturn("fail", "CREATE TEMP FOLDER ERROR"); } - // check uploaded file size and extension + // check uploaded file size and extension dmlReturn = this.validationFile(tfwFile, tifFile); - if( dmlReturn.getFlag().equals("fail") )return dmlReturn; - + if (dmlReturn.getFlag().equals("fail")) return dmlReturn; MngDto mngDto = mapSheetMngCoreService.findMapSheetMng(errDto.getMngYyyy()); String targetYearDir = mngDto.getMngPath(); // duplicate check - if( !overwrite ) { - dmlReturn = this.duplicateFile(errDto.getMngYyyy(), tfwFile.getOriginalFilename(), tifFile.getOriginalFilename()); - if( dmlReturn.getFlag().equals("duplicate") )return dmlReturn; + if (!overwrite) { + dmlReturn = + this.duplicateFile( + errDto.getMngYyyy(), tfwFile.getOriginalFilename(), tifFile.getOriginalFilename()); + if (dmlReturn.getFlag().equals("duplicate")) return dmlReturn; } - // save multipart files to the tmp folder (to check the file format) + // save multipart files to the tmp folder (to check the file format) String tfwTmpPath = tmpPath + tfwFile.getOriginalFilename(); String tifTmpPath = tmpPath + tifFile.getOriginalFilename(); - if(!FIleChecker.multipartSaveTo(tfwFile, tfwTmpPath))return new DmlReturn("fail", "UPLOAD ERROR"); - if(!FIleChecker.multipartSaveTo(tifFile, tifTmpPath))return new DmlReturn("fail", "UPLOAD ERROR"); + if (!FIleChecker.multipartSaveTo(tfwFile, tfwTmpPath)) + return new DmlReturn("fail", "UPLOAD ERROR"); + if (!FIleChecker.multipartSaveTo(tifFile, tifTmpPath)) + return new DmlReturn("fail", "UPLOAD ERROR"); if (!FIleChecker.cmmndGdalInfo(tifTmpPath)) return new DmlReturn("fail", "TIF TYPE ERROR"); if (!FIleChecker.checkTfw(tfwTmpPath)) return new DmlReturn("fail", "TFW TYPE ERROR"); @@ -136,7 +139,6 @@ break; } - Path tfwTmpSavePath = Paths.get(tfwTmpPath); Path tifTmpSavePath = Paths.get(tifTmpPath); Path tfwTargetPath = null; @@ -232,20 +234,18 @@ return new DmlReturn("success", fileUids.size() + "개 파일이 삭제되었습니다."); } - - - public DmlReturn validationFile(MultipartFile tfwFile, MultipartFile tifFile) - { - if( !FIleChecker.validationMultipart(tfwFile) )return new DmlReturn("fail", "TFW SIZE 오류"); - else if( !FIleChecker.validationMultipart(tifFile) )return new DmlReturn("fail", "TFW SIZE 오류"); - else if (!FIleChecker.checkExtensions(tfwFile.getOriginalFilename(), "tfw"))return new DmlReturn("fail", "TFW FILENAME ERROR"); - else if (!FIleChecker.checkExtensions(tifFile.getOriginalFilename(), "tif"))return new DmlReturn("fail", "TIF FILENAME ERROR"); + public DmlReturn validationFile(MultipartFile tfwFile, MultipartFile tifFile) { + if (!FIleChecker.validationMultipart(tfwFile)) return new DmlReturn("fail", "TFW SIZE 오류"); + else if (!FIleChecker.validationMultipart(tifFile)) return new DmlReturn("fail", "TIF SIZE 오류"); + else if (!FIleChecker.checkExtensions(tfwFile.getOriginalFilename(), "tfw")) + return new DmlReturn("fail", "TFW FILENAME ERROR"); + else if (!FIleChecker.checkExtensions(tifFile.getOriginalFilename(), "tif")) + return new DmlReturn("fail", "TIF FILENAME ERROR"); return new DmlReturn("success", "파일체크"); } - public DmlReturn duplicateFile(int mngYyyy, String tfwFileName, String tifFileName) - { + public DmlReturn duplicateFile(int mngYyyy, String tfwFileName, String tifFileName) { int tfwCnt = mapSheetMngCoreService.findByYearFileNameFileCount(mngYyyy, tfwFileName); int tifCnt = mapSheetMngCoreService.findByYearFileNameFileCount(mngYyyy, tifFileName); @@ -254,14 +254,11 @@ if (tfwCnt > 0 || tifCnt > 0) { String resMsg = ""; - if (tfwCnt > 0) - resMsg = tfwFileName; + if (tfwCnt > 0) resMsg = tfwFileName; if (tifCnt > 0) { - if (tfwCnt > 0) - resMsg = resMsg + "," + tifFileName; - else - resMsg = tifFileName; + if (tfwCnt > 0) resMsg = resMsg + "," + tifFileName; + else resMsg = tifFileName; } return new DmlReturn("duplicate", resMsg); } @@ -279,8 +276,8 @@ int folderTotCnt = folderList.size(); int folderErrTotCnt = - (int) - folderList.stream().filter(dto -> dto.getIsValid().toString().equals("false")).count(); + (int) + folderList.stream().filter(dto -> dto.getIsValid().toString().equals("false")).count(); return new FoldersDto(dirPath, folderTotCnt, folderErrTotCnt, folderList); } @@ -292,14 +289,14 @@ int endPos = srchDto.getEndPos(); List files = - FIleChecker.getFilesFromAllDepth( - srchDto.getDirPath(), - "*", - srchDto.getExtension(), - 1, - srchDto.getSortType(), - startPos, - endPos); + FIleChecker.getFilesFromAllDepth( + srchDto.getDirPath(), + "*", + srchDto.getExtension(), + 1, + srchDto.getSortType(), + startPos, + endPos); int fileListPos = 0; int fileTotCnt = files.size();
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java index ff061b53..b9872cff 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultShpCoreService.java @@ -16,15 +16,12 @@ public class InferenceResultShpCoreService { private final InferenceResultRepository inferenceResultRepository; - public record ShpKey(Integer stage, Long mapId, Integer input1, Integer input2) { + public record ShpKey(Integer stage, Long mapId, Integer input1, Integer input2) {} - } - - /** - * Streams the DB and hands each group to the handler as soon as it completes. handler: (key, groupRows) - */ + /** Streams the DB and hands each group to the handler as soon as it completes. handler: (key, groupRows) */ @Transactional(readOnly = true) - public void streamGrouped(int fetchSize, BiConsumer<ShpKey, List<InferenceResultEntity>> handler) { + public void streamGrouped( + int fetchSize, BiConsumer<ShpKey, List<InferenceResultEntity>> handler) { ScrollableResults cursor = inferenceResultRepository.scrollAllOrdered(fetchSize); diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceGeomEntity.java b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceGeomEntity.java index 05d8ccaa..e174067e 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceGeomEntity.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceGeomEntity.java @@ -135,16 +135,12 @@ public class MapSheetAnalDataInferenceGeomEntity { @Column(name = "file_created_yn") private Boolean fileCreatedYn; - @Column(name = "geom", columnDefinition = "geometry") private Geometry geom; - @Column(name = "geom_center", columnDefinition = "geometry") private Geometry geomCenter; - @Column(name = "before_geom", columnDefinition = "geometry") private Geometry beforeGeom; - } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java index 306d2021..ff2ab3e5 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java @@ -5,5 +5,4 @@ import org.hibernate.ScrollableResults; public interface InferenceResultRepositoryCustom { ScrollableResults scrollAllOrdered(int fetchSize); - } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java index 7dc992f5..288c5728 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java @@ -20,22 +20,16 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC QInferenceResultEntity e = QInferenceResultEntity.inferenceResultEntity; JPAQuery<InferenceResultEntity> q = - queryFactory - .selectFrom(e) - .orderBy( - e.stage.asc(), - e.mapId.asc(), - e.input1.asc(), - e.input2.asc(), - e.id.asc() - ); + queryFactory + .selectFrom(e) + .orderBy(e.stage.asc(), e.mapId.asc(), e.input1.asc(), e.input2.asc(), e.id.asc()); // unwrap the QueryDSL query into a Hibernate Query for cursor streaming Query hQuery = q.createQuery().unwrap(Query.class); return hQuery - .setReadOnly(true) - .setFetchSize(fetchSize) // affects PostgreSQL cursor/streaming - .scroll(ScrollMode.FORWARD_ONLY); + .setReadOnly(true) + .setFetchSize(fetchSize) // affects PostgreSQL cursor/streaming + .scroll(ScrollMode.FORWARD_ONLY); } } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryImpl.java index 4437987d..c357036e 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceGeomRepositoryImpl.java @@ -6,6 +6,4 @@ import org.springframework.stereotype.Repository; @Repository @RequiredArgsConstructor public class MapSheetAnalDataInferenceGeomRepositoryImpl - implements MapSheetAnalDataInferenceGeomRepositoryCustom { - -} + implements MapSheetAnalDataInferenceGeomRepositoryCustom {} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepository.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepository.java index 0a8b8417..784f8b01 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepository.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetAnalDataInferenceRepository.java @@ -4,7 +4,5 @@ import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity; import org.springframework.data.jpa.repository.JpaRepository; public interface MapSheetAnalDataInferenceRepository - extends JpaRepository<MapSheetAnalDataInferenceEntity, Long>, - MapSheetAnalDataInferenceRepositoryCustom { - -} + extends JpaRepository<MapSheetAnalDataInferenceEntity, Long>, + MapSheetAnalDataInferenceRepositoryCustom {} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/mapsheet/MapSheetMngRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/mapsheet/MapSheetMngRepositoryImpl.java index 049a73a1..6675d2dd 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/mapsheet/MapSheetMngRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/mapsheet/MapSheetMngRepositoryImpl.java @@ -661,13 +661,17 @@ } @Override - public int findByYearFileNameFileCount(int mngYyyy, String fileName){ - Long execCount = queryFactory - .select(mapSheetMngFileEntity.count()) - .from(mapSheetMngFileEntity) - .where(mapSheetMngFileEntity.mngYyyy.eq(mngYyyy) - .and(mapSheetMngFileEntity.fileName.eq(fileName))) - .fetchOne(); + public int findByYearFileNameFileCount(int mngYyyy, String fileName) { + Long execCount = + queryFactory + .select(mapSheetMngFileEntity.count()) + .from(mapSheetMngFileEntity) + .where( + mapSheetMngFileEntity + .mngYyyy + .eq(mngYyyy) + .and(mapSheetMngFileEntity.fileName.eq(fileName))) + .fetchOne(); return Math.toIntExact(execCount); }
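One note on the count conversion above: fetchOne() is typed Long, so a defensive variant avoids a potential unboxing NullPointerException if the projection ever returns no row (a sketch, not the committed code):

    Long cnt =
        queryFactory
            .select(mapSheetMngFileEntity.count())
            .from(mapSheetMngFileEntity)
            .where(mapSheetMngFileEntity.fileName.eq(fileName))
            .fetchOne();
    return cnt == null ? 0 : Math.toIntExact(cnt); // defensive unboxing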
diff --git a/src/main/java/com/kamco/cd/kamcoback/scheduler/MapSheetMngFileJobController.java b/src/main/java/com/kamco/cd/kamcoback/scheduler/MapSheetMngFileJobController.java index 95fe378e..2c6110e7 100644 --- a/src/main/java/com/kamco/cd/kamcoback/scheduler/MapSheetMngFileJobController.java +++ b/src/main/java/com/kamco/cd/kamcoback/scheduler/MapSheetMngFileJobController.java @@ -25,7 +25,6 @@ public class MapSheetMngFileJobController { mapSheetMngFileJobService.checkMapSheetFileProcess(0, mngSyncPageSize); } - @Scheduled(fixedDelay = 5000) public void mngFileSyncJob01() { if (!isSchedulerEnabled) return; @@ -98,7 +97,6 @@ public class MapSheetMngFileJobController { mapSheetMngFileJobService.checkMapSheetFileProcess(9, mngSyncPageSize); } - // 3. setter method so the flag can be changed from outside public void setSchedulerEnabled(boolean enabled) { this.isSchedulerEnabled = enabled;