Merge pull request 'feat/demo-20251205' (#18) from feat/demo-20251205 into develop

Reviewed-on: https://kamco.gitea.gs.dabeeo.com/dabeeo/kamco-dabeeo-backoffice/pulls/18
2025-11-27 18:06:23 +09:00
46 changed files with 38182 additions and 39096 deletions

View File

@@ -5,13 +5,15 @@ import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import com.kamco.cd.kamcoback.changedetection.service.ChangeDetectionService;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import io.swagger.v3.oas.annotations.Hidden;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.transaction.Transactional;
import java.time.LocalDate;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.*;
@Tag(name = "변화탐지", description = "변화탐지 API")
@RequiredArgsConstructor
@@ -31,11 +33,34 @@ public class ChangeDetectionApiController {
/**
 * Example conversion of PolygonData -> JsonNode.
 *
 * @return polygon data serialized as a list of JsonNode
 */
@Hidden
@GetMapping("/json-data")
public ApiResponseDto<List<JsonNode>> getPolygonToJson() {
return ApiResponseDto.ok(changeDetectionService.getPolygonToJson());
}
@Operation(summary = "변화탐지 분류별 건수", description = "변화탐지 분류별 건수")
@GetMapping("/class-count/{id}")
public ApiResponseDto<List<ChangeDetectionDto.CountDto>> getChangeDetectionClassCount(
@Parameter(description = "변화탐지 년도(차수) /year-list 의 analUid", example = "1") @PathVariable Long id){
return ApiResponseDto.ok(changeDetectionService.getChangeDetectionClassCount(id));
}
@Operation(summary = "변화탐지 COG Url", description = "변화탐지 COG Url")
@GetMapping("/cog-url")
public ApiResponseDto<ChangeDetectionDto.CogUrlDto> getChangeDetectionCogUrl(
@Parameter(description = "이전 년도", example = "2023") @RequestParam Integer beforeYear,
@Parameter(description = "이후 년도", example = "2024") @RequestParam Integer afterYear,
@Parameter(description = "도엽번호(5k)", example = "36809010") @RequestParam String mapSheetNum){
ChangeDetectionDto.CogUrlReq req = new ChangeDetectionDto.CogUrlReq(beforeYear, afterYear, mapSheetNum);
return ApiResponseDto.ok(changeDetectionService.getChangeDetectionCogUrl(req));
}
@Operation(summary = "변화탐지 년도(차수) 목록", description = "변화탐지 년도(차수) 목록")
@GetMapping("/year-list")
public ApiResponseDto<List<ChangeDetectionDto.AnalYearList>> getChangeDetectionYearList() {
return ApiResponseDto.ok(changeDetectionService.getChangeDetectionYearList());
}
}
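
For reference, a minimal client sketch against the three new read endpoints. This is an illustrative sketch only: the host, the controller's base path (the class-level @RequestMapping is outside this diff), and the ChangeDetectionClientSketch class name are assumptions.

import org.springframework.web.client.RestTemplate;

public class ChangeDetectionClientSketch {
    public static void main(String[] args) {
        // Assumed host and base path; adjust to the controller's actual @RequestMapping.
        String base = "http://localhost:8080/api/change-detection";
        RestTemplate rest = new RestTemplate();

        // 1) List analysis years/rounds; each entry carries the analUid used below.
        String yearList = rest.getForObject(base + "/year-list", String.class);

        // 2) Per-class detection counts for an analUid (example value from the Swagger annotations).
        String classCounts = rest.getForObject(base + "/class-count/{id}", String.class, 1L);

        // 3) Before/after COG URLs plus the sheet bbox for a 5k map sheet number.
        String cogUrl = rest.getForObject(
                base + "/cog-url?beforeYear={b}&afterYear={a}&mapSheetNum={m}",
                String.class, 2023, 2024, "36809010");

        System.out.println(yearList);
        System.out.println(classCounts);
        System.out.println(cogUrl);
    }
}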

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.changedetection.dto;
import com.fasterxml.jackson.databind.JsonNode;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Getter;
@@ -7,7 +8,7 @@ import lombok.NoArgsConstructor;
import lombok.Setter;
import org.locationtech.jts.geom.Geometry;
public class ChangeDetectionDto {
@Schema(name = "TestDto", description = "테스트용")
@Getter
@@ -17,22 +18,54 @@ public class ChangeDetectionDto {
public static class TestDto {
private Long id;
private Geometry polygon;
private Double centroidX;
private Double centroidY;
}
@Schema(name = "PolygonGeometry", description = "폴리곤 리턴 객체")
@Schema(name = "CountDto", description = "분류별 탐지 건수")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class CountDto {
private String classCd; // class code (English)
private String className; // class name (Korean)
private Long count; // detection count
}
@Schema(name = "CogUrlReq", description = "COG Url Req")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class CogUrlReq {
private Integer beforeYear;
private Integer afterYear;
private String mapSheetNum;
}
@Schema(name = "CogUrlDto", description = "COG Url 정보")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class CogUrlDto {
private String beforeCogUrl;
private String afterCogUrl;
private JsonNode bbox;
}
@Schema(name = "AnalYearList", description = "년도(차수) 목록")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class AnalYearList {
private Long analUid;
private String analTitle;
private Integer beforeYear;
private Integer afterYear;
private String baseMapSheetNum;
}
@Schema(name = "PolygonGeometry", description = "폴리곤 리턴 객체")
@@ -40,22 +73,25 @@ public class ChangeDetectionDto {
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class PointGeometry {
private Long geoUid;
private String type; // "MultiPolygon"
private Geometry coordinates; // Polygon
private Double center_latitude; // 폴리곤 중심 위도
private Double center_longitude; // 폴리곤 중심 경도
private String type; // "Point"
private Geometry coordinates; //Point
private String before_class; //기준 분류
private String after_class; //비교 분류
}
@Schema(name = "CogURL", description = "COG URL")
@Schema(name = "PolygonGeometry", description = "폴리곤 리턴 객체")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class PolygonGeometry {
private Long geoUid;
private String type; // "MultiPolygon"
private Geometry coordinates; //Polygon 값
private Double center_latitude; //폴리곤 중심 위도
private Double center_longitude; //폴리곤 중심 경도
}
@Schema(name = "PolygonProperties", description = "폴리곤 정보")
@@ -63,13 +99,13 @@ public class ChangeDetectionDto {
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class PolygonProperties {
private Double area; // area
private String before_year; // baseline year
private Double before_confidence; // baseline confidence (probability)
private String before_class; // baseline class
private String after_year; // comparison year
private Double after_confidence; // comparison confidence (probability)
private String after_class; // comparison class
}
}

View File

@@ -20,4 +20,16 @@ public class ChangeDetectionService {
public List<JsonNode> getPolygonToJson() {
return changeDetectionCoreService.getPolygonToJson();
}
public List<ChangeDetectionDto.CountDto> getChangeDetectionClassCount(Long id) {
return changeDetectionCoreService.getChangeDetectionClassCount(id);
}
public ChangeDetectionDto.CogUrlDto getChangeDetectionCogUrl(ChangeDetectionDto.CogUrlReq req) {
return changeDetectionCoreService.getChangeDetectionCogUrl(req);
}
public List<ChangeDetectionDto.AnalYearList> getChangeDetectionYearList() {
return changeDetectionCoreService.getChangeDetectionYearList();
}
}

View File

@@ -3,11 +3,15 @@ package com.kamco.cd.kamcoback.geojson.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.time.ZonedDateTime;
import java.util.*;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.geojson.GeoJsonReader;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -18,7 +22,9 @@ import org.springframework.transaction.annotation.Transactional;
public class GeoJsonDataService {
private final MapSheetLearnDataRepository mapSheetLearnDataRepository;
private final MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
private final ObjectMapper objectMapper;
private final GeoJsonReader geoJsonReader = new GeoJsonReader();
/** Persist GeoJSON files to the database */
@Transactional
@@ -37,6 +43,11 @@ public class GeoJsonDataService {
if (savedId != null) {
savedIds.add(savedId);
log.debug("GeoJSON 파일 저장 성공: {} (ID: {})", fileName, savedId);
// If this is a learning-model result file, also process its geometry data
if (isLearningModelResult(fileName, geoJsonContent)) {
processLearningModelGeometry(savedId, geoJsonContent, fileName);
}
}
} catch (Exception e) {
log.error("GeoJSON 파일 처리 실패: {}", fileName, e);
@@ -163,7 +174,24 @@ public class GeoJsonDataService {
/** Extract year information */
private void setYearInformation(MapSheetLearnDataEntity entity, String fileName) {
// Try to extract years from the file name (e.g. kamco_2021_2022_35813023.geojson)
// Learning-model result files are detected and handled specially
if (fileName.matches(".*캠코_\\d{4}_\\d{4}_\\d+.*")) {
String[] parts = fileName.split("_");
if (parts.length >= 4) {
String beforeYear = parts[1];
String afterYear = parts[2];
// Store the comparison-year field as the first year, parsed as an integer
try {
entity.setCompareYyyy(Integer.parseInt(beforeYear));
log.debug("학습 모델 연도 정보 설정: {}", beforeYear);
} catch (NumberFormatException e) {
log.warn("연도 파싱 실패: {}", beforeYear, e);
}
return;
}
}
// Existing logic: try to extract a year from the file name
String[] parts = fileName.split("_");
for (String part : parts) {
if (part.matches("\\d{4}")) { // 4자리 숫자 (연도)
@@ -226,4 +254,177 @@ public class GeoJsonDataService {
return true;
}
/** Check whether a file is a learning-model result */
private boolean isLearningModelResult(String fileName, String geoJsonContent) {
try {
// Check the file name (캠코_YYYY_YYYY_<sheet number> pattern)
if (fileName.matches(".*캠코_\\d{4}_\\d{4}_\\d+.*\\.geojson")) {
return true;
}
// Check the GeoJSON content for learning-model-specific fields
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
if (rootNode.has("features")) {
JsonNode features = rootNode.get("features");
if (features.isArray() && features.size() > 0) {
JsonNode firstFeature = features.get(0);
if (firstFeature.has("properties")) {
JsonNode properties = firstFeature.get("properties");
// Learning-model-specific fields
return properties.has("cd_prob")
|| properties.has("class")
|| (properties.has("before") && properties.has("after"));
}
}
}
} catch (Exception e) {
log.debug("학습 모델 결과 파일 확인 중 오류: {}", fileName, e);
}
return false;
}
/** Process geometry data from a learning-model result */
@Transactional
public void processLearningModelGeometry(Long dataUid, String geoJsonContent, String fileName) {
try {
log.info("학습 모델 geometry 데이터 처리 시작: {} (dataUid: {})", fileName, dataUid);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Extract metadata
String mapSheetName =
rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Extract years and map sheet number from the file name (캠코_2021_2022_35813023)
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1];
afterYear = parts[2];
mapSheetNum = parts[3];
}
if (beforeYear == null || afterYear == null) {
log.warn("연도 정보를 추출할 수 없습니다: {}", fileName);
return;
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
log.warn("features 배열이 없습니다: {}", fileName);
return;
}
List<MapSheetLearnDataGeomEntity> geomEntities = new ArrayList<>();
int processedCount = 0;
for (JsonNode feature : features) {
try {
MapSheetLearnDataGeomEntity geomEntity =
createGeometryEntity(feature, dataUid, beforeYear, afterYear, mapSheetNum);
if (geomEntity != null) {
geomEntities.add(geomEntity);
processedCount++;
}
} catch (Exception e) {
log.warn("Feature geometry 처리 실패 (feature {}): {}", processedCount, e.getMessage());
}
}
// Batch save
if (!geomEntities.isEmpty()) {
mapSheetLearnDataGeomRepository.saveAll(geomEntities);
log.info("학습 모델 geometry 데이터 저장 완료: {} ({}개 feature)", fileName, geomEntities.size());
}
} catch (Exception e) {
log.error("학습 모델 geometry 데이터 처리 실패: {}", fileName, e);
}
}
/** Build a geometry entity from a single feature */
private MapSheetLearnDataGeomEntity createGeometryEntity(
JsonNode feature, Long dataUid, String beforeYear, String afterYear, String mapSheetNum) {
JsonNode properties = feature.get("properties");
JsonNode geometry = feature.get("geometry");
if (properties == null || geometry == null) {
return null;
}
MapSheetLearnDataGeomEntity entity = new MapSheetLearnDataGeomEntity();
// Basic fields
entity.setDataUid(dataUid);
entity.setBeforeYyyy(Integer.parseInt(beforeYear));
entity.setAfterYyyy(Integer.parseInt(afterYear));
if (mapSheetNum != null) {
try {
entity.setMapSheetNum(Long.parseLong(mapSheetNum));
} catch (NumberFormatException e) {
log.warn("지도번호 파싱 실패: {}", mapSheetNum, e);
}
}
// Change-detection probability
if (properties.has("cd_prob")) {
entity.setCdProb(properties.get("cd_prob").asDouble());
}
// Area
if (properties.has("area")) {
entity.setArea(properties.get("area").asDouble());
}
// Classification info
if (properties.has("class")) {
JsonNode classNode = properties.get("class");
// "before" classification
if (classNode.has("before") && classNode.get("before").isArray()) {
JsonNode beforeArray = classNode.get("before");
if (beforeArray.size() > 0) {
JsonNode firstBefore = beforeArray.get(0);
if (firstBefore.has("class_name")) {
entity.setClassBeforeName(firstBefore.get("class_name").asText());
}
if (firstBefore.has("probability")) {
entity.setClassBeforeProb(firstBefore.get("probability").asDouble());
}
}
}
// "after" classification
if (classNode.has("after") && classNode.get("after").isArray()) {
JsonNode afterArray = classNode.get("after");
if (afterArray.size() > 0) {
JsonNode firstAfter = afterArray.get(0);
if (firstAfter.has("class_name")) {
entity.setClassAfterName(firstAfter.get("class_name").asText());
}
if (firstAfter.has("probability")) {
entity.setClassAfterProb(firstAfter.get("probability").asDouble());
}
}
}
}
// Convert geometry
try {
Geometry geom = geoJsonReader.read(geometry.toString());
if (geom != null) {
geom.setSRID(5186); // EPSG:5186
entity.setGeom(geom);
}
} catch (Exception e) {
log.warn("Geometry 파싱 실패: {}", e.getMessage());
return null;
}
return entity;
}
}
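
For context, a sketch of the feature shape these parsers assume. The field names (cd_prob, area, class.before/class.after with class_name and probability) are taken from the checks above; the concrete values and the LearningModelFeatureSketch class are illustrative only.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class LearningModelFeatureSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative feature matching the properties read by createGeometryEntity.
        String feature = """
            {
              "type": "Feature",
              "properties": {
                "cd_prob": 0.91,
                "area": 1250.5,
                "class": {
                  "before": [{"class_name": "forest", "probability": 0.88}],
                  "after":  [{"class_name": "bare_land", "probability": 0.84}]
                }
              },
              "geometry": {"type": "Polygon",
                           "coordinates": [[[0, 0], [0, 1], [1, 1], [0, 0]]]}
            }""";
        JsonNode node = new ObjectMapper().readTree(feature);
        JsonNode props = node.get("properties");
        System.out.println(props.get("cd_prob").asDouble()); // 0.91
        System.out.println(props.get("class").get("before").get(0).get("class_name").asText()); // forest
    }
}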

View File

@@ -2,7 +2,9 @@ package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.LearningModelResultDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultService;
import com.kamco.cd.kamcoback.inference.service.LearningModelResultProcessor;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
@@ -10,10 +12,17 @@ import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@@ -24,7 +33,10 @@ import org.springframework.web.bind.annotation.RestController;
@RestController
public class InferenceResultApiController {
private static final Logger logger = LoggerFactory.getLogger(InferenceResultApiController.class);
private final InferenceResultService inferenceResultService;
private final LearningModelResultProcessor learningModelResultProcessor;
@Operation(summary = "추론관리 분석결과 목록 조회", description = "분석상태, 제목으로 분석결과를 조회 합니다.")
@ApiResponses(
@@ -132,4 +144,182 @@ public class InferenceResultApiController {
inferenceResultService.getInferenceResultGeomList(searchGeoReq);
return ApiResponseDto.ok(geomList);
}
@Operation(summary = "학습모델 결과 처리", description = "실제 학습모델 GeoJSON 파일을 처리하여 데이터베이스에 저장합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "처리 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(implementation = LearningModelResultDto.ProcessResponse.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/learning-model/process")
public ApiResponseDto<LearningModelResultDto.ProcessResponse> processLearningModelResult(
@RequestBody LearningModelResultDto.ProcessRequest request) {
try {
logger.info("Processing learning model result file: {}", request.getFilePath());
Path filePath = Paths.get(request.getFilePath());
int processedFeatures = learningModelResultProcessor.processLearningModelResult(filePath);
LearningModelResultDto.ProcessResponse response =
LearningModelResultDto.ProcessResponse.builder()
.success(true)
.message("학습모델 결과 처리가 완료되었습니다.")
.processedFeatures(processedFeatures)
.filePath(request.getFilePath())
.build();
logger.info(
"Successfully processed {} features from file: {}",
processedFeatures,
request.getFilePath());
return ApiResponseDto.ok(response);
} catch (Exception e) {
logger.error("Failed to process learning model result: {}", request.getFilePath(), e);
LearningModelResultDto.ProcessResponse response =
LearningModelResultDto.ProcessResponse.builder()
.success(false)
.message("학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
.processedFeatures(0)
.filePath(request.getFilePath())
.build();
return ApiResponseDto.ok(response);
}
}
@Operation(summary = "학습모델 결과 일괄 처리", description = "여러 학습모델 GeoJSON 파일을 일괄 처리하여 데이터베이스에 저장합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "처리 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(
implementation = LearningModelResultDto.BatchProcessResponse.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/learning-model/process-batch")
public ApiResponseDto<LearningModelResultDto.BatchProcessResponse>
processBatchLearningModelResults(
@RequestBody LearningModelResultDto.BatchProcessRequest request) {
try {
logger.info("Processing {} learning model result files", request.getFilePaths().size());
List<Path> filePaths = new ArrayList<>();
for (String filePath : request.getFilePaths()) {
filePaths.add(Paths.get(filePath));
}
int totalProcessedFeatures =
learningModelResultProcessor.processMultipleLearningModelResults(filePaths);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(true)
.message("일괄 학습모델 결과 처리가 완료되었습니다.")
.totalProcessedFeatures(totalProcessedFeatures)
.processedFileCount(request.getFilePaths().size())
.filePaths(request.getFilePaths())
.build();
logger.info(
"Successfully processed {} features from {} files",
totalProcessedFeatures,
request.getFilePaths().size());
return ApiResponseDto.ok(response);
} catch (Exception e) {
logger.error("Failed to process batch learning model results", e);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(false)
.message("일괄 학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
.totalProcessedFeatures(0)
.processedFileCount(0)
.filePaths(request.getFilePaths())
.build();
return ApiResponseDto.ok(response);
}
}
@Operation(summary = "기본 학습모델 파일 처리", description = "미리 준비된 학습모델 파일을 처리합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "처리 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(
implementation = LearningModelResultDto.BatchProcessResponse.class))),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/learning-model/process-default")
public ApiResponseDto<LearningModelResultDto.BatchProcessResponse>
processDefaultLearningModelResults() {
try {
logger.info("Processing default learning model result files");
// Process the two default learning model files from upload directory
List<String> defaultFilePaths =
List.of(
"/Users/deniallee/geojson/upload/캠코_2021_2022_35813023.geojson",
"/Users/deniallee/geojson/upload/캠코_2023_2024_35810049.geojson");
List<Path> filePaths = new ArrayList<>();
for (String filePath : defaultFilePaths) {
filePaths.add(Paths.get(filePath));
}
int totalProcessedFeatures =
learningModelResultProcessor.processMultipleLearningModelResults(filePaths);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(true)
.message("기본 학습모델 결과 파일 처리가 완료되었습니다.")
.totalProcessedFeatures(totalProcessedFeatures)
.processedFileCount(defaultFilePaths.size())
.filePaths(defaultFilePaths)
.build();
logger.info(
"Successfully processed {} features from {} default files",
totalProcessedFeatures,
defaultFilePaths.size());
return ApiResponseDto.ok(response);
} catch (Exception e) {
logger.error("Failed to process default learning model results", e);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(false)
.message("기본 학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
.totalProcessedFeatures(0)
.processedFileCount(0)
.filePaths(List.of())
.build();
return ApiResponseDto.ok(response);
}
}
}
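
A minimal sketch of invoking the single-file processing endpoint. The host and base path are assumptions (the class-level @RequestMapping is outside this diff), and the file path is a placeholder.

import java.util.Map;
import org.springframework.web.client.RestTemplate;

public class LearningModelProcessClientSketch {
    public static void main(String[] args) {
        // Assumed host and base path; adjust to the controller's actual @RequestMapping.
        String url = "http://localhost:8080/api/inference-results/learning-model/process";
        // Mirrors LearningModelResultDto.ProcessRequest, which carries a single filePath field.
        Map<String, String> request = Map.of("filePath", "/tmp/sample.geojson");
        String response = new RestTemplate().postForObject(url, request, String.class);
        System.out.println(response);
    }
}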

View File

@@ -0,0 +1,180 @@
package com.kamco.cd.kamcoback.inference.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/** DTO classes for learning model result processing */
public class LearningModelResultDto {
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 결과 처리 요청")
public static class ProcessRequest {
@Schema(
description = "GeoJSON 파일 경로",
example =
"src/main/resources/db/migration/sample-results_updated/캠코_2021_2022_35813023.geojson")
private String filePath;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 결과 처리 응답")
public static class ProcessResponse {
@Schema(description = "처리 성공 여부")
private boolean success;
@Schema(description = "처리 결과 메시지")
private String message;
@Schema(description = "처리된 feature 개수")
private int processedFeatures;
@Schema(description = "처리된 파일 경로")
private String filePath;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 결과 일괄 처리 요청")
public static class BatchProcessRequest {
@Schema(description = "GeoJSON 파일 경로 목록")
private List<String> filePaths;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 결과 일괄 처리 응답")
public static class BatchProcessResponse {
@Schema(description = "처리 성공 여부")
private boolean success;
@Schema(description = "처리 결과 메시지")
private String message;
@Schema(description = "전체 처리된 feature 개수")
private int totalProcessedFeatures;
@Schema(description = "처리된 파일 개수")
private int processedFileCount;
@Schema(description = "처리된 파일 경로 목록")
private List<String> filePaths;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 처리 상태")
public static class ProcessingStatus {
@Schema(description = "처리 ID")
private String processingId;
@Schema(description = "처리 상태 (PENDING, PROCESSING, COMPLETED, FAILED)")
private String status;
@Schema(description = "진행률 (0-100)")
private int progressPercentage;
@Schema(description = "현재 처리 중인 파일")
private String currentFile;
@Schema(description = "전체 파일 개수")
private int totalFiles;
@Schema(description = "처리 완료된 파일 개수")
private int completedFiles;
@Schema(description = "시작 시간")
private String startTime;
@Schema(description = "예상 완료 시간")
private String estimatedEndTime;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 데이터 요약")
public static class DataSummary {
@Schema(description = "전체 데이터 개수")
private long totalRecords;
@Schema(description = "연도별 데이터 개수")
private List<YearDataCount> yearDataCounts;
@Schema(description = "분류별 데이터 개수")
private List<ClassDataCount> classDataCounts;
@Schema(description = "지도 영역별 데이터 개수")
private List<MapSheetDataCount> mapSheetDataCounts;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "연도별 데이터 개수")
public static class YearDataCount {
@Schema(description = "비교 연도 (예: 2021_2022)")
private String compareYear;
@Schema(description = "데이터 개수")
private long count;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "분류별 데이터 개수")
public static class ClassDataCount {
@Schema(description = "분류명")
private String className;
@Schema(description = "변화 전 개수")
private long beforeCount;
@Schema(description = "변화 후 개수")
private long afterCount;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "지도 영역별 데이터 개수")
public static class MapSheetDataCount {
@Schema(description = "지도 영역 번호")
private String mapSheetNum;
@Schema(description = "데이터 개수")
private long count;
@Schema(description = "평균 변화 탐지 확률")
private double avgChangeDetectionProb;
}
}

View File

@@ -0,0 +1,384 @@
package com.kamco.cd.kamcoback.inference.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.geojson.GeoJsonReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* Service for processing actual learning model result GeoJSON files and storing them in the
* database with proper field mapping.
*/
@Service
public class LearningModelResultProcessor {
private static final Logger logger = LoggerFactory.getLogger(LearningModelResultProcessor.class);
@Autowired private MapSheetLearnDataRepository mapSheetLearnDataRepository;
@Autowired private MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
private final ObjectMapper objectMapper = new ObjectMapper();
private final GeoJsonReader geoJsonReader = new GeoJsonReader();
/**
* Process large learning model result files with optimized batch processing
*/
public int processLearningModelResultOptimized(Path geoJsonFilePath) {
try {
logger.info("Processing learning model result file (optimized): {}", geoJsonFilePath);
// Read and parse GeoJSON file
String geoJsonContent = Files.readString(geoJsonFilePath);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Validate GeoJSON structure
if (!rootNode.has("type") || !"FeatureCollection".equals(rootNode.get("type").asText())) {
throw new IllegalArgumentException("Invalid GeoJSON: Not a FeatureCollection");
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
throw new IllegalArgumentException("Invalid GeoJSON: No features array found");
}
// Extract metadata from file name and content
String fileName = geoJsonFilePath.getFileName().toString();
String mapSheetName = rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Parse years from filename
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1];
afterYear = parts[2];
mapSheetNum = parts[3];
}
if (beforeYear == null || afterYear == null || mapSheetNum == null) {
throw new IllegalArgumentException("Cannot parse years and map sheet number from filename: " + fileName);
}
int totalFeatures = features.size();
logger.info("Total features to process: {}", totalFeatures);
// Step 1: Create main data record first
MapSheetLearnDataEntity savedMainData = createMainDataRecord(geoJsonContent, fileName, geoJsonFilePath.toString(), beforeYear, afterYear, mapSheetNum);
// Step 2: Process features in small batches to avoid transaction timeout
int totalProcessed = 0;
int batchSize = 25; // Smaller batch size for reliability
for (int i = 0; i < totalFeatures; i += batchSize) {
int endIndex = Math.min(i + batchSize, totalFeatures);
logger.info("Processing batch {}-{} of {}", i + 1, endIndex, totalFeatures);
List<JsonNode> batch = new ArrayList<>();
for (int j = i; j < endIndex; j++) {
batch.add(features.get(j));
}
try {
int processed = processBatchSafely(batch, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
totalProcessed += processed;
logger.info("Batch processed successfully. Total so far: {}/{}", totalProcessed, totalFeatures);
} catch (Exception e) {
logger.error("Failed to process batch {}-{}: {}", i + 1, endIndex, e.getMessage());
// Continue with next batch instead of failing completely
}
}
logger.info("Successfully processed {} out of {} features from file: {}", totalProcessed, totalFeatures, fileName);
return totalProcessed;
} catch (IOException e) {
logger.error("Failed to read GeoJSON file: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result file", e);
} catch (Exception e) {
logger.error("Error processing learning model result: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result", e);
}
}
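// Note: with Spring's default proxy-based AOP, @Transactional is not applied to private
// methods invoked via self-calls, so the two helpers below run in the caller's context.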
@Transactional
private MapSheetLearnDataEntity createMainDataRecord(String geoJsonContent, String fileName, String filePath, String beforeYear, String afterYear, String mapSheetNum) {
MapSheetLearnDataEntity mainData = createMainDataEntity(geoJsonContent, fileName, filePath, beforeYear, afterYear, mapSheetNum);
MapSheetLearnDataEntity saved = mapSheetLearnDataRepository.save(mainData);
logger.info("Created main data record with ID: {}", saved.getId());
return saved;
}
@Transactional
private int processBatchSafely(List<JsonNode> features, Long dataUid, String beforeYear, String afterYear, String mapSheetNum) {
int processed = 0;
for (JsonNode feature : features) {
try {
long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000);
insertFeatureWithPostGIS(feature, geoUid, dataUid, beforeYear, afterYear, mapSheetNum);
processed++;
// Small delay to prevent ID collisions
try { Thread.sleep(1); } catch (InterruptedException e) { Thread.currentThread().interrupt(); }
} catch (Exception e) {
logger.warn("Failed to process individual feature: {}", e.getMessage());
// Continue processing other features in this batch
}
}
return processed;
}
/**
* Process a learning model result GeoJSON file and store it in the database
*
* @param geoJsonFilePath Path to the GeoJSON file
* @return Number of features processed
*/
@Transactional
public int processLearningModelResult(Path geoJsonFilePath) {
try {
logger.info("Processing learning model result file: {}", geoJsonFilePath);
// Read and parse GeoJSON file
String geoJsonContent = Files.readString(geoJsonFilePath);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Validate GeoJSON structure
if (!rootNode.has("type") || !"FeatureCollection".equals(rootNode.get("type").asText())) {
throw new IllegalArgumentException("Invalid GeoJSON: Not a FeatureCollection");
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
throw new IllegalArgumentException("Invalid GeoJSON: No features array found");
}
// Extract metadata from file name and content
String fileName = geoJsonFilePath.getFileName().toString();
String mapSheetName =
rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Parse years from filename or data (e.g., "캠코_2021_2022_35813023")
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1]; // 2021 or 2023
afterYear = parts[2]; // 2022 or 2024
mapSheetNum = parts[3]; // 35813023 or 35810049
}
if (beforeYear == null || afterYear == null || mapSheetNum == null) {
throw new IllegalArgumentException(
"Cannot parse years and map sheet number from filename: " + fileName);
}
// Create main data record
MapSheetLearnDataEntity mainData =
createMainDataEntity(
geoJsonContent,
fileName,
geoJsonFilePath.toString(),
beforeYear,
afterYear,
mapSheetNum);
MapSheetLearnDataEntity savedMainData = mapSheetLearnDataRepository.save(mainData);
logger.info("Saved main data record with ID: {}", savedMainData.getId());
// Process each feature in the GeoJSON using direct PostGIS insertion
int featureCount = 0;
int batchSize = 10; // Much smaller batch size to avoid transaction timeout
for (int i = 0; i < features.size(); i += batchSize) {
int endIndex = Math.min(i + batchSize, features.size());
logger.info("Processing batch {}-{} of {} features", i + 1, endIndex, features.size());
// Process each feature individually within this logging batch
for (int j = i; j < endIndex; j++) {
JsonNode feature = features.get(j);
try {
// Generate unique ID for this geometry entity
long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000) + j;
// Extract feature data and insert directly with PostGIS
insertFeatureWithPostGIS(feature, geoUid, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
featureCount++;
// Small delay to prevent issues
if (j % 5 == 0) {
try { Thread.sleep(10); } catch (InterruptedException e) { Thread.currentThread().interrupt(); }
}
} catch (Exception e) {
logger.warn("Failed to process feature {}: {}", j + 1, e.getMessage());
}
}
// Log progress after each batch
if (featureCount > 0 && endIndex % batchSize == 0) {
logger.info("Processed {} features so far, success rate: {:.1f}%",
featureCount, (featureCount * 100.0) / endIndex);
}
}
logger.info("Successfully processed {} features from file: {}", featureCount, fileName);
return featureCount;
} catch (IOException e) {
logger.error("Failed to read GeoJSON file: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result file", e);
} catch (Exception e) {
logger.error("Error processing learning model result: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result", e);
}
}
/** Create the main data entity for tb_map_sheet_learn_data table */
private MapSheetLearnDataEntity createMainDataEntity(
String geoJsonContent,
String fileName,
String filePath,
String beforeYear,
String afterYear,
String mapSheetNum) {
MapSheetLearnDataEntity entity = new MapSheetLearnDataEntity();
// Generate unique ID (using current timestamp + random component)
entity.setId(System.currentTimeMillis() + (long) (Math.random() * 1000));
LocalDateTime now = LocalDateTime.now();
entity.setAnalStrtDttm(ZonedDateTime.now());
entity.setAnalEndDttm(ZonedDateTime.now());
entity.setCompareYyyy(Integer.parseInt(beforeYear)); // store only the first (before) year
// Convert the JSON payload to a Map and store it
try {
@SuppressWarnings("unchecked")
Map<String, Object> jsonMap = objectMapper.readValue(geoJsonContent, Map.class);
entity.setDataJson(jsonMap);
} catch (Exception e) {
logger.warn("JSON 파싱 실패, 빈 Map으로 저장: {}", fileName, e);
entity.setDataJson(new HashMap<>());
}
entity.setDataName(fileName);
entity.setDataPath(filePath);
entity.setDataState("PROCESSED");
entity.setCreatedDttm(ZonedDateTime.now());
entity.setUpdatedDttm(ZonedDateTime.now());
return entity;
}
/** Insert GeoJSON feature directly using PostGIS functions */
private void insertFeatureWithPostGIS(
JsonNode feature, Long geoUid, Long dataUid, String beforeYear, String afterYear, String mapSheetNum)
throws Exception {
JsonNode properties = feature.get("properties");
JsonNode geometry = feature.get("geometry");
if (properties == null || geometry == null) {
throw new IllegalArgumentException("Feature missing properties or geometry");
}
// Extract properties
Double cdProb = properties.has("cd_prob") ? properties.get("cd_prob").asDouble() : null;
Double area = properties.has("area") ? properties.get("area").asDouble() : null;
String classBeforeName = null;
Double classBeforeProb = null;
String classAfterName = null;
Double classAfterProb = null;
// Classification data
JsonNode classNode = properties.get("class");
if (classNode != null) {
// Before classification
JsonNode beforeClass = classNode.get("before");
if (beforeClass != null && beforeClass.isArray() && beforeClass.size() > 0) {
JsonNode firstBefore = beforeClass.get(0);
if (firstBefore.has("class_name")) {
classBeforeName = firstBefore.get("class_name").asText();
}
if (firstBefore.has("probability")) {
classBeforeProb = firstBefore.get("probability").asDouble();
}
}
// After classification
JsonNode afterClass = classNode.get("after");
if (afterClass != null && afterClass.isArray() && afterClass.size() > 0) {
JsonNode firstAfter = afterClass.get(0);
if (firstAfter.has("class_name")) {
classAfterName = firstAfter.get("class_name").asText();
}
if (firstAfter.has("probability")) {
classAfterProb = firstAfter.get("probability").asDouble();
}
}
}
// Get geometry type
String geoType = geometry.has("type") ? geometry.get("type").asText() : "Unknown";
// Convert geometry to JSON string for PostGIS
String geometryJson = geometry.toString();
// Insert using PostGIS functions
mapSheetLearnDataGeomRepository.insertWithPostGISGeometry(
geoUid, cdProb, classBeforeName, classBeforeProb,
classAfterName, classAfterProb, Long.parseLong(mapSheetNum),
Integer.parseInt(beforeYear), Integer.parseInt(afterYear),
area, geometryJson, geoType, dataUid
);
logger.debug("Inserted geometry entity with ID: {} using PostGIS", geoUid);
}
/**
* Process multiple learning model result files
*
* @param filePaths List of GeoJSON file paths
* @return Total number of features processed across all files
*/
@Transactional
public int processMultipleLearningModelResults(List<Path> filePaths) {
int totalProcessed = 0;
for (Path filePath : filePaths) {
try {
int processed = processLearningModelResult(filePath);
totalProcessed += processed;
logger.info("Processed {} features from file: {}", processed, filePath.getFileName());
} catch (Exception e) {
logger.error("Failed to process file: {}", filePath, e);
// Continue processing other files even if one fails
}
}
logger.info("Total features processed across all files: {}", totalProcessed);
return totalProcessed;
}
}

View File

@@ -50,4 +50,25 @@ public class ChangeDetectionCoreService {
})
.collect(Collectors.toList());
}
public List<ChangeDetectionDto.CountDto> getChangeDetectionClassCount(Long id) {
return changeDetectionRepository.getChangeDetectionClassCount(id);
}
public ChangeDetectionDto.CogUrlDto getChangeDetectionCogUrl(ChangeDetectionDto.CogUrlReq req) {
ObjectMapper mapper = new ObjectMapper();
ChangeDetectionDto.CogUrlDto resultDto = changeDetectionRepository.getChangeDetectionCogUrl(req);
try {
JsonNode geomNode = mapper.readTree(resultDto.getBbox().toString());
resultDto.setBbox(geomNode);
} catch (Exception e) {
throw new RuntimeException("Failed to parse geom JSON", e);
}
return resultDto;
}
public List<ChangeDetectionDto.AnalYearList> getChangeDetectionYearList() {
return changeDetectionRepository.getChangeDetectionYearList();
}
}

View File

@@ -0,0 +1,55 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.*;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import java.time.ZonedDateTime;
import java.util.UUID;
@Getter
@Setter
@Table(name = "imagery")
@Entity
public class ImageryEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "imagery_id_seq_gen")
@SequenceGenerator(name = "imagery_id_seq_gen", sequenceName = "imagery_id_seq", allocationSize = 1)
@Column(name = "id", nullable = false)
private Long id;
@Column(name = "uuid", columnDefinition = "uuid default gen_random_uuid()")
private UUID uuid;
@Column(name = "year")
private Integer year;
@Column(name = "scene_50k")
private String scene50k;
@Column(name = "scene_5k")
private String scene5k;
@Column(name = "scene_id_50k")
private Integer sceneId50k;
@Column(name = "scene_id_5k")
private Integer sceneId5k;
@ColumnDefault("now()")
@Column(name = "created_date", columnDefinition = "TIMESTAMP WITH TIME ZONE DEFAULT now()")
private ZonedDateTime createdDate;
@Column(name = "middle_path")
private String middlePath;
@Column(name = "cog_middle_path")
private String cogMiddlePath;
@Column(name = "filename")
private String filename;
@Column(name = "cog_filename")
private String cogFilename;
}

View File

@@ -0,0 +1,33 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.*;
import lombok.Getter;
import lombok.Setter;
import org.locationtech.jts.geom.Geometry;
@Getter
@Setter
@Table(name = "tb_map_inkx_50k")
@Entity
public class MapInkx50kEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_inkx_50k_fid_seq_gen")
@SequenceGenerator(
name = "tb_map_inkx_50k_fid_seq_gen",
sequenceName = "tb_map_inkx_50k_fid_seq",
allocationSize = 1)
private Integer fid;
@Column(name = "mapidcd_no")
private String mapidcdNo;
@Column(name = "mapid_nm")
private String mapidNm;
@Column(name = "mapid_no")
private String mapidNo;
@Column(name = "geom")
private Geometry geom;
}

View File

@@ -0,0 +1,32 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.*;
import lombok.Getter;
import lombok.Setter;
import org.locationtech.jts.geom.Geometry;
@Getter
@Setter
@Table(name = "tb_map_inkx_5k")
@Entity
public class MapInkx5kEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_inkx_5k_fid_seq_gen")
@SequenceGenerator(
name = "tb_map_inkx_5k_fid_seq_gen",
sequenceName = "tb_map_inkx_5k_fid_seq",
allocationSize = 1)
private Integer fid;
@Column(name = "mapidcd_no")
private String mapidcdNo;
@Column(name = "mapid_nm")
private String mapidNm;
@Column(name = "geom")
private Geometry geom;
@Column(name = "fid_k50")
private Long fidK50;
}

View File

@@ -91,4 +91,7 @@ public class MapSheetAnalEntity {
@Column(name = "detecting_cnt")
private Long detectingCnt;
@Column(name = "base_map_sheet_num")
private String baseMapSheetNum;
}

View File

@@ -47,6 +47,12 @@ public class MapSheetAnalSttcEntity {
@Column(name = "data_uid", nullable = false)
private Long dataUid;
@Column(name = "class_before_cd")
private String classBeforeCd;
@Column(name = "class_after_cd")
private String classAfterCd;
public InferenceResultDto.Dashboard toDto() {
return new InferenceResultDto.Dashboard(
id.getCompareYyyy(),

View File

@@ -3,7 +3,11 @@ package com.kamco.cd.kamcoback.postgres.repository;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
@Repository
public interface MapSheetLearnDataGeomRepository
@@ -24,4 +28,40 @@ public interface MapSheetLearnDataGeomRepository
/** Delete existing geometry rows by data UID (used before regeneration) */
void deleteByDataUid(Long dataUid);
/**
* Insert geometry data directly using PostGIS functions;
* stored as ST_SetSRID(ST_GeomFromGeoJSON(...), 5186).
*/
@Modifying
@Transactional
@Query(value = """
INSERT INTO tb_map_sheet_learn_data_geom (
geo_uid, cd_prob, class_before_name, class_before_prob,
class_after_name, class_after_prob, map_sheet_num,
before_yyyy, after_yyyy, area, geom, geo_type, data_uid,
created_dttm, updated_dttm
) VALUES (
:geoUid, :cdProb, :classBeforeName, :classBeforeProb,
:classAfterName, :classAfterProb, :mapSheetNum,
:beforeYyyy, :afterYyyy, :area,
ST_SetSRID(ST_GeomFromGeoJSON(CAST(:geometryJson AS TEXT)), 5186),
:geoType, :dataUid, NOW(), NOW()
) ON CONFLICT (geo_uid) DO NOTHING
""", nativeQuery = true)
void insertWithPostGISGeometry(
@Param("geoUid") Long geoUid,
@Param("cdProb") Double cdProb,
@Param("classBeforeName") String classBeforeName,
@Param("classBeforeProb") Double classBeforeProb,
@Param("classAfterName") String classAfterName,
@Param("classAfterProb") Double classAfterProb,
@Param("mapSheetNum") Long mapSheetNum,
@Param("beforeYyyy") Integer beforeYyyy,
@Param("afterYyyy") Integer afterYyyy,
@Param("area") Double area,
@Param("geometryJson") String geometryJson,
@Param("geoType") String geoType,
@Param("dataUid") Long dataUid
);
}
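
A sketch of calling the native insert. The PostGisInsertSketch component, the sample EPSG:5186 coordinates, and a running PostGIS database are assumptions; the parameter order follows the method signature above.

import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class PostGisInsertSketch {
    @Autowired private MapSheetLearnDataGeomRepository repo;

    public void insertSample() {
        // Illustrative GeoJSON polygon in EPSG:5186 map coordinates.
        String geojson = """
            {"type":"Polygon","coordinates":[[[200000,600000],[200010,600000],[200010,600010],[200000,600000]]]}""";
        repo.insertWithPostGISGeometry(
                1L,                 // geoUid
                0.91,               // cdProb
                "forest", 0.88,     // classBeforeName / classBeforeProb
                "bare_land", 0.84,  // classAfterName / classAfterProb
                35813023L,          // mapSheetNum
                2021, 2022,         // beforeYyyy / afterYyyy
                1250.5,             // area
                geojson,            // geometryJson -> ST_SetSRID(ST_GeomFromGeoJSON(...), 5186)
                "Polygon",          // geoType
                1L);                // dataUid
    }
}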

View File

@@ -1,5 +1,8 @@
package com.kamco.cd.kamcoback.postgres.repository.changedetection;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import java.util.List;
public interface ChangeDetectionRepositoryCustom {
@@ -7,4 +10,10 @@ public interface ChangeDetectionRepositoryCustom {
String getPolygonToPoint();
List<String> findPolygonJson();
List<ChangeDetectionDto.CountDto> getChangeDetectionClassCount(Long id);
ChangeDetectionDto.CogUrlDto getChangeDetectionCogUrl(ChangeDetectionDto.CogUrlReq req);
List<ChangeDetectionDto.AnalYearList> getChangeDetectionYearList();
}

View File

@@ -1,13 +1,28 @@
package com.kamco.cd.kamcoback.postgres.repository.changedetection;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.querydsl.core.Tuple;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.List;
import java.util.Objects;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
import static com.kamco.cd.kamcoback.postgres.entity.QImageryEntity.imageryEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity.mapInkx5kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataEntity.mapSheetAnalDataEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity.mapSheetAnalEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
implements ChangeDetectionRepositoryCustom {
@@ -32,4 +47,83 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
.orderBy(mapSheetAnalDataGeomEntity.id.desc())
.fetch();
}
@Override
public List<ChangeDetectionDto.CountDto> getChangeDetectionClassCount(Long id) {
return queryFactory
.select(Projections.constructor(
ChangeDetectionDto.CountDto.class,
mapSheetAnalSttcEntity.classAfterCd.toUpperCase(),
mapSheetAnalSttcEntity.id.classAfterName,
mapSheetAnalSttcEntity.classAfterCnt.sum()
))
.from(mapSheetAnalEntity)
.innerJoin(mapSheetAnalDataEntity)
.on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
.innerJoin(mapSheetAnalSttcEntity)
.on(mapSheetAnalSttcEntity.dataUid.eq(mapSheetAnalDataEntity.id))
.where(mapSheetAnalEntity.id.eq(id))
.groupBy(mapSheetAnalSttcEntity.classAfterCd, mapSheetAnalSttcEntity.id.classAfterName)
.fetch();
}
@Override
public ChangeDetectionDto.CogUrlDto getChangeDetectionCogUrl(ChangeDetectionDto.CogUrlReq req) {
Tuple result = queryFactory
.select(
makeCogUrl(req.getBeforeYear()).max().as("beforeCogUrl"),
makeCogUrl(req.getAfterYear()).max().as("afterCogUrl"),
Expressions.stringTemplate("ST_AsGeoJSON({0})", mapInkx5kEntity.geom).as("bbox")
)
.from(imageryEntity)
.innerJoin(mapInkx5kEntity)
.on(imageryEntity.scene5k.eq(mapInkx5kEntity.mapidcdNo))
.where(
imageryEntity.year.eq(req.getBeforeYear()).or(imageryEntity.year.eq(req.getAfterYear())),
imageryEntity.scene5k.eq(req.getMapSheetNum())
)
.groupBy(mapInkx5kEntity.geom)
.fetchOne();
// Convert Polygon -> JsonNode
JsonNode geometryJson = changeGeometryJson(Objects.requireNonNull(result).get(2, String.class));
return new ChangeDetectionDto.CogUrlDto(result.get(0, String.class), result.get(1, String.class), geometryJson);
}
@Override
public List<ChangeDetectionDto.AnalYearList> getChangeDetectionYearList() {
return queryFactory
.select(
Projections.constructor(
ChangeDetectionDto.AnalYearList.class,
mapSheetAnalEntity.id,
mapSheetAnalEntity.analTitle,
mapSheetAnalEntity.compareYyyy.as("beforeYear"),
mapSheetAnalEntity.targetYyyy.as("afterYear"),
mapSheetAnalEntity.baseMapSheetNum
)
)
.from(mapSheetAnalEntity)
.orderBy(mapSheetAnalEntity.id.asc())
.fetch();
}
private StringExpression makeCogUrl(Integer year) {
return new CaseBuilder()
.when(imageryEntity.year.eq(year))
.then(Expressions.stringTemplate("{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
.otherwise("");
}
private JsonNode changeGeometryJson(String geometry) {
ObjectMapper mapper = new ObjectMapper();
try {
return mapper.readTree(geometry);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -21,6 +21,13 @@ spring:
hikari:
minimum-idle: 10
maximum-pool-size: 20
connection-timeout: 60000 # connection timeout: 60s
idle-timeout: 300000 # idle timeout: 5 min
max-lifetime: 1800000 # max connection lifetime: 30 min
leak-detection-threshold: 60000 # connection-leak detection: 60s
transaction:
default-timeout: 300 # transaction timeout: 5 min (seconds)

View File

@@ -5,7 +5,7 @@ spring:
application:
name: kamco-change-detection-api
profiles:
active: dev # active profile (e.g. dev, prod, test)
datasource:
driver-class-name: org.postgresql.Driver

View File

@@ -0,0 +1,27 @@
-- Fix geometry column type in tb_map_sheet_learn_data_geom table
-- The table was incorrectly created with 'bytea' type instead of 'geometry' type
-- 1. First ensure PostGIS is enabled
CREATE EXTENSION IF NOT EXISTS postgis;
-- 2. Clear existing data since it's in incorrect format (JTS serialized objects)
-- This data needs to be reprocessed anyway with the correct PostGIS approach
DELETE FROM public.tb_map_sheet_learn_data_geom;
-- 3. Drop and recreate the geom column with correct PostGIS geometry type
ALTER TABLE public.tb_map_sheet_learn_data_geom DROP COLUMN IF EXISTS geom;
ALTER TABLE public.tb_map_sheet_learn_data_geom ADD COLUMN geom geometry(Polygon, 5186);
-- 4. Create spatial index for performance
CREATE INDEX IF NOT EXISTS idx_tb_map_sheet_learn_data_geom_spatial
ON public.tb_map_sheet_learn_data_geom USING GIST (geom);
-- 5. Update column comment
COMMENT ON COLUMN public.tb_map_sheet_learn_data_geom.geom IS 'PostGIS geometry 정보 (Polygon, EPSG:5186)';
-- 6. Verify the column type is correct
SELECT column_name, data_type, udt_name
FROM information_schema.columns
WHERE table_name = 'tb_map_sheet_learn_data_geom' AND column_name = 'geom';
SELECT 'Geometry column type fixed successfully' as message;

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
PROJCS["KGD2002_Central_Belt_2010",GEOGCS["GCS_KGD2002",DATUM["D_Korea_Geodetic_Datum_2002",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000.0],PARAMETER["False_Northing",600000.0],PARAMETER["Central_Meridian",127.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Latitude_Of_Origin",38.0],UNIT["Meter",1.0]]

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
PROJCS["KGD2002_Central_Belt_2010",GEOGCS["GCS_KGD2002",DATUM["D_Korea_Geodetic_Datum_2002",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000.0],PARAMETER["False_Northing",600000.0],PARAMETER["Central_Meridian",127.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Latitude_Of_Origin",38.0],UNIT["Meter",1.0]]

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
PROJCS["Korea_2000_Korea_Unified_Coordinate_System",GEOGCS["GCS_Korea_2000",DATUM["D_Korea_2000",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1000000.0],PARAMETER["False_Northing",2000000.0],PARAMETER["Central_Meridian",127.5],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",38.0],UNIT["Meter",1.0]]