Geojson Polygon DATA Operating System Build Complete - Daniel C No.5
@@ -2,7 +2,9 @@ package com.kamco.cd.kamcoback.inference;

import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.LearningModelResultDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultService;
import com.kamco.cd.kamcoback.inference.service.LearningModelResultProcessor;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
@@ -10,10 +12,17 @@ import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@@ -24,7 +33,10 @@ import org.springframework.web.bind.annotation.RestController;
@RestController
public class InferenceResultApiController {

  private static final Logger logger = LoggerFactory.getLogger(InferenceResultApiController.class);

  private final InferenceResultService inferenceResultService;
  private final LearningModelResultProcessor learningModelResultProcessor;

  @Operation(summary = "추론관리 분석결과 목록 조회", description = "분석상태, 제목으로 분석결과를 조회 합니다.")
  @ApiResponses(
@@ -132,4 +144,182 @@ public class InferenceResultApiController {
        inferenceResultService.getInferenceResultGeomList(searchGeoReq);
    return ApiResponseDto.ok(geomList);
  }

  @Operation(summary = "학습모델 결과 처리", description = "실제 학습모델 GeoJSON 파일을 처리하여 데이터베이스에 저장합니다.")
  @ApiResponses(
      value = {
        @ApiResponse(
            responseCode = "200",
            description = "처리 성공",
            content =
                @Content(
                    mediaType = "application/json",
                    schema =
                        @Schema(implementation = LearningModelResultDto.ProcessResponse.class))),
        @ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
      })
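  // Example request body for this endpoint (illustrative sketch taken from the ProcessRequest
  // schema example; the full URL also depends on the class-level @RequestMapping prefix, which is
  // outside this diff):
  //   { "filePath": "src/main/resources/db/migration/sample-results_updated/캠코_2021_2022_35813023.geojson" }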
  @PostMapping("/learning-model/process")
  public ApiResponseDto<LearningModelResultDto.ProcessResponse> processLearningModelResult(
      @RequestBody LearningModelResultDto.ProcessRequest request) {
    try {
      logger.info("Processing learning model result file: {}", request.getFilePath());

      Path filePath = Paths.get(request.getFilePath());
      int processedFeatures = learningModelResultProcessor.processLearningModelResult(filePath);

      LearningModelResultDto.ProcessResponse response =
          LearningModelResultDto.ProcessResponse.builder()
              .success(true)
              .message("학습모델 결과 처리가 완료되었습니다.")
              .processedFeatures(processedFeatures)
              .filePath(request.getFilePath())
              .build();

      logger.info(
          "Successfully processed {} features from file: {}",
          processedFeatures,
          request.getFilePath());
      return ApiResponseDto.ok(response);

    } catch (Exception e) {
      logger.error("Failed to process learning model result: {}", request.getFilePath(), e);

      LearningModelResultDto.ProcessResponse response =
          LearningModelResultDto.ProcessResponse.builder()
              .success(false)
              .message("학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
              .processedFeatures(0)
              .filePath(request.getFilePath())
              .build();

      return ApiResponseDto.ok(response);
    }
  }

  @Operation(summary = "학습모델 결과 일괄 처리", description = "여러 학습모델 GeoJSON 파일을 일괄 처리하여 데이터베이스에 저장합니다.")
  @ApiResponses(
      value = {
        @ApiResponse(
            responseCode = "200",
            description = "처리 성공",
            content =
                @Content(
                    mediaType = "application/json",
                    schema =
                        @Schema(
                            implementation = LearningModelResultDto.BatchProcessResponse.class))),
        @ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
      })
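  // Example request body (illustrative sketch using the same files that the
  // /learning-model/process-default endpoint below processes):
  //   { "filePaths": ["/Users/deniallee/geojson/upload/캠코_2021_2022_35813023.geojson",
  //                   "/Users/deniallee/geojson/upload/캠코_2023_2024_35810049.geojson"] }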
  @PostMapping("/learning-model/process-batch")
  public ApiResponseDto<LearningModelResultDto.BatchProcessResponse>
      processBatchLearningModelResults(
          @RequestBody LearningModelResultDto.BatchProcessRequest request) {
    try {
      logger.info("Processing {} learning model result files", request.getFilePaths().size());

      List<Path> filePaths = new ArrayList<>();
      for (String filePath : request.getFilePaths()) {
        filePaths.add(Paths.get(filePath));
      }

      int totalProcessedFeatures =
          learningModelResultProcessor.processMultipleLearningModelResults(filePaths);

      LearningModelResultDto.BatchProcessResponse response =
          LearningModelResultDto.BatchProcessResponse.builder()
              .success(true)
              .message("일괄 학습모델 결과 처리가 완료되었습니다.")
              .totalProcessedFeatures(totalProcessedFeatures)
              .processedFileCount(request.getFilePaths().size())
              .filePaths(request.getFilePaths())
              .build();

      logger.info(
          "Successfully processed {} features from {} files",
          totalProcessedFeatures,
          request.getFilePaths().size());
      return ApiResponseDto.ok(response);

    } catch (Exception e) {
      logger.error("Failed to process batch learning model results", e);

      LearningModelResultDto.BatchProcessResponse response =
          LearningModelResultDto.BatchProcessResponse.builder()
              .success(false)
              .message("일괄 학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
              .totalProcessedFeatures(0)
              .processedFileCount(0)
              .filePaths(request.getFilePaths())
              .build();

      return ApiResponseDto.ok(response);
    }
  }

  @Operation(summary = "기본 학습모델 파일 처리", description = "미리 준비된 학습모델 파일을 처리합니다.")
  @ApiResponses(
      value = {
        @ApiResponse(
            responseCode = "200",
            description = "처리 성공",
            content =
                @Content(
                    mediaType = "application/json",
                    schema =
                        @Schema(
                            implementation = LearningModelResultDto.BatchProcessResponse.class))),
        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
      })
  @PostMapping("/learning-model/process-default")
  public ApiResponseDto<LearningModelResultDto.BatchProcessResponse>
      processDefaultLearningModelResults() {
    try {
      logger.info("Processing default learning model result files");

      // Process the two default learning model files from upload directory
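      // NOTE: these are absolute, developer-machine-specific paths; they will only resolve on a
      // host where the files exist at exactly these locations.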
      List<String> defaultFilePaths =
          List.of(
              "/Users/deniallee/geojson/upload/캠코_2021_2022_35813023.geojson",
              "/Users/deniallee/geojson/upload/캠코_2023_2024_35810049.geojson");

      List<Path> filePaths = new ArrayList<>();
      for (String filePath : defaultFilePaths) {
        filePaths.add(Paths.get(filePath));
      }

      int totalProcessedFeatures =
          learningModelResultProcessor.processMultipleLearningModelResults(filePaths);

      LearningModelResultDto.BatchProcessResponse response =
          LearningModelResultDto.BatchProcessResponse.builder()
              .success(true)
              .message("기본 학습모델 결과 파일 처리가 완료되었습니다.")
              .totalProcessedFeatures(totalProcessedFeatures)
              .processedFileCount(defaultFilePaths.size())
              .filePaths(defaultFilePaths)
              .build();

      logger.info(
          "Successfully processed {} features from {} default files",
          totalProcessedFeatures,
          defaultFilePaths.size());
      return ApiResponseDto.ok(response);

    } catch (Exception e) {
      logger.error("Failed to process default learning model results", e);

      LearningModelResultDto.BatchProcessResponse response =
          LearningModelResultDto.BatchProcessResponse.builder()
              .success(false)
              .message("기본 학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
              .totalProcessedFeatures(0)
              .processedFileCount(0)
              .filePaths(List.of())
              .build();

      return ApiResponseDto.ok(response);
    }
  }
}

@@ -0,0 +1,180 @@
package com.kamco.cd.kamcoback.inference.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

/** DTO classes for learning model result processing */
public class LearningModelResultDto {

  @Data
  @NoArgsConstructor
  @AllArgsConstructor
  @Builder
  @Schema(description = "학습모델 결과 처리 요청")
  public static class ProcessRequest {

    @Schema(
        description = "GeoJSON 파일 경로",
        example =
            "src/main/resources/db/migration/sample-results_updated/캠코_2021_2022_35813023.geojson")
    private String filePath;
  }

  @Data
  @NoArgsConstructor
  @AllArgsConstructor
  @Builder
  @Schema(description = "학습모델 결과 처리 응답")
  public static class ProcessResponse {

    @Schema(description = "처리 성공 여부")
    private boolean success;

    @Schema(description = "처리 결과 메시지")
    private String message;

    @Schema(description = "처리된 feature 개수")
    private int processedFeatures;

    @Schema(description = "처리된 파일 경로")
    private String filePath;
  }

  @Data
  @NoArgsConstructor
  @AllArgsConstructor
  @Builder
  @Schema(description = "학습모델 결과 일괄 처리 요청")
  public static class BatchProcessRequest {

    @Schema(description = "GeoJSON 파일 경로 목록")
    private List<String> filePaths;
  }

  @Data
  @NoArgsConstructor
  @AllArgsConstructor
  @Builder
  @Schema(description = "학습모델 결과 일괄 처리 응답")
  public static class BatchProcessResponse {

    @Schema(description = "처리 성공 여부")
    private boolean success;

    @Schema(description = "처리 결과 메시지")
    private String message;

    @Schema(description = "전체 처리된 feature 개수")
    private int totalProcessedFeatures;

    @Schema(description = "처리된 파일 개수")
    private int processedFileCount;

    @Schema(description = "처리된 파일 경로 목록")
    private List<String> filePaths;
  }

  @Data
  @NoArgsConstructor
  @AllArgsConstructor
  @Builder
  @Schema(description = "학습모델 처리 상태")
  public static class ProcessingStatus {

    @Schema(description = "처리 ID")
    private String processingId;

    @Schema(description = "처리 상태 (PENDING, PROCESSING, COMPLETED, FAILED)")
    private String status;

    @Schema(description = "진행률 (0-100)")
    private int progressPercentage;

    @Schema(description = "현재 처리 중인 파일")
    private String currentFile;

    @Schema(description = "전체 파일 개수")
    private int totalFiles;

    @Schema(description = "처리 완료된 파일 개수")
    private int completedFiles;

    @Schema(description = "시작 시간")
    private String startTime;

    @Schema(description = "예상 완료 시간")
    private String estimatedEndTime;
  }

  @Data
  @NoArgsConstructor
  @AllArgsConstructor
  @Builder
  @Schema(description = "학습모델 데이터 요약")
  public static class DataSummary {

    @Schema(description = "전체 데이터 개수")
    private long totalRecords;

    @Schema(description = "연도별 데이터 개수")
    private List<YearDataCount> yearDataCounts;

    @Schema(description = "분류별 데이터 개수")
    private List<ClassDataCount> classDataCounts;

    @Schema(description = "지도 영역별 데이터 개수")
    private List<MapSheetDataCount> mapSheetDataCounts;
  }

  @Data
  @NoArgsConstructor
  @AllArgsConstructor
  @Builder
  @Schema(description = "연도별 데이터 개수")
  public static class YearDataCount {

    @Schema(description = "비교 연도 (예: 2021_2022)")
    private String compareYear;

    @Schema(description = "데이터 개수")
    private long count;
  }

  @Data
  @NoArgsConstructor
  @AllArgsConstructor
  @Builder
  @Schema(description = "분류별 데이터 개수")
  public static class ClassDataCount {

    @Schema(description = "분류명")
    private String className;

    @Schema(description = "변화 전 개수")
    private long beforeCount;

    @Schema(description = "변화 후 개수")
    private long afterCount;
  }

  @Data
  @NoArgsConstructor
  @AllArgsConstructor
  @Builder
  @Schema(description = "지도 영역별 데이터 개수")
  public static class MapSheetDataCount {

    @Schema(description = "지도 영역 번호")
    private String mapSheetNum;

    @Schema(description = "데이터 개수")
    private long count;

    @Schema(description = "평균 변화 탐지 확률")
    private double avgChangeDetectionProb;
  }
}
@@ -0,0 +1,384 @@
package com.kamco.cd.kamcoback.inference.service;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.geojson.GeoJsonReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

/**
 * Service for processing actual learning model result GeoJSON files and storing them in the
 * database with proper field mapping.
 */
@Service
public class LearningModelResultProcessor {

  private static final Logger logger = LoggerFactory.getLogger(LearningModelResultProcessor.class);

  @Autowired private MapSheetLearnDataRepository mapSheetLearnDataRepository;

  @Autowired private MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;

  private final ObjectMapper objectMapper = new ObjectMapper();
  private final GeoJsonReader geoJsonReader = new GeoJsonReader();

  /**
   * Process large learning model result files with optimized batch processing
   */
  public int processLearningModelResultOptimized(Path geoJsonFilePath) {
    try {
      logger.info("Processing learning model result file (optimized): {}", geoJsonFilePath);

      // Read and parse GeoJSON file
      String geoJsonContent = Files.readString(geoJsonFilePath);
      JsonNode rootNode = objectMapper.readTree(geoJsonContent);

      // Validate GeoJSON structure
      if (!rootNode.has("type") || !"FeatureCollection".equals(rootNode.get("type").asText())) {
        throw new IllegalArgumentException("Invalid GeoJSON: Not a FeatureCollection");
      }

      JsonNode features = rootNode.get("features");
      if (features == null || !features.isArray()) {
        throw new IllegalArgumentException("Invalid GeoJSON: No features array found");
      }

      // Extract metadata from file name and content
      String fileName = geoJsonFilePath.getFileName().toString();
      String mapSheetName =
          rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");

      // Parse years from filename
      String[] parts = mapSheetName.split("_");
      String beforeYear = null, afterYear = null, mapSheetNum = null;

      if (parts.length >= 4) {
        beforeYear = parts[1];
        afterYear = parts[2];
        mapSheetNum = parts[3];
      }

      if (beforeYear == null || afterYear == null || mapSheetNum == null) {
        throw new IllegalArgumentException(
            "Cannot parse years and map sheet number from filename: " + fileName);
      }

      int totalFeatures = features.size();
      logger.info("Total features to process: {}", totalFeatures);

      // Step 1: Create main data record first
      MapSheetLearnDataEntity savedMainData =
          createMainDataRecord(
              geoJsonContent,
              fileName,
              geoJsonFilePath.toString(),
              beforeYear,
              afterYear,
              mapSheetNum);

      // Step 2: Process features in small batches to avoid transaction timeout
      int totalProcessed = 0;
      int batchSize = 25; // Smaller batch size for reliability

      for (int i = 0; i < totalFeatures; i += batchSize) {
        int endIndex = Math.min(i + batchSize, totalFeatures);
        logger.info("Processing batch {}-{} of {}", i + 1, endIndex, totalFeatures);

        List<JsonNode> batch = new ArrayList<>();
        for (int j = i; j < endIndex; j++) {
          batch.add(features.get(j));
        }

        try {
          int processed =
              processBatchSafely(batch, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
          totalProcessed += processed;
          logger.info(
              "Batch processed successfully. Total so far: {}/{}", totalProcessed, totalFeatures);
        } catch (Exception e) {
          logger.error("Failed to process batch {}-{}: {}", i + 1, endIndex, e.getMessage());
          // Continue with next batch instead of failing completely
        }
      }

      logger.info(
          "Successfully processed {} out of {} features from file: {}",
          totalProcessed,
          totalFeatures,
          fileName);
      return totalProcessed;

    } catch (IOException e) {
      logger.error("Failed to read GeoJSON file: {}", geoJsonFilePath, e);
      throw new RuntimeException("Failed to process learning model result file", e);
    } catch (Exception e) {
      logger.error("Error processing learning model result: {}", geoJsonFilePath, e);
      throw new RuntimeException("Failed to process learning model result", e);
    }
  }

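  // NOTE: Spring's proxy-based @Transactional is not applied to private methods invoked through
  // "this", so the two helpers below effectively run without their own transactions when called
  // from processLearningModelResultOptimized.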
  @Transactional
  private MapSheetLearnDataEntity createMainDataRecord(
      String geoJsonContent,
      String fileName,
      String filePath,
      String beforeYear,
      String afterYear,
      String mapSheetNum) {
    MapSheetLearnDataEntity mainData =
        createMainDataEntity(geoJsonContent, fileName, filePath, beforeYear, afterYear, mapSheetNum);
    MapSheetLearnDataEntity saved = mapSheetLearnDataRepository.save(mainData);
    logger.info("Created main data record with ID: {}", saved.getId());
    return saved;
  }

  @Transactional
  private int processBatchSafely(
      List<JsonNode> features, Long dataUid, String beforeYear, String afterYear, String mapSheetNum) {
    int processed = 0;

    for (JsonNode feature : features) {
      try {
        long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000);
        insertFeatureWithPostGIS(feature, geoUid, dataUid, beforeYear, afterYear, mapSheetNum);
        processed++;

        // Small delay to prevent ID collisions
        try {
          Thread.sleep(1);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        }

      } catch (Exception e) {
        logger.warn("Failed to process individual feature: {}", e.getMessage());
        // Continue processing other features in this batch
      }
    }

    return processed;
  }

  /**
   * Process a learning model result GeoJSON file and store it in the database
   *
   * @param geoJsonFilePath Path to the GeoJSON file
   * @return Number of features processed
   */
  @Transactional
  public int processLearningModelResult(Path geoJsonFilePath) {
    try {
      logger.info("Processing learning model result file: {}", geoJsonFilePath);

      // Read and parse GeoJSON file
      String geoJsonContent = Files.readString(geoJsonFilePath);
      JsonNode rootNode = objectMapper.readTree(geoJsonContent);

      // Validate GeoJSON structure
      if (!rootNode.has("type") || !"FeatureCollection".equals(rootNode.get("type").asText())) {
        throw new IllegalArgumentException("Invalid GeoJSON: Not a FeatureCollection");
      }

      JsonNode features = rootNode.get("features");
      if (features == null || !features.isArray()) {
        throw new IllegalArgumentException("Invalid GeoJSON: No features array found");
      }

      // Extract metadata from file name and content
      String fileName = geoJsonFilePath.getFileName().toString();
      String mapSheetName =
          rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");

      // Parse years from filename or data (e.g., "캠코_2021_2022_35813023")
      String[] parts = mapSheetName.split("_");
      String beforeYear = null, afterYear = null, mapSheetNum = null;

      if (parts.length >= 4) {
        beforeYear = parts[1]; // 2021 or 2023
        afterYear = parts[2]; // 2022 or 2024
        mapSheetNum = parts[3]; // 35813023 or 35810049
      }

      if (beforeYear == null || afterYear == null || mapSheetNum == null) {
        throw new IllegalArgumentException(
            "Cannot parse years and map sheet number from filename: " + fileName);
      }

      // Create main data record
      MapSheetLearnDataEntity mainData =
          createMainDataEntity(
              geoJsonContent,
              fileName,
              geoJsonFilePath.toString(),
              beforeYear,
              afterYear,
              mapSheetNum);

      MapSheetLearnDataEntity savedMainData = mapSheetLearnDataRepository.save(mainData);
      logger.info("Saved main data record with ID: {}", savedMainData.getId());

      // Process each feature in the GeoJSON using direct PostGIS insertion
      int featureCount = 0;
      int batchSize = 10; // Much smaller batch size to avoid transaction timeout

      for (int i = 0; i < features.size(); i += batchSize) {
        int endIndex = Math.min(i + batchSize, features.size());
        logger.info("Processing batch {}-{} of {} features", i + 1, endIndex, features.size());

        // Process each feature individually within this logging batch
        for (int j = i; j < endIndex; j++) {
          JsonNode feature = features.get(j);
          try {
            // Generate unique ID for this geometry entity
            long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000) + j;

            // Extract feature data and insert directly with PostGIS
            insertFeatureWithPostGIS(
                feature, geoUid, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
            featureCount++;

            // Small delay to prevent issues
            if (j % 5 == 0) {
              try {
                Thread.sleep(10);
              } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
              }
            }

          } catch (Exception e) {
            logger.warn("Failed to process feature {}: {}", j + 1, e.getMessage());
          }
        }

        // Log progress after each batch (SLF4J has no printf-style placeholders, so the
        // percentage is pre-formatted with String.format)
        if (featureCount > 0 && endIndex % batchSize == 0) {
          logger.info(
              "Processed {} features so far, success rate: {}%",
              featureCount, String.format("%.1f", (featureCount * 100.0) / endIndex));
        }
      }

      logger.info("Successfully processed {} features from file: {}", featureCount, fileName);
      return featureCount;

    } catch (IOException e) {
      logger.error("Failed to read GeoJSON file: {}", geoJsonFilePath, e);
      throw new RuntimeException("Failed to process learning model result file", e);
    } catch (Exception e) {
      logger.error("Error processing learning model result: {}", geoJsonFilePath, e);
      throw new RuntimeException("Failed to process learning model result", e);
    }
  }

  /** Create the main data entity for tb_map_sheet_learn_data table */
  private MapSheetLearnDataEntity createMainDataEntity(
      String geoJsonContent,
      String fileName,
      String filePath,
      String beforeYear,
      String afterYear,
      String mapSheetNum) {

    MapSheetLearnDataEntity entity = new MapSheetLearnDataEntity();

    // Generate unique ID (using current timestamp + random component)
    entity.setId(System.currentTimeMillis() + (long) (Math.random() * 1000));

    LocalDateTime now = LocalDateTime.now();
    entity.setAnalStrtDttm(ZonedDateTime.now());
    entity.setAnalEndDttm(ZonedDateTime.now());
    entity.setCompareYyyy(Integer.parseInt(beforeYear)); // store only the first (before) year
    // Convert the GeoJSON payload to a Map and store it as JSON
    try {
      @SuppressWarnings("unchecked")
      Map<String, Object> jsonMap = objectMapper.readValue(geoJsonContent, Map.class);
      entity.setDataJson(jsonMap);
    } catch (Exception e) {
      logger.warn("JSON 파싱 실패, 빈 Map으로 저장: {}", fileName, e);
      entity.setDataJson(new HashMap<>());
    }
    entity.setDataName(fileName);
    entity.setDataPath(filePath);
    entity.setDataState("PROCESSED");
    entity.setCreatedDttm(ZonedDateTime.now());
    entity.setUpdatedDttm(ZonedDateTime.now());

    return entity;
  }

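  /*
   * Expected shape of each feature's "properties", as inferred from the reads in
   * insertFeatureWithPostGIS below; the numeric values are illustrative and real files may carry
   * additional fields:
   *
   *   "properties": {
   *     "cd_prob": 0.93,
   *     "area": 1234.5,
   *     "class": {
   *       "before": [ { "class_name": "...", "probability": 0.88 } ],
   *       "after":  [ { "class_name": "...", "probability": 0.91 } ]
   *     }
   *   }
   */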
  /** Insert GeoJSON feature directly using PostGIS functions */
  private void insertFeatureWithPostGIS(
      JsonNode feature,
      Long geoUid,
      Long dataUid,
      String beforeYear,
      String afterYear,
      String mapSheetNum)
      throws Exception {

    JsonNode properties = feature.get("properties");
    JsonNode geometry = feature.get("geometry");

    if (properties == null || geometry == null) {
      throw new IllegalArgumentException("Feature missing properties or geometry");
    }

    // Extract properties
    Double cdProb = properties.has("cd_prob") ? properties.get("cd_prob").asDouble() : null;
    Double area = properties.has("area") ? properties.get("area").asDouble() : null;

    String classBeforeName = null;
    Double classBeforeProb = null;
    String classAfterName = null;
    Double classAfterProb = null;

    // Classification data
    JsonNode classNode = properties.get("class");
    if (classNode != null) {
      // Before classification
      JsonNode beforeClass = classNode.get("before");
      if (beforeClass != null && beforeClass.isArray() && beforeClass.size() > 0) {
        JsonNode firstBefore = beforeClass.get(0);
        if (firstBefore.has("class_name")) {
          classBeforeName = firstBefore.get("class_name").asText();
        }
        if (firstBefore.has("probability")) {
          classBeforeProb = firstBefore.get("probability").asDouble();
        }
      }

      // After classification
      JsonNode afterClass = classNode.get("after");
      if (afterClass != null && afterClass.isArray() && afterClass.size() > 0) {
        JsonNode firstAfter = afterClass.get(0);
        if (firstAfter.has("class_name")) {
          classAfterName = firstAfter.get("class_name").asText();
        }
        if (firstAfter.has("probability")) {
          classAfterProb = firstAfter.get("probability").asDouble();
        }
      }
    }

    // Get geometry type
    String geoType = geometry.has("type") ? geometry.get("type").asText() : "Unknown";

    // Convert geometry to JSON string for PostGIS
    String geometryJson = geometry.toString();

    // Insert using PostGIS functions
    mapSheetLearnDataGeomRepository.insertWithPostGISGeometry(
        geoUid, cdProb, classBeforeName, classBeforeProb,
        classAfterName, classAfterProb, Long.parseLong(mapSheetNum),
        Integer.parseInt(beforeYear), Integer.parseInt(afterYear),
        area, geometryJson, geoType, dataUid);

    logger.debug("Inserted geometry entity with ID: {} using PostGIS", geoUid);
  }

  /**
   * Process multiple learning model result files
   *
   * @param filePaths List of GeoJSON file paths
   * @return Total number of features processed across all files
   */
  @Transactional
  public int processMultipleLearningModelResults(List<Path> filePaths) {
    int totalProcessed = 0;

    for (Path filePath : filePaths) {
      try {
        int processed = processLearningModelResult(filePath);
        totalProcessed += processed;
        logger.info("Processed {} features from file: {}", processed, filePath.getFileName());
      } catch (Exception e) {
        logger.error("Failed to process file: {}", filePath, e);
        // Continue processing other files even if one fails
      }
    }

    logger.info("Total features processed across all files: {}", totalProcessed);
    return totalProcessed;
  }
}