Delete and update the source code connected to the learn table

This commit is contained in:
2026-01-07 14:35:45 +09:00
parent c8773dabdd
commit 74d1643384
11 changed files with 0 additions and 2583 deletions


@@ -1,431 +0,0 @@
package com.kamco.cd.kamcoback.inference.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* Service that parses learning model result GeoJSON files and stores them in the
* database with the proper field mapping.
*/
@Service
public class LearningModelResultProcessor {
private static final Logger logger = LoggerFactory.getLogger(LearningModelResultProcessor.class);
@Autowired private MapSheetLearnDataRepository mapSheetLearnDataRepository;
@Autowired private MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
private final ObjectMapper objectMapper = new ObjectMapper();
/** Process large learning model result files with optimized batch processing */
public int processLearningModelResultOptimized(Path geoJsonFilePath) {
try {
logger.info("Processing learning model result file (optimized): {}", geoJsonFilePath);
// Read and parse GeoJSON file
String geoJsonContent = Files.readString(geoJsonFilePath);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Validate GeoJSON structure
if (!rootNode.has("type") || !"FeatureCollection".equals(rootNode.get("type").asText())) {
throw new IllegalArgumentException("Invalid GeoJSON: Not a FeatureCollection");
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
throw new IllegalArgumentException("Invalid GeoJSON: No features array found");
}
// Extract metadata from file name and content
String fileName = geoJsonFilePath.getFileName().toString();
String mapSheetName =
rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Parse years from filename
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1];
afterYear = parts[2];
mapSheetNum = parts[3];
}
if (beforeYear == null || afterYear == null || mapSheetNum == null) {
throw new IllegalArgumentException(
"Cannot parse years and map sheet number from filename: " + fileName);
}
int totalFeatures = features.size();
logger.info("Total features to process: {}", totalFeatures);
// Step 1: Create main data record first
MapSheetLearnDataEntity savedMainData =
createMainDataRecord(
geoJsonContent,
fileName,
geoJsonFilePath.toString(),
beforeYear,
afterYear,
mapSheetNum);
// Step 2: Process features in small batches to avoid transaction timeout
int totalProcessed = 0;
int batchSize = 25; // Smaller batch size for reliability
for (int i = 0; i < totalFeatures; i += batchSize) {
int endIndex = Math.min(i + batchSize, totalFeatures);
logger.info("Processing batch {}-{} of {}", i + 1, endIndex, totalFeatures);
List<JsonNode> batch = new ArrayList<>();
for (int j = i; j < endIndex; j++) {
batch.add(features.get(j));
}
try {
int processed =
processBatchSafely(batch, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
totalProcessed += processed;
logger.info(
"Batch processed successfully. Total so far: {}/{}", totalProcessed, totalFeatures);
} catch (Exception e) {
logger.error("Failed to process batch {}-{}: {}", i + 1, endIndex, e.getMessage());
// Continue with next batch instead of failing completely
}
}
logger.info(
"Successfully processed {} out of {} features from file: {}",
totalProcessed,
totalFeatures,
fileName);
return totalProcessed;
} catch (IOException e) {
logger.error("Failed to read GeoJSON file: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result file", e);
} catch (Exception e) {
logger.error("Error processing learning model result: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result", e);
}
}
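// NOTE: @Transactional on the private helpers below only takes effect when the call
// crosses a Spring proxy boundary. With the default proxy-based AOP, the self-invocations
// from processLearningModelResultOptimized() bypass the proxy, so each batch does not
// actually run in its own transaction here.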
@Transactional
private MapSheetLearnDataEntity createMainDataRecord(
String geoJsonContent,
String fileName,
String filePath,
String beforeYear,
String afterYear,
String mapSheetNum) {
MapSheetLearnDataEntity mainData =
createMainDataEntity(
geoJsonContent, fileName, filePath, beforeYear, afterYear, mapSheetNum);
MapSheetLearnDataEntity saved = mapSheetLearnDataRepository.save(mainData);
logger.info("Created main data record with ID: {}", saved.getId());
return saved;
}
@Transactional
private int processBatchSafely(
List<JsonNode> features,
Long dataUid,
String beforeYear,
String afterYear,
String mapSheetNum) {
int processed = 0;
for (JsonNode feature : features) {
try {
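// NOTE: timestamp-plus-random IDs (the same scheme used for the main record) can
// still collide under concurrent load; a database sequence or UUID would be safer.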
long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000);
insertFeatureWithPostGIS(feature, geoUid, dataUid, beforeYear, afterYear, mapSheetNum);
processed++;
// Small delay to prevent ID collisions
try {
Thread.sleep(1);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
} catch (Exception e) {
logger.warn("Failed to process individual feature: {}", e.getMessage());
// Continue processing other features in this batch
}
}
return processed;
}
/**
* Process a learning model result GeoJSON file and store it in the database
*
* @param geoJsonFilePath Path to the GeoJSON file
* @return Number of features processed
*/
@Transactional
public int processLearningModelResult(Path geoJsonFilePath) {
try {
logger.info("Processing learning model result file: {}", geoJsonFilePath);
// Read and parse GeoJSON file
String geoJsonContent = Files.readString(geoJsonFilePath);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Validate GeoJSON structure
if (!rootNode.has("type") || !"FeatureCollection".equals(rootNode.get("type").asText())) {
throw new IllegalArgumentException("Invalid GeoJSON: Not a FeatureCollection");
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
throw new IllegalArgumentException("Invalid GeoJSON: No features array found");
}
// Extract metadata from file name and content
String fileName = geoJsonFilePath.getFileName().toString();
String mapSheetName =
rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Parse years from filename or data (e.g., "캠코_2021_2022_35813023")
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1]; // 2021 or 2023
afterYear = parts[2]; // 2022 or 2024
mapSheetNum = parts[3]; // 35813023 or 35810049
}
if (beforeYear == null || afterYear == null || mapSheetNum == null) {
throw new IllegalArgumentException(
"Cannot parse years and map sheet number from filename: " + fileName);
}
// Create main data record
MapSheetLearnDataEntity mainData =
createMainDataEntity(
geoJsonContent,
fileName,
geoJsonFilePath.toString(),
beforeYear,
afterYear,
mapSheetNum);
MapSheetLearnDataEntity savedMainData = mapSheetLearnDataRepository.save(mainData);
logger.info("Saved main data record with ID: {}", savedMainData.getId());
// Process each feature in the GeoJSON using direct PostGIS insertion
int featureCount = 0;
int batchSize = 10; // Much smaller batch size to avoid transaction timeout
for (int i = 0; i < features.size(); i += batchSize) {
int endIndex = Math.min(i + batchSize, features.size());
logger.info("Processing batch {}-{} of {} features", i + 1, endIndex, features.size());
// Process each feature individually within this logging batch
for (int j = i; j < endIndex; j++) {
JsonNode feature = features.get(j);
try {
// Generate unique ID for this geometry entity
long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000) + j;
// Extract feature data and insert directly with PostGIS
insertFeatureWithPostGIS(
feature, geoUid, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
featureCount++;
// Brief pause every 5 features to throttle inserts and space out timestamp-based IDs
if (j % 5 == 0) {
try {
Thread.sleep(10);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
} catch (Exception e) {
logger.warn("Failed to process feature {}: {}", j + 1, e.getMessage());
}
}
// Log progress after each batch
if (featureCount > 0 && endIndex % batchSize == 0) {
logger.info(
"Processed {} features so far, success rate: {}%",
featureCount, String.format("%.1f", (featureCount * 100.0) / endIndex));
}
}
logger.info("Successfully processed {} features from file: {}", featureCount, fileName);
return featureCount;
} catch (IOException e) {
logger.error("Failed to read GeoJSON file: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result file", e);
} catch (Exception e) {
logger.error("Error processing learning model result: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result", e);
}
}
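// Note: unlike processLearningModelResultOptimized(), the method above runs the whole
// file inside a single @Transactional scope, so a very large file can still hit the
// transaction timeout that the small batch size is meant to work around.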
/** Create the main data entity for tb_map_sheet_learn_data table */
private MapSheetLearnDataEntity createMainDataEntity(
String geoJsonContent,
String fileName,
String filePath,
String beforeYear,
String afterYear,
String mapSheetNum) {
MapSheetLearnDataEntity entity = new MapSheetLearnDataEntity();
// Generate unique ID (using current timestamp + random component)
entity.setId(System.currentTimeMillis() + (long) (Math.random() * 1000));
entity.setAnalStrtDttm(ZonedDateTime.now());
entity.setAnalEndDttm(ZonedDateTime.now());
entity.setCompareYyyy(Integer.parseInt(beforeYear)); // store only the first (before) year
// Convert the GeoJSON payload to a Map and store it as JSON
try {
@SuppressWarnings("unchecked")
Map<String, Object> jsonMap = objectMapper.readValue(geoJsonContent, Map.class);
entity.setDataJson(jsonMap);
} catch (Exception e) {
logger.warn("JSON 파싱 실패, 빈 Map으로 저장: {}", fileName, e);
entity.setDataJson(new HashMap<>());
}
entity.setDataName(fileName);
entity.setDataPath(filePath);
entity.setDataState("PROCESSED");
entity.setCreatedDttm(ZonedDateTime.now());
entity.setUpdatedDttm(ZonedDateTime.now());
return entity;
}
/** Insert GeoJSON feature directly using PostGIS functions */
private void insertFeatureWithPostGIS(
JsonNode feature,
Long geoUid,
Long dataUid,
String beforeYear,
String afterYear,
String mapSheetNum)
throws Exception {
JsonNode properties = feature.get("properties");
JsonNode geometry = feature.get("geometry");
if (properties == null || geometry == null) {
throw new IllegalArgumentException("Feature missing properties or geometry");
}
// Extract properties
Double cdProb = properties.has("cd_prob") ? properties.get("cd_prob").asDouble() : null;
Double area = properties.has("area") ? properties.get("area").asDouble() : null;
String classBeforeName = null;
Double classBeforeProb = null;
String classAfterName = null;
Double classAfterProb = null;
// Classification data
JsonNode classNode = properties.get("class");
if (classNode != null) {
// Before classification
JsonNode beforeClass = classNode.get("before");
if (beforeClass != null && beforeClass.isArray() && beforeClass.size() > 0) {
JsonNode firstBefore = beforeClass.get(0);
if (firstBefore.has("class_name")) {
classBeforeName = firstBefore.get("class_name").asText();
}
if (firstBefore.has("probability")) {
classBeforeProb = firstBefore.get("probability").asDouble();
}
}
// After classification
JsonNode afterClass = classNode.get("after");
if (afterClass != null && afterClass.isArray() && afterClass.size() > 0) {
JsonNode firstAfter = afterClass.get(0);
if (firstAfter.has("class_name")) {
classAfterName = firstAfter.get("class_name").asText();
}
if (firstAfter.has("probability")) {
classAfterProb = firstAfter.get("probability").asDouble();
}
}
}
// Get geometry type
String geoType = geometry.has("type") ? geometry.get("type").asText() : "Unknown";
// Convert geometry to JSON string for PostGIS
String geometryJson = geometry.toString();
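// The repository is expected to convert this GeoJSON string on the database side,
// e.g. with the PostGIS ST_GeomFromGeoJSON function.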
// Insert using PostGIS functions
mapSheetLearnDataGeomRepository.insertWithPostGISGeometry(
geoUid,
cdProb,
classBeforeName,
classBeforeProb,
classAfterName,
classAfterProb,
Long.parseLong(mapSheetNum),
Integer.parseInt(beforeYear),
Integer.parseInt(afterYear),
area,
geometryJson,
geoType,
dataUid);
logger.debug("Inserted geometry entity with ID: {} using PostGIS", geoUid);
}
/**
* Process multiple learning model result files
*
* @param filePaths List of GeoJSON file paths
* @return Total number of features processed across all files
*/
@Transactional
public int processMultipleLearningModelResults(List<Path> filePaths) {
int totalProcessed = 0;
for (Path filePath : filePaths) {
try {
int processed = processLearningModelResult(filePath);
totalProcessed += processed;
logger.info("Processed {} features from file: {}", processed, filePath.getFileName());
} catch (Exception e) {
logger.error("Failed to process file: {}", filePath, e);
// Continue processing other files even if one fails
}
}
logger.info("Total features processed across all files: {}", totalProcessed);
return totalProcessed;
}
}
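
For context, here is a minimal usage sketch of the deleted service (assumptions: a Spring Boot application that exposes the processor as a bean; the runner class and the sample file path below are illustrative and not part of this commit):

package com.kamco.cd.kamcoback.inference.service;
import java.nio.file.Path;
import java.util.List;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

@Component
public class LearningModelResultRunner implements CommandLineRunner {
    private final LearningModelResultProcessor processor;

    public LearningModelResultRunner(LearningModelResultProcessor processor) {
        this.processor = processor;
    }

    @Override
    public void run(String... args) {
        // File names must follow the "<prefix>_<beforeYear>_<afterYear>_<mapSheetNum>.geojson"
        // pattern the processor parses, e.g. "캠코_2021_2022_35813023.geojson".
        List<Path> files = List.of(Path.of("data/캠코_2021_2022_35813023.geojson"));
        int processed = processor.processMultipleLearningModelResults(files);
        System.out.println("Processed " + processed + " features");
    }
}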