Run :spotlessApply

commit f74d49f7e1
parent 8bab44d7e4
Date: 2025-11-27 18:13:42 +09:00
10 changed files with 211 additions and 161 deletions


@@ -3,7 +3,6 @@ package com.kamco.cd.kamcoback.inference.service;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
-import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
 import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
 import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
 import java.io.IOException;
@@ -15,7 +14,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.locationtech.jts.geom.Geometry;
 import org.locationtech.jts.io.geojson.GeoJsonReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -39,9 +37,7 @@ public class LearningModelResultProcessor {
   private final ObjectMapper objectMapper = new ObjectMapper();
   private final GeoJsonReader geoJsonReader = new GeoJsonReader();
 
-  /**
-   * Process large learning model result files with optimized batch processing
-   */
+  /** Process large learning model result files with optimized batch processing */
   public int processLearningModelResultOptimized(Path geoJsonFilePath) {
     try {
       logger.info("Processing learning model result file (optimized): {}", geoJsonFilePath);
@@ -62,7 +58,8 @@ public class LearningModelResultProcessor {
       // Extract metadata from file name and content
       String fileName = geoJsonFilePath.getFileName().toString();
-      String mapSheetName = rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
+      String mapSheetName =
+          rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
 
       // Parse years from filename
       String[] parts = mapSheetName.split("_");
@@ -70,44 +67,58 @@ public class LearningModelResultProcessor {
       if (parts.length >= 4) {
         beforeYear = parts[1];
-        afterYear = parts[2];
+        afterYear = parts[2];
         mapSheetNum = parts[3];
       }
 
       if (beforeYear == null || afterYear == null || mapSheetNum == null) {
-        throw new IllegalArgumentException("Cannot parse years and map sheet number from filename: " + fileName);
+        throw new IllegalArgumentException(
+            "Cannot parse years and map sheet number from filename: " + fileName);
       }
 
       int totalFeatures = features.size();
       logger.info("Total features to process: {}", totalFeatures);
 
       // Step 1: Create main data record first
-      MapSheetLearnDataEntity savedMainData = createMainDataRecord(geoJsonContent, fileName, geoJsonFilePath.toString(), beforeYear, afterYear, mapSheetNum);
+      MapSheetLearnDataEntity savedMainData =
+          createMainDataRecord(
+              geoJsonContent,
+              fileName,
+              geoJsonFilePath.toString(),
+              beforeYear,
+              afterYear,
+              mapSheetNum);
 
       // Step 2: Process features in small batches to avoid transaction timeout
       int totalProcessed = 0;
       int batchSize = 25; // Smaller batch size for reliability
 
       for (int i = 0; i < totalFeatures; i += batchSize) {
         int endIndex = Math.min(i + batchSize, totalFeatures);
         logger.info("Processing batch {}-{} of {}", i + 1, endIndex, totalFeatures);
 
         List<JsonNode> batch = new ArrayList<>();
         for (int j = i; j < endIndex; j++) {
           batch.add(features.get(j));
         }
 
         try {
-          int processed = processBatchSafely(batch, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
+          int processed =
+              processBatchSafely(batch, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
           totalProcessed += processed;
-          logger.info("Batch processed successfully. Total so far: {}/{}", totalProcessed, totalFeatures);
+          logger.info(
+              "Batch processed successfully. Total so far: {}/{}", totalProcessed, totalFeatures);
         } catch (Exception e) {
           logger.error("Failed to process batch {}-{}: {}", i + 1, endIndex, e.getMessage());
           // Continue with next batch instead of failing completely
         }
       }
 
-      logger.info("Successfully processed {} out of {} features from file: {}", totalProcessed, totalFeatures, fileName);
+      logger.info(
+          "Successfully processed {} out of {} features from file: {}",
+          totalProcessed,
+          totalFeatures,
+          fileName);
 
       return totalProcessed;
 
     } catch (IOException e) {
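
For reference, the filename parsing in the hunk above implies names of the form prefix_beforeYear_afterYear_mapSheetNum.geojson. A minimal standalone sketch of that split, using a made-up sample name (not taken from the repository):

public class FileNameParseSketch {
  public static void main(String[] args) {
    // Hypothetical sample; real result files may be named differently.
    String fileName = "change_2020_2023_35812.geojson";
    String mapSheetName = fileName.replace(".geojson", "");
    String[] parts = mapSheetName.split("_"); // ["change", "2020", "2023", "35812"]
    // Same indices the processor reads: parts[1], parts[2], parts[3].
    System.out.println("before=" + parts[1] + ", after=" + parts[2] + ", sheet=" + parts[3]);
  }
}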
@@ -120,32 +131,49 @@ public class LearningModelResultProcessor {
   }
 
   @Transactional
-  private MapSheetLearnDataEntity createMainDataRecord(String geoJsonContent, String fileName, String filePath, String beforeYear, String afterYear, String mapSheetNum) {
-    MapSheetLearnDataEntity mainData = createMainDataEntity(geoJsonContent, fileName, filePath, beforeYear, afterYear, mapSheetNum);
+  private MapSheetLearnDataEntity createMainDataRecord(
+      String geoJsonContent,
+      String fileName,
+      String filePath,
+      String beforeYear,
+      String afterYear,
+      String mapSheetNum) {
+    MapSheetLearnDataEntity mainData =
+        createMainDataEntity(
+            geoJsonContent, fileName, filePath, beforeYear, afterYear, mapSheetNum);
     MapSheetLearnDataEntity saved = mapSheetLearnDataRepository.save(mainData);
     logger.info("Created main data record with ID: {}", saved.getId());
     return saved;
   }
 
   @Transactional
-  private int processBatchSafely(List<JsonNode> features, Long dataUid, String beforeYear, String afterYear, String mapSheetNum) {
+  private int processBatchSafely(
+      List<JsonNode> features,
+      Long dataUid,
+      String beforeYear,
+      String afterYear,
+      String mapSheetNum) {
     int processed = 0;
 
     for (JsonNode feature : features) {
       try {
         long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000);
         insertFeatureWithPostGIS(feature, geoUid, dataUid, beforeYear, afterYear, mapSheetNum);
         processed++;
 
         // Small delay to prevent ID collisions
-        try { Thread.sleep(1); } catch (InterruptedException e) { Thread.currentThread().interrupt(); }
+        try {
+          Thread.sleep(1);
+        } catch (InterruptedException e) {
+          Thread.currentThread().interrupt();
+        }
       } catch (Exception e) {
         logger.warn("Failed to process individual feature: {}", e.getMessage());
         // Continue processing other features in this batch
       }
     }
 
     return processed;
   }
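
The geoUid scheme above (current millis plus a random offset, followed by Thread.sleep(1)) is the commit's own collision-avoidance device. A process-local counter makes the sleep unnecessary; the sketch below is an assumption, not the repository's approach, and a database sequence would be the safer cross-process option. Note also that with Spring's default proxy-based AOP, @Transactional has no effect on private methods such as createMainDataRecord and processBatchSafely.

import java.util.concurrent.atomic.AtomicLong;

// Hypothetical helper: a monotonically increasing ID that cannot collide
// within one JVM, so no Thread.sleep(1) is needed between inserts.
public final class GeoUidGenerator {
  private static final AtomicLong SEQ = new AtomicLong(System.currentTimeMillis() * 10_000L);

  private GeoUidGenerator() {}

  public static long nextGeoUid() {
    return SEQ.incrementAndGet();
  }
}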
@@ -214,32 +242,38 @@ public class LearningModelResultProcessor {
     for (int i = 0; i < features.size(); i += batchSize) {
       int endIndex = Math.min(i + batchSize, features.size());
       logger.info("Processing batch {}-{} of {} features", i + 1, endIndex, features.size());
 
       // Process each feature individually within this logging batch
       for (int j = i; j < endIndex; j++) {
         JsonNode feature = features.get(j);
         try {
           // Generate unique ID for this geometry entity
           long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000) + j;
 
           // Extract feature data and insert directly with PostGIS
-          insertFeatureWithPostGIS(feature, geoUid, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
+          insertFeatureWithPostGIS(
+              feature, geoUid, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
           featureCount++;
 
           // Small delay to prevent issues
           if (j % 5 == 0) {
-            try { Thread.sleep(10); } catch (InterruptedException e) { Thread.currentThread().interrupt(); }
+            try {
+              Thread.sleep(10);
+            } catch (InterruptedException e) {
+              Thread.currentThread().interrupt();
+            }
           }
         } catch (Exception e) {
           logger.warn("Failed to process feature {}: {}", j + 1, e.getMessage());
         }
       }
 
       // Log progress after each batch
       if (featureCount > 0 && endIndex % batchSize == 0) {
-        logger.info("Processed {} features so far, success rate: {:.1f}%",
-            featureCount, (featureCount * 100.0) / endIndex);
+        logger.info(
+            "Processed {} features so far, success rate: {:.1f}%",
+            featureCount, (featureCount * 100.0) / endIndex);
       }
     }
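
One pre-existing detail the formatter leaves untouched: SLF4J substitutes only bare {} placeholders, so the {:.1f} above is printed literally instead of rounding the success rate. An equivalent call that actually formats the percentage could look like this sketch:

// Format the rate first; SLF4J's {} does not accept format specifiers.
logger.info(
    "Processed {} features so far, success rate: {}%",
    featureCount, String.format("%.1f", (featureCount * 100.0) / endIndex));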
@@ -293,7 +327,12 @@ public class LearningModelResultProcessor {
   /** Insert GeoJSON feature directly using PostGIS functions */
   private void insertFeatureWithPostGIS(
-      JsonNode feature, Long geoUid, Long dataUid, String beforeYear, String afterYear, String mapSheetNum)
+      JsonNode feature,
+      Long geoUid,
+      Long dataUid,
+      String beforeYear,
+      String afterYear,
+      String mapSheetNum)
       throws Exception {
 
     JsonNode properties = feature.get("properties");
@@ -306,12 +345,12 @@ public class LearningModelResultProcessor {
     // Extract properties
     Double cdProb = properties.has("cd_prob") ? properties.get("cd_prob").asDouble() : null;
    Double area = properties.has("area") ? properties.get("area").asDouble() : null;
 
     String classBeforeName = null;
     Double classBeforeProb = null;
     String classAfterName = null;
     Double classAfterProb = null;
 
     // Classification data
     JsonNode classNode = properties.get("class");
     if (classNode != null) {
@@ -342,18 +381,26 @@ public class LearningModelResultProcessor {
     // Get geometry type
     String geoType = geometry.has("type") ? geometry.get("type").asText() : "Unknown";
 
     // Convert geometry to JSON string for PostGIS
     String geometryJson = geometry.toString();
 
     // Insert using PostGIS functions
     mapSheetLearnDataGeomRepository.insertWithPostGISGeometry(
-        geoUid, cdProb, classBeforeName, classBeforeProb,
-        classAfterName, classAfterProb, Long.parseLong(mapSheetNum),
-        Integer.parseInt(beforeYear), Integer.parseInt(afterYear),
-        area, geometryJson, geoType, dataUid
-    );
+        geoUid,
+        cdProb,
+        classBeforeName,
+        classBeforeProb,
+        classAfterName,
+        classAfterProb,
+        Long.parseLong(mapSheetNum),
+        Integer.parseInt(beforeYear),
+        Integer.parseInt(afterYear),
+        area,
+        geometryJson,
+        geoType,
+        dataUid);
 
     logger.debug("Inserted geometry entity with ID: {} using PostGIS", geoUid);
   }
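
The insertWithPostGISGeometry method itself is not part of this diff. Assuming a Spring Data JPA repository and the real PostGIS function ST_GeomFromGeoJSON, a native-query declaration matching the 13-argument call above might look like the following sketch; the table and column names are guesses, not the project's actual schema.

import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

public interface MapSheetLearnDataGeomRepository
    extends JpaRepository<MapSheetLearnDataGeomEntity, Long> {

  // Sketch only: table/column names are hypothetical; ST_GeomFromGeoJSON is a real PostGIS function.
  @Modifying
  @Query(
      value =
          "INSERT INTO map_sheet_learn_data_geom "
              + "(geo_uid, cd_prob, class_before_name, class_before_prob, class_after_name, "
              + "class_after_prob, map_sheet_num, before_year, after_year, area, geom, geo_type, data_uid) "
              + "VALUES (:geoUid, :cdProb, :classBeforeName, :classBeforeProb, :classAfterName, "
              + ":classAfterProb, :mapSheetNum, :beforeYear, :afterYear, :area, "
              + "ST_GeomFromGeoJSON(:geometryJson), :geoType, :dataUid)",
      nativeQuery = true)
  void insertWithPostGISGeometry(
      @Param("geoUid") Long geoUid,
      @Param("cdProb") Double cdProb,
      @Param("classBeforeName") String classBeforeName,
      @Param("classBeforeProb") Double classBeforeProb,
      @Param("classAfterName") String classAfterName,
      @Param("classAfterProb") Double classAfterProb,
      @Param("mapSheetNum") Long mapSheetNum,
      @Param("beforeYear") Integer beforeYear,
      @Param("afterYear") Integer afterYear,
      @Param("area") Double area,
      @Param("geometryJson") String geometryJson,
      @Param("geoType") String geoType,
      @Param("dataUid") Long dataUid);
}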