Merge pull request 'Delete and revise learn table integration code' (#155) from feat/dev_251201 into develop

Reviewed-on: https://kamco.gitea.gs.dabeeo.com/dabeeo/kamco-dabeeo-backoffice/pulls/155
2026-01-07 14:37:57 +09:00
20 changed files with 244 additions and 2659 deletions


@@ -66,7 +66,7 @@ public class Enums {
return result;
}
-  /** ✅ Build the Map by scanning only @CodeExpose + EnumType enums */
+  /** Build the Map by scanning only CodeExpose + EnumType enums */
private static Map<String, Class<? extends Enum<?>>> scanExposedEnumMap() {
Reflections reflections = new Reflections(BASE_PACKAGE);


@@ -1,52 +0,0 @@
package com.kamco.cd.kamcoback.geojson.config;
import jakarta.annotation.PostConstruct;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/** GeoJSON file monitoring settings */
@Component
@ConfigurationProperties(prefix = "geojson.monitor")
@Getter
@Setter
public class GeoJsonMonitorConfig {
/** Directory to watch for incoming files */
private String watchDirectory = "~/geojson/upload";
/** Directory to move files to after successful processing */
private String processedDirectory = "~/geojson/processed";
/** Directory to move files to when processing fails */
private String errorDirectory = "~/geojson/error";
/** File monitoring schedule (cron expression); default: runs every minute */
private String cronExpression = "0 * * * * *";
/** Supported archive extensions */
private String[] supportedExtensions = {"zip", "tar", "tar.gz", "tgz"};
/** Maximum file size to process (bytes) */
private long maxFileSize = 100 * 1024 * 1024; // 100MB
/** Temporary extraction directory */
private String tempDirectory = "/tmp/geojson_extract";
/** Expand home-directory paths */
@PostConstruct
public void expandPaths() {
watchDirectory = expandPath(watchDirectory);
processedDirectory = expandPath(processedDirectory);
errorDirectory = expandPath(errorDirectory);
tempDirectory = expandPath(tempDirectory);
}
private String expandPath(String path) {
if (path.startsWith("~")) {
return path.replace("~", System.getProperty("user.home"));
}
return path;
}
}
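
For context, GeoJsonMonitorConfig is populated from externalized configuration under the geojson.monitor prefix. A minimal binding sketch (not part of this change set; it assumes Spring Boot's Binder API and spring-test's MockEnvironment, and relies on relaxed binding of kebab-case keys to the camelCase fields above):

import org.springframework.boot.context.properties.bind.Binder;
import org.springframework.mock.env.MockEnvironment;

public class GeoJsonMonitorConfigBindingSketch {
  public static void main(String[] args) {
    // Kebab-case property keys bind to the camelCase fields of GeoJsonMonitorConfig.
    MockEnvironment env =
        new MockEnvironment()
            .withProperty("geojson.monitor.watch-directory", "~/geojson/upload")
            .withProperty("geojson.monitor.max-file-size", "104857600"); // 100MB
    GeoJsonMonitorConfig config =
        Binder.get(env).bind("geojson.monitor", GeoJsonMonitorConfig.class).get();
    config.expandPaths(); // in a running application the container invokes this via @PostConstruct
    System.out.println(config.getWatchDirectory()); // "~" expanded to the user home directory
  }
}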


@@ -1,198 +0,0 @@
package com.kamco.cd.kamcoback.geojson.controller;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.PageRequest;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestParam;
/** API controller for querying and testing GeoJSON data */
@Slf4j
@RequiredArgsConstructor
public class GeoJsonDataController {
private final MapSheetLearnDataRepository mapSheetLearnDataRepository;
private final MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
/** List learning data */
@GetMapping("/learn-data")
public ResponseEntity<Map<String, Object>> getLearnDataList(
@RequestParam(defaultValue = "0") int page,
@RequestParam(defaultValue = "10") int size,
@RequestParam(required = false) String dataState,
@RequestParam(required = false) String analState) {
try {
PageRequest pageRequest = PageRequest.of(page, size);
List<MapSheetLearnDataEntity> learnDataList;
if (dataState != null) {
learnDataList = mapSheetLearnDataRepository.findByDataState(dataState);
} else if (analState != null) {
learnDataList = mapSheetLearnDataRepository.findByAnalState(analState);
} else {
learnDataList = mapSheetLearnDataRepository.findAll(pageRequest).getContent();
}
Map<String, Object> response = new HashMap<>();
response.put("data", learnDataList);
response.put("totalCount", learnDataList.size());
response.put("page", page);
response.put("size", size);
return ResponseEntity.ok(response);
} catch (Exception e) {
log.error("Failed to list learning data", e);
return ResponseEntity.internalServerError()
.body(Map.of("error", "Data lookup failed: " + e.getMessage()));
}
}
/** Get the details of a specific learning data record */
@GetMapping("/learn-data/{id}")
public ResponseEntity<Map<String, Object>> getLearnDataDetail(@PathVariable Long id) {
try {
if (id == null) {
return ResponseEntity.badRequest().body(Map.of("error", "ID is required."));
}
Optional<MapSheetLearnDataEntity> learnDataOpt = mapSheetLearnDataRepository.findById(id);
if (learnDataOpt.isEmpty()) {
return ResponseEntity.notFound().build();
}
MapSheetLearnDataEntity learnData = learnDataOpt.get();
List<MapSheetLearnDataGeomEntity> geometryList =
mapSheetLearnDataGeomRepository.findByDataUid(id);
Map<String, Object> response = new HashMap<>();
response.put("learnData", learnData);
response.put("geometryData", geometryList);
response.put("geometryCount", geometryList.size());
return ResponseEntity.ok(response);
} catch (Exception e) {
log.error("Failed to fetch learning data detail: {}", id, e);
return ResponseEntity.internalServerError()
.body(Map.of("error", "Data lookup failed: " + e.getMessage()));
}
}
/** List geometry data */
@GetMapping("/geometry")
public ResponseEntity<Map<String, Object>> getGeometryDataList(
@RequestParam(defaultValue = "0") int page,
@RequestParam(defaultValue = "10") int size,
@RequestParam(required = false) Long dataUid,
@RequestParam(required = false) String geoType) {
try {
List<MapSheetLearnDataGeomEntity> geometryList;
if (dataUid != null) {
geometryList = mapSheetLearnDataGeomRepository.findByDataUid(dataUid);
} else if (geoType != null) {
geometryList = mapSheetLearnDataGeomRepository.findByGeoType(geoType);
} else {
PageRequest pageRequest = PageRequest.of(page, size);
geometryList = mapSheetLearnDataGeomRepository.findAll(pageRequest).getContent();
}
Map<String, Object> response = new HashMap<>();
response.put("data", geometryList);
response.put("totalCount", geometryList.size());
response.put("page", page);
response.put("size", size);
return ResponseEntity.ok(response);
} catch (Exception e) {
log.error("Failed to list geometry data", e);
return ResponseEntity.internalServerError()
.body(Map.of("error", "Data lookup failed: " + e.getMessage()));
}
}
/** Get system statistics */
@GetMapping("/statistics")
public ResponseEntity<Map<String, Object>> getStatistics() {
try {
long totalLearnData = mapSheetLearnDataRepository.count();
long totalGeometryData = mapSheetLearnDataGeomRepository.count();
List<MapSheetLearnDataEntity> processedData =
mapSheetLearnDataRepository.findByDataState("PROCESSED");
List<MapSheetLearnDataEntity> pendingAnalysis =
mapSheetLearnDataRepository.findByAnalState("PENDING");
List<MapSheetLearnDataEntity> completedAnalysis =
mapSheetLearnDataRepository.findByAnalState("COMPLETED");
List<MapSheetLearnDataEntity> errorAnalysis =
mapSheetLearnDataRepository.findByAnalState("ERROR");
Map<String, Object> statistics = new HashMap<>();
statistics.put("totalLearnData", totalLearnData);
statistics.put("totalGeometryData", totalGeometryData);
statistics.put("processedDataCount", processedData.size());
statistics.put("pendingAnalysisCount", pendingAnalysis.size());
statistics.put("completedAnalysisCount", completedAnalysis.size());
statistics.put("errorAnalysisCount", errorAnalysis.size());
// Compute the completion rate
if (totalLearnData > 0) {
double completionRate = (double) completedAnalysis.size() / totalLearnData * 100;
statistics.put("completionRate", Math.round(completionRate * 100.0) / 100.0);
} else {
statistics.put("completionRate", 0.0);
}
return ResponseEntity.ok(
Map.of("statistics", statistics, "timestamp", java.time.Instant.now()));
} catch (Exception e) {
log.error("Failed to fetch statistics", e);
return ResponseEntity.internalServerError()
.body(Map.of("error", "Statistics lookup failed: " + e.getMessage()));
}
}
/** Get counts by state */
@GetMapping("/status-counts")
public ResponseEntity<Map<String, Object>> getStatusCounts() {
try {
Map<String, Long> dataStateCounts = new HashMap<>();
Map<String, Long> analStateCounts = new HashMap<>();
// Counts by data state
dataStateCounts.put(
"PROCESSED", (long) mapSheetLearnDataRepository.findByDataState("PROCESSED").size());
dataStateCounts.put(
"PENDING",
(long) mapSheetLearnDataRepository.findByDataStateIsNullOrDataState("PENDING").size());
// Counts by analysis state
analStateCounts.put(
"PENDING", (long) mapSheetLearnDataRepository.findByAnalState("PENDING").size());
analStateCounts.put(
"COMPLETED", (long) mapSheetLearnDataRepository.findByAnalState("COMPLETED").size());
analStateCounts.put(
"ERROR", (long) mapSheetLearnDataRepository.findByAnalState("ERROR").size());
return ResponseEntity.ok(
Map.of(
"dataStateCounts", dataStateCounts,
"analStateCounts", analStateCounts,
"timestamp", java.time.Instant.now()));
} catch (Exception e) {
log.error("Failed to fetch state counts", e);
return ResponseEntity.internalServerError()
.body(Map.of("error", "Count lookup failed: " + e.getMessage()));
}
}
}


@@ -1,135 +0,0 @@
package com.kamco.cd.kamcoback.geojson.controller;
import com.kamco.cd.kamcoback.geojson.service.GeoJsonFileMonitorService;
import com.kamco.cd.kamcoback.geojson.service.GeometryConversionService;
import java.util.List;
import java.util.Map;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestParam;
/** API controller for GeoJSON file monitoring and processing */
@Slf4j
@RequiredArgsConstructor
public class GeoJsonMonitorController {
private final GeoJsonFileMonitorService monitorService;
private final GeometryConversionService geometryConversionService;
/** Get monitoring status */
@GetMapping("/monitor/status")
public Map<String, Object> getMonitorStatus() {
return monitorService.getMonitorStatus();
}
/** Get system statistics */
@GetMapping("/monitor/stats")
public ResponseEntity<Map<String, Object>> getSystemStats() {
try {
Map<String, Object> stats = monitorService.getSystemStats();
return ResponseEntity.ok(stats);
} catch (Exception e) {
log.error("Failed to fetch system statistics", e);
return ResponseEntity.internalServerError()
.body(Map.of("error", "System statistics lookup failed: " + e.getMessage(), "status", "error"));
}
}
/** Initialize directories (manual trigger) */
@PostMapping("/monitor/init-directories")
public ResponseEntity<Map<String, Object>> initializeDirectories() {
try {
log.info("Manual directory initialization requested");
monitorService.initializeDirectoriesManually();
return ResponseEntity.ok(
Map.of(
"message", "Directory initialization completed.",
"status", "success"));
} catch (Exception e) {
log.error("Directory initialization failed", e);
return ResponseEntity.internalServerError()
.body(Map.of("error", "Directory initialization failed: " + e.getMessage(), "status", "error"));
}
}
/** Manually process a specific file */
@PostMapping("/process/file")
public ResponseEntity<Map<String, Object>> processFileManually(@RequestParam String filePath) {
try {
log.info("Manual file processing requested: {}", filePath);
monitorService.processFileManually(filePath);
return ResponseEntity.ok(
Map.of(
"message", "File processing completed.",
"filePath", filePath,
"status", "success"));
} catch (Exception e) {
log.error("Manual file processing failed: {}", filePath, e);
return ResponseEntity.internalServerError()
.body(
Map.of(
"error", "File processing failed: " + e.getMessage(), "filePath", filePath, "status", "error"));
}
}
/** Manually convert unprocessed geometry data */
@PostMapping("/process/geometry")
public ResponseEntity<Map<String, Object>> processUnprocessedGeometry() {
try {
log.info("Manual conversion of unprocessed geometry requested");
List<Long> processedIds = geometryConversionService.processUnprocessedLearnData();
return ResponseEntity.ok(
Map.of(
"message",
"Geometry conversion completed.",
"processedCount",
processedIds.size(),
"processedIds",
processedIds,
"status",
"success"));
} catch (Exception e) {
log.error("Geometry conversion failed", e);
return ResponseEntity.internalServerError()
.body(Map.of("error", "Geometry conversion failed: " + e.getMessage(), "status", "error"));
}
}
/** Convert geometry for specific learning data */
@PostMapping("/process/geometry/convert")
public ResponseEntity<Map<String, Object>> convertSpecificGeometry(
@RequestBody List<Long> learnDataIds) {
try {
if (learnDataIds == null || learnDataIds.isEmpty()) {
return ResponseEntity.badRequest().body(Map.of("error", "No learning data IDs to convert."));
}
log.info("Geometry conversion requested for learning data: {}", learnDataIds);
List<Long> geometryIds = geometryConversionService.convertToGeometryData(learnDataIds);
return ResponseEntity.ok(
Map.of(
"message",
"Geometry conversion completed.",
"inputCount",
learnDataIds.size(),
"outputCount",
geometryIds.size(),
"geometryIds",
geometryIds,
"status",
"success"));
} catch (Exception e) {
log.error("Geometry conversion failed for: {}", learnDataIds, e);
return ResponseEntity.internalServerError()
.body(Map.of("error", "Geometry conversion failed: " + e.getMessage(), "status", "error"));
}
}
}


@@ -1,155 +0,0 @@
package com.kamco.cd.kamcoback.geojson.service;
import com.kamco.cd.kamcoback.geojson.config.GeoJsonMonitorConfig;
import java.io.*;
import java.nio.file.*;
import java.util.*;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipFile;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.springframework.stereotype.Service;
/** Archive processing service */
@Slf4j
@Service
@RequiredArgsConstructor
public class ArchiveExtractorService {
private final GeoJsonMonitorConfig config;
/** Extract GeoJSON files from an archive */
public Map<String, String> extractGeoJsonFiles(Path archiveFile) throws IOException {
Map<String, String> geoJsonContents = new HashMap<>();
String fileName = archiveFile.getFileName().toString().toLowerCase();
log.info("압축파일 추출 시작: {}", archiveFile);
try {
if (fileName.endsWith(".zip")) {
extractFromZip(archiveFile, geoJsonContents);
} else if (fileName.endsWith(".tar")
|| fileName.endsWith(".tar.gz")
|| fileName.endsWith(".tgz")) {
extractFromTar(archiveFile, geoJsonContents);
} else {
throw new IllegalArgumentException("지원하지 않는 압축파일 형식: " + fileName);
}
} catch (Exception e) {
log.error("Archive extraction failed: {}", archiveFile, e);
throw e;
}
log.info("Extracted {} GeoJSON file(s) from archive: {}", geoJsonContents.size(), archiveFile);
return geoJsonContents;
}
/** Extract GeoJSON from a ZIP file */
private void extractFromZip(Path zipFile, Map<String, String> geoJsonContents)
throws IOException {
try (ZipFile zip = new ZipFile(zipFile.toFile())) {
Enumeration<ZipArchiveEntry> entries = zip.getEntries();
while (entries.hasMoreElements()) {
ZipArchiveEntry entry = entries.nextElement();
if (!entry.isDirectory() && isGeoJsonFile(entry.getName())) {
try (InputStream inputStream = zip.getInputStream(entry)) {
String content = readInputStream(inputStream);
geoJsonContents.put(entry.getName(), content);
log.debug("ZIP에서 추출: {}", entry.getName());
}
}
}
}
}
/** Extract GeoJSON from a TAR file */
private void extractFromTar(Path tarFile, Map<String, String> geoJsonContents)
throws IOException {
String fileName = tarFile.getFileName().toString().toLowerCase();
InputStream fileInputStream = Files.newInputStream(tarFile);
try {
// Check whether the TAR file is GZIP-compressed
if (fileName.endsWith(".gz") || fileName.endsWith(".tgz")) {
fileInputStream = new GzipCompressorInputStream(fileInputStream);
}
try (TarArchiveInputStream tarInputStream = new TarArchiveInputStream(fileInputStream)) {
ArchiveEntry entry;
while ((entry = tarInputStream.getNextEntry()) != null) {
if (!entry.isDirectory() && isGeoJsonFile(entry.getName())) {
String content = readInputStream(tarInputStream);
geoJsonContents.put(entry.getName(), content);
log.debug("TAR에서 추출: {}", entry.getName());
}
}
}
} finally {
try {
fileInputStream.close();
} catch (IOException e) {
log.warn("파일 스트림 종료 실패", e);
}
}
}
/** Read an InputStream into a string */
private String readInputStream(InputStream inputStream) throws IOException {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"))) {
StringBuilder content = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
content.append(line).append("\n");
}
return content.toString();
}
}
/** Check whether a file is a GeoJSON file */
private boolean isGeoJsonFile(String fileName) {
String lowerFileName = fileName.toLowerCase();
return lowerFileName.endsWith(".geojson") || lowerFileName.endsWith(".json");
}
/** Check whether the archive type is supported */
public boolean isSupportedArchive(Path file) {
String fileName = file.getFileName().toString().toLowerCase();
for (String extension : config.getSupportedExtensions()) {
if (fileName.endsWith("." + extension)) {
return true;
}
}
return false;
}
/** Check whether the file size is within the limit */
public boolean isFileSizeValid(Path file) {
try {
long fileSize = Files.size(file);
boolean isValid = fileSize <= config.getMaxFileSize();
if (!isValid) {
log.warn(
"File size exceeds the limit: {} ({}MB > {}MB)",
file,
fileSize / 1024 / 1024,
config.getMaxFileSize() / 1024 / 1024);
}
return isValid;
} catch (IOException e) {
log.error("파일 크기 확인 실패: {}", file, e);
return false;
}
}
}
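
A hypothetical usage sketch of the extractor above (in the application the service is injected by Spring; here it is constructed by hand, and the archive path is made up for illustration):

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;

public class ArchiveExtractorUsageSketch {
  public static void main(String[] args) throws Exception {
    GeoJsonMonitorConfig config = new GeoJsonMonitorConfig(); // defaults from the config class above
    config.expandPaths();
    ArchiveExtractorService extractor = new ArchiveExtractorService(config);
    Path archive = Paths.get("/tmp/sample.zip"); // hypothetical input archive
    if (extractor.isSupportedArchive(archive) && extractor.isFileSizeValid(archive)) {
      Map<String, String> contents = extractor.extractGeoJsonFiles(archive);
      contents.keySet().forEach(name -> System.out.println("extracted: " + name));
    }
  }
}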


@@ -1,483 +0,0 @@
package com.kamco.cd.kamcoback.geojson.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.time.ZonedDateTime;
import java.util.*;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.geojson.GeoJsonReader;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/** GeoJSON data processing service */
@Slf4j
@Service
@RequiredArgsConstructor
public class GeoJsonDataService {
private final MapSheetLearnDataRepository mapSheetLearnDataRepository;
private final MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
private final ObjectMapper objectMapper;
private final GeoJsonReader geoJsonReader = new GeoJsonReader();
/** Save GeoJSON files to the database */
public List<Long> processGeoJsonFiles(
Map<String, String> geoJsonContents, String archiveFileName) {
List<Long> savedIds = new ArrayList<>();
log.info("Starting GeoJSON file processing: {} ({} files)", archiveFileName, geoJsonContents.size());
// Process each file in its own transaction
for (Map.Entry<String, String> entry : geoJsonContents.entrySet()) {
String fileName = entry.getKey();
String geoJsonContent = entry.getValue();
try {
Long savedId = processGeoJsonFileWithTransaction(fileName, geoJsonContent, archiveFileName);
if (savedId != null) {
savedIds.add(savedId);
log.debug("GeoJSON file saved: {} (ID: {})", fileName, savedId);
}
} catch (Exception e) {
log.error("GeoJSON file processing failed: {}", fileName, e);
// A single file failure does not abort the whole batch
}
// Hint the GC between files (explicit System.gc() is generally discouraged)
System.gc();
}
log.info(
"GeoJSON file processing finished: {} (succeeded: {}, total: {})",
archiveFileName,
savedIds.size(),
geoJsonContents.size());
return savedIds;
}
/** Process a single file in a separate transaction (note: self-invocation from this class bypasses Spring's transactional proxy) */
@Transactional
public Long processGeoJsonFileWithTransaction(
String fileName, String geoJsonContent, String archiveFileName) {
try {
Long savedId = processGeoJsonFile(fileName, geoJsonContent, archiveFileName);
if (savedId != null && isLearningModelResult(fileName, geoJsonContent)) {
processLearningModelGeometryOptimized(savedId, geoJsonContent, fileName);
}
return savedId;
} catch (Exception e) {
log.error("Transaction error while processing file: {}", fileName, e);
throw e; // rethrow to trigger transaction rollback
}
}
/** Convert a single GeoJSON file into a MapSheetLearnDataEntity and save it */
private Long processGeoJsonFile(String fileName, String geoJsonContent, String archiveFileName) {
try {
// Parse and validate the GeoJSON
JsonNode geoJsonNode = objectMapper.readTree(geoJsonContent);
validateGeoJsonStructure(geoJsonNode);
// Check whether the file has already been processed
String dataPath = generateDataPath(archiveFileName, fileName);
Optional<MapSheetLearnDataEntity> existingData =
mapSheetLearnDataRepository.findByDataPath(dataPath);
if (existingData.isPresent()) {
log.warn("File already processed: {}", dataPath);
return existingData.get().getId();
}
// Create and save a new entity
MapSheetLearnDataEntity entity =
createMapSheetLearnDataEntity(fileName, geoJsonContent, archiveFileName, geoJsonNode);
MapSheetLearnDataEntity savedEntity = mapSheetLearnDataRepository.save(entity);
return savedEntity.getId();
} catch (Exception e) {
log.error("Error while processing GeoJSON file: {}", fileName, e);
throw new RuntimeException("GeoJSON file processing failed: " + fileName, e);
}
}
/** Validate the GeoJSON structure */
private void validateGeoJsonStructure(JsonNode geoJsonNode) {
if (!geoJsonNode.has("type")) {
throw new IllegalArgumentException("Invalid GeoJSON: missing 'type' field.");
}
String type = geoJsonNode.get("type").asText();
if (!"FeatureCollection".equals(type) && !"Feature".equals(type) && !"Geometry".equals(type)) {
throw new IllegalArgumentException("Unsupported GeoJSON type: " + type);
}
}
/** Create a MapSheetLearnDataEntity */
private MapSheetLearnDataEntity createMapSheetLearnDataEntity(
String fileName, String geoJsonContent, String archiveFileName, JsonNode geoJsonNode) {
MapSheetLearnDataEntity entity = new MapSheetLearnDataEntity();
// Set basic info
entity.setDataName(fileName);
entity.setDataPath(generateDataPath(archiveFileName, fileName));
entity.setDataType("GeoJSON");
entity.setDataTitle(extractTitle(fileName, geoJsonNode));
// Extract and set CRS info
setCrsInformation(entity, geoJsonNode);
// Store the JSON data
try {
@SuppressWarnings("unchecked")
Map<String, Object> jsonMap = objectMapper.readValue(geoJsonContent, Map.class);
entity.setDataJson(jsonMap);
} catch (Exception e) {
log.warn("JSON parsing failed; storing raw text: {}", fileName, e);
// If parsing fails, store the raw content as a Map
Map<String, Object> fallbackMap = new HashMap<>();
fallbackMap.put("raw_content", geoJsonContent);
fallbackMap.put("parse_error", e.getMessage());
entity.setDataJson(fallbackMap);
}
// Extract year info (attempted from the file name)
setYearInformation(entity, fileName);
// Set state info
entity.setDataState("PROCESSED");
entity.setAnalState("PENDING");
// Set timestamps
ZonedDateTime now = ZonedDateTime.now();
entity.setCreatedDttm(now);
entity.setUpdatedDttm(now);
entity.setDataStateDttm(now);
return entity;
}
/** Set CRS info */
private void setCrsInformation(MapSheetLearnDataEntity entity, JsonNode geoJsonNode) {
if (geoJsonNode.has("crs")) {
JsonNode crsNode = geoJsonNode.get("crs");
if (crsNode.has("type") && crsNode.has("properties")) {
String crsType = crsNode.get("type").asText();
entity.setDataCrsType(crsType);
JsonNode propertiesNode = crsNode.get("properties");
if (propertiesNode.has("name")) {
String crsName = propertiesNode.get("name").asText();
entity.setDataCrsTypeName(crsName);
}
}
} else {
// Default to WGS84 when no CRS is specified
entity.setDataCrsType("EPSG");
entity.setDataCrsTypeName("EPSG:4326");
}
}
/** Extract year info */
private void setYearInformation(MapSheetLearnDataEntity entity, String fileName) {
// Special handling for learning-model result files
if (fileName.matches(".*캠코_\\d{4}_\\d{4}_\\d+.*")) {
String[] parts = fileName.split("_");
if (parts.length >= 4) {
String beforeYear = parts[1];
String afterYear = parts[2];
// Store the first year as the comparison-year integer
try {
entity.setCompareYyyy(Integer.parseInt(beforeYear));
log.debug("Learning-model year set: {}", beforeYear);
} catch (NumberFormatException e) {
log.warn("Year parsing failed: {}", beforeYear, e);
}
return;
}
}
// Fallback: try to extract years from the file name
String[] parts = fileName.split("_");
for (String part : parts) {
if (part.matches("\\d{4}")) { // four-digit number (a year)
try {
Integer year = Integer.parseInt(part);
if (year >= 1900 && year <= 2100) {
if (entity.getDataYyyy() == null) {
entity.setDataYyyy(year);
} else {
entity.setCompareYyyy(year);
break;
}
}
} catch (NumberFormatException ignored) {
// ignore
}
}
}
}
/** Extract a title */
private String extractTitle(String fileName, JsonNode geoJsonNode) {
// Try to extract a title from the GeoJSON metadata
if (geoJsonNode.has("properties")) {
JsonNode properties = geoJsonNode.get("properties");
if (properties.has("title")) {
return properties.get("title").asText();
}
if (properties.has("name")) {
return properties.get("name").asText();
}
}
// Fall back to the file name without its extension
int lastDotIndex = fileName.lastIndexOf('.');
if (lastDotIndex > 0) {
return fileName.substring(0, lastDotIndex);
}
return fileName;
}
/** Build the data path */
private String generateDataPath(String archiveFileName, String fileName) {
return archiveFileName + "/" + fileName;
}
/** Check whether the set of files can be processed */
public boolean isProcessable(Map<String, String> geoJsonContents) {
if (geoJsonContents == null || geoJsonContents.isEmpty()) {
return false;
}
// Cap the number of files per archive (for performance)
int maxFiles = 50;
if (geoJsonContents.size() > maxFiles) {
log.warn("Maximum processable file count exceeded: {} > {}", geoJsonContents.size(), maxFiles);
return false;
}
return true;
}
/** Check whether the file is a learning-model result */
private boolean isLearningModelResult(String fileName, String geoJsonContent) {
try {
// Check the file name (캠코_YYYY_YYYY_number pattern)
if (fileName.matches(".*캠코_\\d{4}_\\d{4}_\\d+.*\\.geojson")) {
return true;
}
// Check the GeoJSON content for learning-model-specific fields
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
if (rootNode.has("features")) {
JsonNode features = rootNode.get("features");
if (features.isArray() && features.size() > 0) {
JsonNode firstFeature = features.get(0);
if (firstFeature.has("properties")) {
JsonNode properties = firstFeature.get("properties");
// Look for learning-model-specific fields
return properties.has("cd_prob")
|| properties.has("class")
|| (properties.has("before") && properties.has("after"));
}
}
}
} catch (Exception e) {
log.debug("학습 모델 결과 파일 확인 중 오류: {}", fileName, e);
}
return false;
}
/** Process learning-model geometry data in optimized batches */
public void processLearningModelGeometryOptimized(
Long dataUid, String geoJsonContent, String fileName) {
try {
log.info("Starting learning-model geometry processing: {} (dataUid: {})", fileName, dataUid);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Extract metadata
String mapSheetName =
rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Extract years and map-sheet number from the name (e.g. 캠코_2021_2022_35813023)
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1];
afterYear = parts[2];
mapSheetNum = parts[3];
}
if (beforeYear == null || afterYear == null) {
log.warn("연도 정보를 추출할 수 없습니다: {}", fileName);
return;
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
log.warn("features 배열이 없습니다: {}", fileName);
return;
}
// Process in small batches
int totalFeatures = features.size();
int batchSize = 10; // small batch size
int processedCount = 0;
log.info("Processing {} features in batches of {}", totalFeatures, batchSize);
for (int i = 0; i < totalFeatures; i += batchSize) {
int endIndex = Math.min(i + batchSize, totalFeatures);
List<MapSheetLearnDataGeomEntity> batch = new ArrayList<>();
for (int j = i; j < endIndex; j++) {
try {
JsonNode feature = features.get(j);
MapSheetLearnDataGeomEntity geomEntity =
createGeometryEntity(feature, dataUid, beforeYear, afterYear, mapSheetNum);
if (geomEntity != null) {
batch.add(geomEntity);
}
} catch (Exception e) {
log.warn("Feature geometry 처리 실패 (feature {}): {}", j + 1, e.getMessage());
}
}
// Save per batch
if (!batch.isEmpty()) {
saveBatchGeometry(batch);
processedCount += batch.size();
log.debug("Batch {}-{} done ({} items)", i + 1, endIndex, batch.size());
}
// Free memory
batch.clear();
}
log.info("학습 모델 geometry 데이터 저장 완료: {} ({}개 feature)", fileName, processedCount);
} catch (Exception e) {
log.error("학습 모델 geometry 데이터 처리 실패: {}", fileName, e);
throw new RuntimeException("Geometry 처리 실패: " + fileName, e);
}
}
/** Save a geometry batch in a separate transaction */
@Transactional(propagation = org.springframework.transaction.annotation.Propagation.REQUIRES_NEW)
public void saveBatchGeometry(List<MapSheetLearnDataGeomEntity> batch) {
try {
if (batch == null || batch.isEmpty()) {
return;
}
mapSheetLearnDataGeomRepository.saveAll(batch);
} catch (Exception e) {
log.error("배치 geometry 저장 실패: {}", e.getMessage());
// 개별 저장 시도
for (MapSheetLearnDataGeomEntity entity : batch) {
try {
if (entity != null) {
mapSheetLearnDataGeomRepository.save(entity);
}
} catch (Exception individualError) {
log.warn("개별 geometry 저장 실패: {}", individualError.getMessage());
}
}
}
}
/** Create a geometry entity from a single feature */
private MapSheetLearnDataGeomEntity createGeometryEntity(
JsonNode feature, Long dataUid, String beforeYear, String afterYear, String mapSheetNum) {
JsonNode properties = feature.get("properties");
JsonNode geometry = feature.get("geometry");
if (properties == null || geometry == null) {
return null;
}
MapSheetLearnDataGeomEntity entity = new MapSheetLearnDataGeomEntity();
// Basic info
entity.setDataUid(dataUid);
entity.setBeforeYyyy(Integer.parseInt(beforeYear));
entity.setAfterYyyy(Integer.parseInt(afterYear));
if (mapSheetNum != null) {
try {
entity.setMapSheetNum(Long.parseLong(mapSheetNum));
} catch (NumberFormatException e) {
log.warn("지도번호 파싱 실패: {}", mapSheetNum, e);
}
}
// Change-detection probability
if (properties.has("cd_prob")) {
entity.setCdProb(properties.get("cd_prob").asDouble());
}
// Area info
if (properties.has("area")) {
entity.setArea(properties.get("area").asDouble());
}
// Classification info
if (properties.has("class")) {
JsonNode classNode = properties.get("class");
// "before" classification
if (classNode.has("before") && classNode.get("before").isArray()) {
JsonNode beforeArray = classNode.get("before");
if (beforeArray.size() > 0) {
JsonNode firstBefore = beforeArray.get(0);
if (firstBefore.has("class_name")) {
entity.setClassBeforeName(firstBefore.get("class_name").asText());
}
if (firstBefore.has("probability")) {
entity.setClassBeforeProb(firstBefore.get("probability").asDouble());
}
}
}
// "after" classification
if (classNode.has("after") && classNode.get("after").isArray()) {
JsonNode afterArray = classNode.get("after");
if (afterArray.size() > 0) {
JsonNode firstAfter = afterArray.get(0);
if (firstAfter.has("class_name")) {
entity.setClassAfterName(firstAfter.get("class_name").asText());
}
if (firstAfter.has("probability")) {
entity.setClassAfterProb(firstAfter.get("probability").asDouble());
}
}
}
}
// Convert the geometry
try {
Geometry geom = geoJsonReader.read(geometry.toString());
if (geom != null) {
geom.setSRID(5186); // EPSG:5186
entity.setGeom(geom);
}
} catch (Exception e) {
log.warn("Geometry 파싱 실패: {}", e.getMessage());
return null;
}
return entity;
}
}
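
The service keys several decisions off the 캠코_<beforeYear>_<afterYear>_<mapSheetNum> file-name convention (see setYearInformation and processLearningModelGeometryOptimized above). A small worked example of that parsing, using the sample name from the comments:

public class LearnFileNameSketch {
  public static void main(String[] args) {
    String fileName = "캠코_2021_2022_35813023.geojson"; // sample name from the comments above
    // Same pattern isLearningModelResult() uses to recognize model-result files
    boolean isModelResult = fileName.matches(".*캠코_\\d{4}_\\d{4}_\\d+.*\\.geojson");
    String[] parts = fileName.replace(".geojson", "").split("_");
    System.out.printf(
        "model result=%s, before=%s, after=%s, mapSheetNum=%s%n",
        isModelResult, parts[1], parts[2], parts[3]);
    // prints: model result=true, before=2021, after=2022, mapSheetNum=35813023
  }
}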


@@ -1,406 +0,0 @@
package com.kamco.cd.kamcoback.geojson.service;
import com.kamco.cd.kamcoback.geojson.config.GeoJsonMonitorConfig;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import jakarta.annotation.PostConstruct;
import java.io.IOException;
import java.nio.file.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
/** GeoJSON file monitoring service. Periodically scans the configured directory and processes archive files automatically. */
@Slf4j
@Service
@RequiredArgsConstructor
public class GeoJsonFileMonitorService {
private final GeoJsonMonitorConfig config;
private final ArchiveExtractorService archiveExtractorService;
private final GeoJsonDataService geoJsonDataService;
private final GeometryConversionService geometryConversionService;
private final MapSheetLearnDataRepository learnDataRepository;
private final MapSheetLearnDataGeomRepository geomRepository;
/** Pre-create the required directories at application startup */
@PostConstruct
public void initializeDirectories() {
try {
log.info("Initializing GeoJSON monitoring system...");
log.info(
"Configured paths - Watch: {}, Processed: {}, Error: {}, Temp: {}",
config.getWatchDirectory(),
config.getProcessedDirectory(),
config.getErrorDirectory(),
config.getTempDirectory());
ensureDirectoriesExist();
log.info("GeoJSON monitoring system initialized");
} catch (Exception e) {
log.warn("GeoJSON monitoring system initialization failed - will be retried when the scheduler runs", e);
// Let the application start even if initialization fails (no RuntimeException thrown)
}
}
/** File monitoring via the scheduler; runs periodically according to the configured cron expression */
// @Scheduled(cron = "#{@geoJsonMonitorConfig.cronExpression}")
public void monitorFiles() {
log.debug("File monitoring started");
try {
// Ensure the monitored directories exist, creating them if needed
ensureDirectoriesExist();
// Find and process archive files
processArchiveFiles();
// Convert any unprocessed geometry data
processUnprocessedGeometryData();
} catch (RuntimeException e) {
log.error("Fatal error during file monitoring - skipping this cycle", e);
} catch (Exception e) {
log.error("Error during file monitoring", e);
}
log.debug("File monitoring finished");
}
/** Ensure the required directories exist, creating them if missing */
private void ensureDirectoriesExist() {
boolean hasError = false;
try {
createDirectoryIfNotExists(config.getWatchDirectory());
} catch (IOException e) {
log.error("Watch 디렉토리 생성 실패: {} - {}", config.getWatchDirectory(), e.getMessage());
hasError = true;
}
try {
createDirectoryIfNotExists(config.getProcessedDirectory());
} catch (IOException e) {
log.error("Processed 디렉토리 생성 실패: {} - {}", config.getProcessedDirectory(), e.getMessage());
hasError = true;
}
try {
createDirectoryIfNotExists(config.getErrorDirectory());
} catch (IOException e) {
log.error("Error 디렉토리 생성 실패: {} - {}", config.getErrorDirectory(), e.getMessage());
hasError = true;
}
try {
createDirectoryIfNotExists(config.getTempDirectory());
} catch (IOException e) {
log.error("Temp 디렉토리 생성 실패: {} - {}", config.getTempDirectory(), e.getMessage());
hasError = true;
}
if (hasError) {
log.warn("Some directories could not be created; the related features may be limited.");
log.info("Please create the following directories manually:");
log.info(" - {}", config.getWatchDirectory());
log.info(" - {}", config.getProcessedDirectory());
log.info(" - {}", config.getErrorDirectory());
log.info(" - {}", config.getTempDirectory());
} else {
log.info("All required directories are ready.");
}
}
/** Create the directory if it does not exist */
private void createDirectoryIfNotExists(String directory) throws IOException {
if (directory == null || directory.trim().isEmpty()) {
throw new IllegalArgumentException("Directory path is empty.");
}
Path path = Paths.get(directory);
if (!Files.exists(path)) {
try {
Files.createDirectories(path);
log.info("Directory created: {}", directory);
// Set directory permissions (on Unix/Linux)
try {
if (!System.getProperty("os.name").toLowerCase().contains("windows")) {
// Apply rwxrwxr-x permissions to the created directory
java.nio.file.Files.setPosixFilePermissions(
path, java.nio.file.attribute.PosixFilePermissions.fromString("rwxrwxr-x"));
}
} catch (Exception permissionException) {
log.debug("Permission setup failed (ignored): {}", permissionException.getMessage());
}
} catch (IOException e) {
log.error("Directory creation failed: {} - {}", directory, e.getMessage());
throw new IOException("Cannot create directory: " + directory, e);
}
} else if (!Files.isDirectory(path)) {
throw new IOException("The given path is not a directory: " + directory);
} else if (!Files.isWritable(path)) {
log.warn("No write permission for directory: {}", directory);
} else {
log.debug("Directory already exists: {}", directory);
}
}
/** Find and process archive files in the watch directory */
private void processArchiveFiles() {
Path watchDir = Paths.get(config.getWatchDirectory());
// Check the directory exists
if (!Files.exists(watchDir)) {
log.debug("Watch directory does not exist: {}", watchDir);
return;
}
if (!Files.isDirectory(watchDir)) {
log.warn("Watch path is not a directory: {}", watchDir);
return;
}
if (!Files.isReadable(watchDir)) {
log.warn("No read permission for watch directory: {}", watchDir);
return;
}
try (Stream<Path> files = Files.list(watchDir)) {
files
.filter(Files::isRegularFile)
.filter(archiveExtractorService::isSupportedArchive)
.filter(archiveExtractorService::isFileSizeValid)
.forEach(this::processArchiveFile);
} catch (IOException e) {
log.error("파일 목록 조회 실패: {}", watchDir, e);
}
}
/** Process a single archive file */
private void processArchiveFile(Path archiveFile) {
String fileName = archiveFile.getFileName().toString();
log.info("Starting archive processing: {}", fileName);
try {
// 1. Extract GeoJSON files from the archive
Map<String, String> geoJsonContents =
archiveExtractorService.extractGeoJsonFiles(archiveFile);
if (geoJsonContents.isEmpty()) {
log.warn("No GeoJSON files found in archive: {}", fileName);
moveFileToError(archiveFile, "No GeoJSON files");
return;
}
// 2. Check the file count is within the processable limit
if (!geoJsonDataService.isProcessable(geoJsonContents)) {
log.warn("File cannot be processed: {}", fileName);
moveFileToError(archiveFile, "Unprocessable file");
return;
}
// 3. Save the GeoJSON data to the database
List<Long> savedLearnDataIds =
geoJsonDataService.processGeoJsonFiles(geoJsonContents, fileName);
if (savedLearnDataIds.isEmpty()) {
log.warn("No learning data was saved: {}", fileName);
moveFileToError(archiveFile, "Data save failed");
return;
}
// 4. Convert to geometry data
List<Long> geometryIds = geometryConversionService.convertToGeometryData(savedLearnDataIds);
// 5. Move the finished file to the processed directory
moveFileToProcessed(archiveFile);
log.info(
"Archive processing finished: {} (learning data: {}, geometry: {})",
fileName,
savedLearnDataIds.size(),
geometryIds.size());
} catch (Exception e) {
log.error("Archive processing failed: {}", fileName, e);
try {
moveFileToError(archiveFile, "Error during processing: " + e.getMessage());
} catch (IOException moveError) {
log.error("Failed to move the failed file: {}", fileName, moveError);
}
}
}
/** Convert unprocessed geometry data */
private void processUnprocessedGeometryData() {
try {
List<Long> processedIds = geometryConversionService.processUnprocessedLearnData();
if (!processedIds.isEmpty()) {
log.info("Unprocessed geometry conversion finished: {} items", processedIds.size());
}
} catch (Exception e) {
log.error("Unprocessed geometry conversion failed", e);
}
}
/** Move a successfully processed file to the processed directory */
private void moveFileToProcessed(Path sourceFile) throws IOException {
String fileName = sourceFile.getFileName().toString();
String timestampedFileName = addTimestamp(fileName);
Path targetPath = Paths.get(config.getProcessedDirectory(), timestampedFileName);
Files.move(sourceFile, targetPath, StandardCopyOption.REPLACE_EXISTING);
log.info("Moved file to processed directory: {} -> {}", fileName, timestampedFileName);
}
/** Move a failed file to the error directory */
private void moveFileToError(Path sourceFile, String errorReason) throws IOException {
String fileName = sourceFile.getFileName().toString();
String errorFileName = addTimestamp(fileName) + ".error";
Path targetPath = Paths.get(config.getErrorDirectory(), errorFileName);
Files.move(sourceFile, targetPath, StandardCopyOption.REPLACE_EXISTING);
// Save the error details to a companion file
String errorInfoFileName = errorFileName + ".info";
Path errorInfoPath = Paths.get(config.getErrorDirectory(), errorInfoFileName);
String errorInfo =
String.format(
"File: %s%nError time: %s%nError cause: %s%n", fileName, java.time.Instant.now(), errorReason);
Files.write(errorInfoPath, errorInfo.getBytes());
log.warn("Moved file to error directory: {} (cause: {})", fileName, errorReason);
}
/** Append a timestamp to a file name */
private String addTimestamp(String fileName) {
int lastDotIndex = fileName.lastIndexOf('.');
String name = (lastDotIndex > 0) ? fileName.substring(0, lastDotIndex) : fileName;
String extension = (lastDotIndex > 0) ? fileName.substring(lastDotIndex) : "";
return String.format("%s_%d%s", name, System.currentTimeMillis(), extension);
}
/** Manually process a specific file (for testing/administration) */
public void processFileManually(String filePath) {
Path archiveFile = Paths.get(filePath);
if (!Files.exists(archiveFile)) {
log.error("File does not exist: {}", filePath);
return;
}
if (!archiveExtractorService.isSupportedArchive(archiveFile)) {
log.error("Unsupported archive format: {}", filePath);
return;
}
log.info("Starting manual file processing: {}", filePath);
processArchiveFile(archiveFile);
}
/** Run directory initialization manually (callable from the API) */
public void initializeDirectoriesManually() {
log.info("Starting manual directory initialization");
try {
ensureDirectoriesExist();
log.info("Manual directory initialization finished");
} catch (Exception e) {
log.error("Manual directory initialization failed", e);
throw new RuntimeException("Directory initialization failed", e);
}
}
/** Return monitoring status info */
public Map<String, Object> getMonitorStatus() {
return Map.of(
"watchDirectory", config.getWatchDirectory(),
"processedDirectory", config.getProcessedDirectory(),
"errorDirectory", config.getErrorDirectory(),
"cronExpression", config.getCronExpression(),
"supportedExtensions", config.getSupportedExtensions(),
"maxFileSize", config.getMaxFileSize(),
"maxFileSizeMB", config.getMaxFileSize() / 1024 / 1024);
}
/** Get system statistics */
public Map<String, Object> getSystemStats() {
Map<String, Object> stats = new HashMap<>();
try {
// Database statistics
long totalLearnData = learnDataRepository.count();
long totalGeomData = geomRepository.count();
long pendingAnalysis = learnDataRepository.countByAnalState("PENDING");
stats.put(
"database",
Map.of(
"totalLearnData", totalLearnData,
"totalGeomData", totalGeomData,
"pendingAnalysis", pendingAnalysis));
// File-system statistics
stats.put("fileSystem", getFileSystemStats());
// Monitoring settings
stats.put(
"monitoring",
Map.of(
"isActive", true,
"cronExpression", config.getCronExpression(),
"watchDirectory", config.getWatchDirectory(),
"processedDirectory", config.getProcessedDirectory(),
"errorDirectory", config.getErrorDirectory()));
} catch (Exception e) {
log.error("통계 정보 조회 실패", e);
stats.put("error", e.getMessage());
}
return stats;
}
/** Get file-system statistics */
private Map<String, Object> getFileSystemStats() {
Map<String, Object> fileStats = new HashMap<>();
try {
// Count the files in each directory
Path watchDir = Paths.get(config.getWatchDirectory());
Path processedDir = Paths.get(config.getProcessedDirectory());
Path errorDir = Paths.get(config.getErrorDirectory());
fileStats.put("watchDirectoryCount", countFilesInDirectory(watchDir));
fileStats.put("processedDirectoryCount", countFilesInDirectory(processedDir));
fileStats.put("errorDirectoryCount", countFilesInDirectory(errorDir));
} catch (Exception e) {
log.warn("파일 시스템 통계 조회 실패: {}", e.getMessage());
fileStats.put("error", e.getMessage());
}
return fileStats;
}
/** Count the files in a directory */
private long countFilesInDirectory(Path directory) {
if (!Files.exists(directory) || !Files.isDirectory(directory)) {
return 0;
}
try (Stream<Path> files = Files.list(directory)) {
return files.filter(Files::isRegularFile).count();
} catch (IOException e) {
log.warn("디렉토리 파일 계산 실패: {}", directory, e);
return 0;
}
}
}
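
Note that the @Scheduled annotation on monitorFiles() is commented out above, so the cron-driven monitoring never fires. A minimal sketch (an assumption, not part of this change set) of what re-enabling it would look like:

import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;

@Configuration
@EnableScheduling // without this, Spring ignores @Scheduled methods
public class MonitorSchedulingConfig {
  // ...and on GeoJsonFileMonitorService.monitorFiles(), restore:
  // @Scheduled(cron = "#{@geoJsonMonitorConfig.cronExpression}")
  // The SpEL expression resolves against the geoJsonMonitorConfig bean defined earlier.
}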


@@ -1,436 +0,0 @@
package com.kamco.cd.kamcoback.geojson.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.time.ZonedDateTime;
import java.util.*;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.locationtech.jts.geom.*;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/** Geometry data conversion service */
@Slf4j
@Service
@RequiredArgsConstructor
public class GeometryConversionService {
private final MapSheetLearnDataRepository mapSheetLearnDataRepository;
private final MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
private final ObjectMapper objectMapper;
private final GeometryFactory geometryFactory = new GeometryFactory();
/** Populate the geometry table from MapSheetLearnData JSON data */
@Transactional
public List<Long> convertToGeometryData(List<Long> learnDataIds) {
List<Long> processedIds = new ArrayList<>();
log.info("Starting geometry conversion: {} learning data records", learnDataIds.size());
for (Long dataId : learnDataIds) {
try {
if (dataId != null) {
Optional<MapSheetLearnDataEntity> learnDataOpt =
mapSheetLearnDataRepository.findById(dataId);
if (learnDataOpt.isPresent()) {
List<Long> geometryIds = processLearnDataToGeometry(learnDataOpt.get());
processedIds.addAll(geometryIds);
log.debug("Learning data {}: created {} geometry records", dataId, geometryIds.size());
} else {
log.warn("Learning data not found: {}", dataId);
}
}
} catch (Exception e) {
log.error("Geometry conversion failed - learning data ID: {}", dataId, e);
// A single conversion failure does not abort the whole run
}
}
log.info(
"Geometry conversion finished: {} processed, {} geometries created", learnDataIds.size(), processedIds.size());
return processedIds;
}
/** Convert a single learning data record into geometry data */
private List<Long> processLearnDataToGeometry(MapSheetLearnDataEntity learnData) {
List<Long> geometryIds = new ArrayList<>();
try {
// Delete existing geometry data (it will be regenerated)
mapSheetLearnDataGeomRepository.deleteByDataUid(learnData.getId());
// Extract GeoJSON from the stored JSON data
Map<String, Object> dataJson = learnData.getDataJson();
if (dataJson == null || dataJson.isEmpty()) {
log.warn("No JSON data: {}", learnData.getId());
return geometryIds;
}
// Parse the JSON back into GeoJSON
String geoJsonString = objectMapper.writeValueAsString(dataJson);
JsonNode geoJsonNode = objectMapper.readTree(geoJsonString);
// Dispatch on the GeoJSON type
String type = geoJsonNode.get("type").asText();
switch (type) {
case "FeatureCollection":
geometryIds.addAll(processFeatureCollection(geoJsonNode, learnData));
break;
case "Feature":
Long geometryId = processFeature(geoJsonNode, learnData);
if (geometryId != null) {
geometryIds.add(geometryId);
}
break;
case "Point":
case "LineString":
case "Polygon":
case "MultiPoint":
case "MultiLineString":
case "MultiPolygon":
Long directGeometryId = processDirectGeometry(geoJsonNode, learnData);
if (directGeometryId != null) {
geometryIds.add(directGeometryId);
}
break;
default:
log.warn("지원하지 않는 GeoJSON type: {} (데이터 ID: {})", type, learnData.getId());
}
} catch (Exception e) {
log.error("Geometry conversion failed: learning data ID {}", learnData.getId(), e);
throw new RuntimeException("Geometry conversion failed", e);
}
return geometryIds;
}
/** Handle a FeatureCollection */
private List<Long> processFeatureCollection(
JsonNode featureCollectionNode, MapSheetLearnDataEntity learnData) {
List<Long> geometryIds = new ArrayList<>();
if (!featureCollectionNode.has("features")) {
log.warn("FeatureCollection has no features array: {}", learnData.getId());
return geometryIds;
}
JsonNode featuresNode = featureCollectionNode.get("features");
if (featuresNode.isArray()) {
for (JsonNode featureNode : featuresNode) {
try {
Long geometryId = processFeature(featureNode, learnData);
if (geometryId != null) {
geometryIds.add(geometryId);
}
} catch (Exception e) {
log.error("Feature 처리 실패 (학습 데이터 ID: {})", learnData.getId(), e);
}
}
}
return geometryIds;
}
/** Handle a Feature */
private Long processFeature(JsonNode featureNode, MapSheetLearnDataEntity learnData) {
try {
if (!featureNode.has("geometry")) {
log.warn("Feature has no geometry: {}", learnData.getId());
return null;
}
JsonNode geometryNode = featureNode.get("geometry");
JsonNode propertiesNode =
featureNode.has("properties") ? featureNode.get("properties") : null;
return createGeometryEntity(geometryNode, propertiesNode, learnData);
} catch (Exception e) {
log.error("Feature 처리 중 오류 (학습 데이터 ID: {})", learnData.getId(), e);
return null;
}
}
/** Handle a bare geometry */
private Long processDirectGeometry(JsonNode geometryNode, MapSheetLearnDataEntity learnData) {
return createGeometryEntity(geometryNode, null, learnData);
}
/** Create and save a geometry entity */
private Long createGeometryEntity(
JsonNode geometryNode, JsonNode propertiesNode, MapSheetLearnDataEntity learnData) {
try {
MapSheetLearnDataGeomEntity geometryEntity = new MapSheetLearnDataGeomEntity();
// Set basic info
geometryEntity.setDataUid(learnData.getId());
geometryEntity.setBeforeYyyy(learnData.getDataYyyy());
geometryEntity.setAfterYyyy(learnData.getCompareYyyy());
// Convert and set the geometry
Geometry geometry = parseGeometryFromGeoJson(geometryNode);
if (geometry != null) {
geometryEntity.setGeom(geometry);
geometryEntity.setGeoType(geometry.getGeometryType());
// Compute the area (for polygons)
if (geometry instanceof Polygon || geometry.getGeometryType().contains("Polygon")) {
double area = geometry.getArea();
geometryEntity.setArea(area);
}
} else {
log.warn("Geometry conversion failed: {}", geometryNode);
return null;
}
// Extract extra info from the properties
if (propertiesNode != null) {
extractPropertiesData(geometryEntity, propertiesNode, learnData);
}
// Set timestamps
ZonedDateTime now = ZonedDateTime.now();
geometryEntity.setCreatedDttm(now);
geometryEntity.setUpdatedDttm(now);
// Save
MapSheetLearnDataGeomEntity savedEntity =
mapSheetLearnDataGeomRepository.save(geometryEntity);
return savedEntity.getId();
} catch (Exception e) {
log.error("GeometryEntity 생성 실패 (학습 데이터 ID: {})", learnData.getId(), e);
return null;
}
}
/** Build a JTS Geometry from a GeoJSON node */
private Geometry parseGeometryFromGeoJson(JsonNode geometryNode) {
try {
if (!geometryNode.has("type") || !geometryNode.has("coordinates")) {
log.warn("Invalid geometry: missing type or coordinates.");
return null;
}
String geometryType = geometryNode.get("type").asText();
JsonNode coordinatesNode = geometryNode.get("coordinates");
switch (geometryType.toLowerCase()) {
case "point":
return createPoint(coordinatesNode);
case "linestring":
return createLineString(coordinatesNode);
case "polygon":
return createPolygon(coordinatesNode);
case "multipoint":
return createMultiPoint(coordinatesNode);
case "multilinestring":
return createMultiLineString(coordinatesNode);
case "multipolygon":
return createMultiPolygon(coordinatesNode);
default:
log.warn("지원하지 않는 Geometry 타입: {}", geometryType);
return null;
}
} catch (Exception e) {
log.error("Geometry 파싱 실패", e);
return null;
}
}
private Point createPoint(JsonNode coordinatesNode) {
if (coordinatesNode.size() < 2) return null;
double x = coordinatesNode.get(0).asDouble();
double y = coordinatesNode.get(1).asDouble();
return geometryFactory.createPoint(new Coordinate(x, y));
}
private LineString createLineString(JsonNode coordinatesNode) {
List<Coordinate> coords = new ArrayList<>();
for (JsonNode coordNode : coordinatesNode) {
if (coordNode.size() >= 2) {
coords.add(new Coordinate(coordNode.get(0).asDouble(), coordNode.get(1).asDouble()));
}
}
return geometryFactory.createLineString(coords.toArray(new Coordinate[0]));
}
private Polygon createPolygon(JsonNode coordinatesNode) {
if (coordinatesNode.size() == 0) return null;
// Exterior ring
JsonNode exteriorRing = coordinatesNode.get(0);
List<Coordinate> coords = new ArrayList<>();
for (JsonNode coordNode : exteriorRing) {
if (coordNode.size() >= 2) {
coords.add(new Coordinate(coordNode.get(0).asDouble(), coordNode.get(1).asDouble()));
}
}
if (coords.size() < 3) return null;
// Close ring if not already closed
if (!coords.get(0).equals2D(coords.get(coords.size() - 1))) {
coords.add(new Coordinate(coords.get(0)));
}
LinearRing shell = geometryFactory.createLinearRing(coords.toArray(new Coordinate[0]));
// Interior rings (holes)
LinearRing[] holes = new LinearRing[coordinatesNode.size() - 1];
for (int i = 1; i < coordinatesNode.size(); i++) {
JsonNode holeRing = coordinatesNode.get(i);
List<Coordinate> holeCoords = new ArrayList<>();
for (JsonNode coordNode : holeRing) {
if (coordNode.size() >= 2) {
holeCoords.add(new Coordinate(coordNode.get(0).asDouble(), coordNode.get(1).asDouble()));
}
}
if (holeCoords.size() >= 3) {
if (!holeCoords.get(0).equals2D(holeCoords.get(holeCoords.size() - 1))) {
holeCoords.add(new Coordinate(holeCoords.get(0)));
}
holes[i - 1] = geometryFactory.createLinearRing(holeCoords.toArray(new Coordinate[0]));
}
}
return geometryFactory.createPolygon(shell, holes);
}
private MultiPoint createMultiPoint(JsonNode coordinatesNode) {
List<Point> points = new ArrayList<>();
for (JsonNode pointNode : coordinatesNode) {
Point point = createPoint(pointNode);
if (point != null) {
points.add(point);
}
}
return geometryFactory.createMultiPoint(points.toArray(new Point[0]));
}
private MultiLineString createMultiLineString(JsonNode coordinatesNode) {
List<LineString> lineStrings = new ArrayList<>();
for (JsonNode lineNode : coordinatesNode) {
LineString line = createLineString(lineNode);
if (line != null) {
lineStrings.add(line);
}
}
return geometryFactory.createMultiLineString(lineStrings.toArray(new LineString[0]));
}
private MultiPolygon createMultiPolygon(JsonNode coordinatesNode) {
List<Polygon> polygons = new ArrayList<>();
for (JsonNode polygonNode : coordinatesNode) {
Polygon polygon = createPolygon(polygonNode);
if (polygon != null) {
polygons.add(polygon);
}
}
return geometryFactory.createMultiPolygon(polygons.toArray(new Polygon[0]));
}
/** Extract extra info from properties */
private void extractPropertiesData(
MapSheetLearnDataGeomEntity geometryEntity,
JsonNode propertiesNode,
MapSheetLearnDataEntity learnData) {
// Change-detection probability
if (propertiesNode.has("cd_prob")) {
try {
double cdProb = propertiesNode.get("cd_prob").asDouble();
geometryEntity.setCdProb(cdProb);
} catch (Exception e) {
log.debug("cd_prob 파싱 실패", e);
}
}
// "before" class info
if (propertiesNode.has("class_before_name")) {
geometryEntity.setClassBeforeName(propertiesNode.get("class_before_name").asText());
}
if (propertiesNode.has("class_before_prob")) {
try {
double beforeProb = propertiesNode.get("class_before_prob").asDouble();
geometryEntity.setClassBeforeProb(beforeProb);
} catch (Exception e) {
log.debug("class_before_prob 파싱 실패", e);
}
}
// "after" class info
if (propertiesNode.has("class_after_name")) {
geometryEntity.setClassAfterName(propertiesNode.get("class_after_name").asText());
}
if (propertiesNode.has("class_after_prob")) {
try {
double afterProb = propertiesNode.get("class_after_prob").asDouble();
geometryEntity.setClassAfterProb(afterProb);
} catch (Exception e) {
log.debug("class_after_prob 파싱 실패", e);
}
}
// Map-sheet number
if (propertiesNode.has("map_sheet_num")) {
try {
long mapSheetNum = propertiesNode.get("map_sheet_num").asLong();
geometryEntity.setMapSheetNum(mapSheetNum);
} catch (Exception e) {
log.debug("map_sheet_num 파싱 실패", e);
}
}
// Area (when provided in properties)
if (propertiesNode.has("area")) {
try {
double area = propertiesNode.get("area").asDouble();
geometryEntity.setArea(area);
} catch (Exception e) {
log.debug("area 파싱 실패", e);
}
}
}
/** Find unprocessed learning data and convert it to geometry automatically */
@Transactional
public List<Long> processUnprocessedLearnData() {
// Fetch learning data whose analysis state is PENDING
List<MapSheetLearnDataEntity> unprocessedData =
mapSheetLearnDataRepository.findByAnalState("PENDING");
if (unprocessedData.isEmpty()) {
log.debug("No unprocessed learning data to handle.");
return new ArrayList<>();
}
log.info("Converting geometry for {} unprocessed learning data records.", unprocessedData.size());
List<Long> processedIds = new ArrayList<>();
for (MapSheetLearnDataEntity data : unprocessedData) {
try {
List<Long> geometryIds = processLearnDataToGeometry(data);
processedIds.addAll(geometryIds);
// Mark as completed
data.setAnalState("COMPLETED");
mapSheetLearnDataRepository.save(data);
} catch (Exception e) {
log.error("Unprocessed learning data handling failed: {}", data.getId(), e);
// On failure, mark as ERROR
data.setAnalState("ERROR");
mapSheetLearnDataRepository.save(data);
}
}
return processedIds;
}
}
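
createPolygon() above closes unclosed rings before building a LinearRing because JTS requires the first and last coordinates of a ring to be identical. A small illustrative JTS sketch of that requirement:

import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.Polygon;

public class RingClosingSketch {
  public static void main(String[] args) {
    GeometryFactory gf = new GeometryFactory();
    // A unit square as it might arrive in GeoJSON: the ring is not closed.
    Coordinate[] open = {
      new Coordinate(0, 0), new Coordinate(1, 0), new Coordinate(1, 1), new Coordinate(0, 1)
    };
    // Repeat the first coordinate at the end, as the service's createPolygon() does.
    Coordinate[] closed = new Coordinate[open.length + 1];
    System.arraycopy(open, 0, closed, 0, open.length);
    closed[open.length] = new Coordinate(open[0]);
    LinearRing shell = gf.createLinearRing(closed); // would throw if the ring were left open
    Polygon polygon = gf.createPolygon(shell, null);
    System.out.println(polygon.getArea()); // 1.0
  }
}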


@@ -2,9 +2,7 @@ package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
-import com.kamco.cd.kamcoback.inference.dto.LearningModelResultDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultService;
-import com.kamco.cd.kamcoback.inference.service.LearningModelResultProcessor;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
@@ -12,9 +10,6 @@ import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
@@ -22,7 +17,6 @@ import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@@ -36,7 +30,6 @@ public class InferenceResultApiController {
private static final Logger logger = LoggerFactory.getLogger(InferenceResultApiController.class);
private final InferenceResultService inferenceResultService;
-private final LearningModelResultProcessor learningModelResultProcessor;
@Operation(summary = "List analysis results for inference management", description = "Queries analysis results by analysis state and title.")
@ApiResponses(
@@ -141,186 +134,4 @@ public class InferenceResultApiController {
inferenceResultService.getInferenceResultGeomList(id, searchGeoReq);
return ApiResponseDto.ok(geomList);
}
// @Operation(summary = "학습모델 결과 처리", description = "실제 학습모델 GeoJSON 파일을 처리하여 데이터베이스에 저장합니다.")
// @ApiResponses(
// value = {
// @ApiResponse(
// responseCode = "200",
// description = "처리 성공",
// content =
// @Content(
// mediaType = "application/json",
// schema =
// @Schema(implementation =
// LearningModelResultDto.ProcessResponse.class))),
// @ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
// @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
// })
// @PostMapping("/learning-model/process")
public ApiResponseDto<LearningModelResultDto.ProcessResponse> processLearningModelResult(
@RequestBody LearningModelResultDto.ProcessRequest request) {
try {
logger.info("Processing learning model result file: {}", request.getFilePath());
Path filePath = Paths.get(request.getFilePath());
int processedFeatures = learningModelResultProcessor.processLearningModelResult(filePath);
LearningModelResultDto.ProcessResponse response =
LearningModelResultDto.ProcessResponse.builder()
.success(true)
.message("학습모델 결과 처리가 완료되었습니다.")
.processedFeatures(processedFeatures)
.filePath(request.getFilePath())
.build();
logger.info(
"Successfully processed {} features from file: {}",
processedFeatures,
request.getFilePath());
return ApiResponseDto.ok(response);
} catch (Exception e) {
logger.error("Failed to process learning model result: {}", request.getFilePath(), e);
LearningModelResultDto.ProcessResponse response =
LearningModelResultDto.ProcessResponse.builder()
.success(false)
.message("학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
.processedFeatures(0)
.filePath(request.getFilePath())
.build();
return ApiResponseDto.ok(response);
}
}
// @Operation(summary = "학습모델 결과 일괄 처리", description = "여러 학습모델 GeoJSON 파일을 일괄 처리하여 데이터베이스에
// 저장합니다.")
// @ApiResponses(
// value = {
// @ApiResponse(
// responseCode = "200",
// description = "처리 성공",
// content =
// @Content(
// mediaType = "application/json",
// schema =
// @Schema(
// implementation =
// LearningModelResultDto.BatchProcessResponse.class))),
// @ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
// @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
// })
// @PostMapping("/learning-model/process-batch")
public ApiResponseDto<LearningModelResultDto.BatchProcessResponse>
processBatchLearningModelResults(
@RequestBody LearningModelResultDto.BatchProcessRequest request) {
try {
logger.info("Processing {} learning model result files", request.getFilePaths().size());
List<Path> filePaths = new ArrayList<>();
for (String filePath : request.getFilePaths()) {
filePaths.add(Paths.get(filePath));
}
int totalProcessedFeatures =
learningModelResultProcessor.processMultipleLearningModelResults(filePaths);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(true)
.message("일괄 학습모델 결과 처리가 완료되었습니다.")
.totalProcessedFeatures(totalProcessedFeatures)
.processedFileCount(request.getFilePaths().size())
.filePaths(request.getFilePaths())
.build();
logger.info(
"Successfully processed {} features from {} files",
totalProcessedFeatures,
request.getFilePaths().size());
return ApiResponseDto.ok(response);
} catch (Exception e) {
logger.error("Failed to process batch learning model results", e);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(false)
.message("일괄 학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
.totalProcessedFeatures(0)
.processedFileCount(0)
.filePaths(request.getFilePaths())
.build();
return ApiResponseDto.ok(response);
}
}
// @Operation(summary = "기본 학습모델 파일 처리", description = "미리 준비된 학습모델 파일을 처리합니다.")
// @ApiResponses(
// value = {
// @ApiResponse(
// responseCode = "200",
// description = "처리 성공",
// content =
// @Content(
// mediaType = "application/json",
// schema =
// @Schema(
// implementation =
// LearningModelResultDto.BatchProcessResponse.class))),
// @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
// })
// @PostMapping("/learning-model/process-default")
public ApiResponseDto<LearningModelResultDto.BatchProcessResponse>
processDefaultLearningModelResults() {
try {
logger.info("Processing default learning model result files");
// Process the two default learning model files from upload directory
List<String> defaultFilePaths =
List.of(
"/Users/deniallee/geojson/upload/캠코_2021_2022_35813023.geojson",
"/Users/deniallee/geojson/upload/캠코_2023_2024_35810049.geojson");
List<Path> filePaths = new ArrayList<>();
for (String filePath : defaultFilePaths) {
filePaths.add(Paths.get(filePath));
}
int totalProcessedFeatures =
learningModelResultProcessor.processMultipleLearningModelResults(filePaths);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(true)
.message("기본 학습모델 결과 파일 처리가 완료되었습니다.")
.totalProcessedFeatures(totalProcessedFeatures)
.processedFileCount(defaultFilePaths.size())
.filePaths(defaultFilePaths)
.build();
logger.info(
"Successfully processed {} features from {} default files",
totalProcessedFeatures,
defaultFilePaths.size());
return ApiResponseDto.ok(response);
} catch (Exception e) {
logger.error("Failed to process default learning model results", e);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(false)
.message("기본 학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
.totalProcessedFeatures(0)
.processedFileCount(0)
.filePaths(List.of())
.build();
return ApiResponseDto.ok(response);
}
}
}

View File

@@ -1,431 +0,0 @@
package com.kamco.cd.kamcoback.inference.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.locationtech.jts.io.geojson.GeoJsonReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* Service for processing actual learning model result GeoJSON files and storing them in the
* database with proper field mapping.
*/
@Service
public class LearningModelResultProcessor {
private static final Logger logger = LoggerFactory.getLogger(LearningModelResultProcessor.class);
@Autowired private MapSheetLearnDataRepository mapSheetLearnDataRepository;
@Autowired private MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
private final ObjectMapper objectMapper = new ObjectMapper();
private final GeoJsonReader geoJsonReader = new GeoJsonReader();
/** Process large learning model result files with optimized batch processing */
public int processLearningModelResultOptimized(Path geoJsonFilePath) {
try {
logger.info("Processing learning model result file (optimized): {}", geoJsonFilePath);
// Read and parse GeoJSON file
String geoJsonContent = Files.readString(geoJsonFilePath);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Validate GeoJSON structure
if (!rootNode.has("type") || !"FeatureCollection".equals(rootNode.get("type").asText())) {
throw new IllegalArgumentException("Invalid GeoJSON: Not a FeatureCollection");
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
throw new IllegalArgumentException("Invalid GeoJSON: No features array found");
}
// Extract metadata from file name and content
String fileName = geoJsonFilePath.getFileName().toString();
String mapSheetName =
rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Parse years from filename
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1];
afterYear = parts[2];
mapSheetNum = parts[3];
}
if (beforeYear == null || afterYear == null || mapSheetNum == null) {
throw new IllegalArgumentException(
"Cannot parse years and map sheet number from filename: " + fileName);
}
int totalFeatures = features.size();
logger.info("Total features to process: {}", totalFeatures);
// Step 1: Create main data record first
MapSheetLearnDataEntity savedMainData =
createMainDataRecord(
geoJsonContent,
fileName,
geoJsonFilePath.toString(),
beforeYear,
afterYear,
mapSheetNum);
// Step 2: Process features in small batches to avoid transaction timeout
int totalProcessed = 0;
int batchSize = 25; // Smaller batch size for reliability
for (int i = 0; i < totalFeatures; i += batchSize) {
int endIndex = Math.min(i + batchSize, totalFeatures);
logger.info("Processing batch {}-{} of {}", i + 1, endIndex, totalFeatures);
List<JsonNode> batch = new ArrayList<>();
for (int j = i; j < endIndex; j++) {
batch.add(features.get(j));
}
try {
int processed =
processBatchSafely(batch, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
totalProcessed += processed;
logger.info(
"Batch processed successfully. Total so far: {}/{}", totalProcessed, totalFeatures);
} catch (Exception e) {
logger.error("Failed to process batch {}-{}: {}", i + 1, endIndex, e.getMessage());
// Continue with next batch instead of failing completely
}
}
logger.info(
"Successfully processed {} out of {} features from file: {}",
totalProcessed,
totalFeatures,
fileName);
return totalProcessed;
} catch (IOException e) {
logger.error("Failed to read GeoJSON file: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result file", e);
} catch (Exception e) {
logger.error("Error processing learning model result: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result", e);
}
}
@Transactional
private MapSheetLearnDataEntity createMainDataRecord(
String geoJsonContent,
String fileName,
String filePath,
String beforeYear,
String afterYear,
String mapSheetNum) {
MapSheetLearnDataEntity mainData =
createMainDataEntity(
geoJsonContent, fileName, filePath, beforeYear, afterYear, mapSheetNum);
MapSheetLearnDataEntity saved = mapSheetLearnDataRepository.save(mainData);
logger.info("Created main data record with ID: {}", saved.getId());
return saved;
}
@Transactional
private int processBatchSafely(
List<JsonNode> features,
Long dataUid,
String beforeYear,
String afterYear,
String mapSheetNum) {
int processed = 0;
for (JsonNode feature : features) {
try {
long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000);
insertFeatureWithPostGIS(feature, geoUid, dataUid, beforeYear, afterYear, mapSheetNum);
processed++;
// Small delay to prevent ID collisions
try {
Thread.sleep(1);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
} catch (Exception e) {
logger.warn("Failed to process individual feature: {}", e.getMessage());
// Continue processing other features in this batch
}
}
return processed;
}
/**
* Process a learning model result GeoJSON file and store it in the database
*
* @param geoJsonFilePath Path to the GeoJSON file
* @return Number of features processed
*/
@Transactional
public int processLearningModelResult(Path geoJsonFilePath) {
try {
logger.info("Processing learning model result file: {}", geoJsonFilePath);
// Read and parse GeoJSON file
String geoJsonContent = Files.readString(geoJsonFilePath);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Validate GeoJSON structure
if (!rootNode.has("type") || !"FeatureCollection".equals(rootNode.get("type").asText())) {
throw new IllegalArgumentException("Invalid GeoJSON: Not a FeatureCollection");
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
throw new IllegalArgumentException("Invalid GeoJSON: No features array found");
}
// Extract metadata from file name and content
String fileName = geoJsonFilePath.getFileName().toString();
String mapSheetName =
rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Parse years from filename or data (e.g., "캠코_2021_2022_35813023")
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1]; // 2021 or 2023
afterYear = parts[2]; // 2022 or 2024
mapSheetNum = parts[3]; // 35813023 or 35810049
}
if (beforeYear == null || afterYear == null || mapSheetNum == null) {
throw new IllegalArgumentException(
"Cannot parse years and map sheet number from filename: " + fileName);
}
// Create main data record
MapSheetLearnDataEntity mainData =
createMainDataEntity(
geoJsonContent,
fileName,
geoJsonFilePath.toString(),
beforeYear,
afterYear,
mapSheetNum);
MapSheetLearnDataEntity savedMainData = mapSheetLearnDataRepository.save(mainData);
logger.info("Saved main data record with ID: {}", savedMainData.getId());
// Process each feature in the GeoJSON using direct PostGIS insertion
int featureCount = 0;
int batchSize = 10; // Much smaller batch size to avoid transaction timeout
for (int i = 0; i < features.size(); i += batchSize) {
int endIndex = Math.min(i + batchSize, features.size());
logger.info("Processing batch {}-{} of {} features", i + 1, endIndex, features.size());
// Process each feature individually within this logging batch
for (int j = i; j < endIndex; j++) {
JsonNode feature = features.get(j);
try {
// Generate unique ID for this geometry entity
long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000) + j;
// Extract feature data and insert directly with PostGIS
insertFeatureWithPostGIS(
feature, geoUid, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
featureCount++;
// Small delay to prevent issues
if (j % 5 == 0) {
try {
Thread.sleep(10);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
} catch (Exception e) {
logger.warn("Failed to process feature {}: {}", j + 1, e.getMessage());
}
}
// Log progress after each batch
if (featureCount > 0 && endIndex % batchSize == 0) {
logger.info(
"Processed {} features so far, success rate: {:.1f}%",
featureCount, (featureCount * 100.0) / endIndex);
}
}
logger.info("Successfully processed {} features from file: {}", featureCount, fileName);
return featureCount;
} catch (IOException e) {
logger.error("Failed to read GeoJSON file: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result file", e);
} catch (Exception e) {
logger.error("Error processing learning model result: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result", e);
}
}
/** Create the main data entity for tb_map_sheet_learn_data table */
private MapSheetLearnDataEntity createMainDataEntity(
String geoJsonContent,
String fileName,
String filePath,
String beforeYear,
String afterYear,
String mapSheetNum) {
MapSheetLearnDataEntity entity = new MapSheetLearnDataEntity();
// Generate unique ID (using current timestamp + random component)
entity.setId(System.currentTimeMillis() + (long) (Math.random() * 1000));
LocalDateTime now = LocalDateTime.now();
entity.setAnalStrtDttm(ZonedDateTime.now());
entity.setAnalEndDttm(ZonedDateTime.now());
entity.setCompareYyyy(Integer.parseInt(beforeYear)); // 첫 번째 연도만 저장
// JSON 데이터를 Map으로 변환하여 저장
try {
@SuppressWarnings("unchecked")
Map<String, Object> jsonMap = objectMapper.readValue(geoJsonContent, Map.class);
entity.setDataJson(jsonMap);
} catch (Exception e) {
logger.warn("JSON 파싱 실패, 빈 Map으로 저장: {}", fileName, e);
entity.setDataJson(new HashMap<>());
}
entity.setDataName(fileName);
entity.setDataPath(filePath);
entity.setDataState("PROCESSED");
entity.setCreatedDttm(ZonedDateTime.now());
entity.setUpdatedDttm(ZonedDateTime.now());
return entity;
}
/** Insert GeoJSON feature directly using PostGIS functions */
private void insertFeatureWithPostGIS(
JsonNode feature,
Long geoUid,
Long dataUid,
String beforeYear,
String afterYear,
String mapSheetNum)
throws Exception {
JsonNode properties = feature.get("properties");
JsonNode geometry = feature.get("geometry");
if (properties == null || geometry == null) {
throw new IllegalArgumentException("Feature missing properties or geometry");
}
// Extract properties
Double cdProb = properties.has("cd_prob") ? properties.get("cd_prob").asDouble() : null;
Double area = properties.has("area") ? properties.get("area").asDouble() : null;
String classBeforeName = null;
Double classBeforeProb = null;
String classAfterName = null;
Double classAfterProb = null;
// Classification data
JsonNode classNode = properties.get("class");
if (classNode != null) {
// Before classification
JsonNode beforeClass = classNode.get("before");
if (beforeClass != null && beforeClass.isArray() && beforeClass.size() > 0) {
JsonNode firstBefore = beforeClass.get(0);
if (firstBefore.has("class_name")) {
classBeforeName = firstBefore.get("class_name").asText();
}
if (firstBefore.has("probability")) {
classBeforeProb = firstBefore.get("probability").asDouble();
}
}
// After classification
JsonNode afterClass = classNode.get("after");
if (afterClass != null && afterClass.isArray() && afterClass.size() > 0) {
JsonNode firstAfter = afterClass.get(0);
if (firstAfter.has("class_name")) {
classAfterName = firstAfter.get("class_name").asText();
}
if (firstAfter.has("probability")) {
classAfterProb = firstAfter.get("probability").asDouble();
}
}
}
// Get geometry type
String geoType = geometry.has("type") ? geometry.get("type").asText() : "Unknown";
// Convert geometry to JSON string for PostGIS
String geometryJson = geometry.toString();
// Insert using PostGIS functions
mapSheetLearnDataGeomRepository.insertWithPostGISGeometry(
geoUid,
cdProb,
classBeforeName,
classBeforeProb,
classAfterName,
classAfterProb,
Long.parseLong(mapSheetNum),
Integer.parseInt(beforeYear),
Integer.parseInt(afterYear),
area,
geometryJson,
geoType,
dataUid);
logger.debug("Inserted geometry entity with ID: {} using PostGIS", geoUid);
}
/**
* Process multiple learning model result files
*
* @param filePaths List of GeoJSON file paths
* @return Total number of features processed across all files
*/
@Transactional
public int processMultipleLearningModelResults(List<Path> filePaths) {
int totalProcessed = 0;
for (Path filePath : filePaths) {
try {
int processed = processLearningModelResult(filePath);
totalProcessed += processed;
logger.info("Processed {} features from file: {}", processed, filePath.getFileName());
} catch (Exception e) {
logger.error("Failed to process file: {}", filePath, e);
// Continue processing other files even if one fails
}
}
logger.info("Total features processed across all files: {}", totalProcessed);
return totalProcessed;
}
}
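Everything the deleted processor knows about a file comes from its underscore-delimited name: 캠코_<beforeYear>_<afterYear>_<mapSheetNum>.geojson, as in 캠코_2021_2022_35813023.geojson. A self-contained sketch of that convention, handy if the rule ever needs a unit test of its own; the record name is hypothetical, not a type from this codebase:

import java.util.Optional;

/** Parses "캠코_2021_2022_35813023.geojson"-style names into (beforeYear, afterYear, mapSheetNum). */
record LearnFileMeta(int beforeYear, int afterYear, long mapSheetNum) {

    static Optional<LearnFileMeta> parse(String fileName) {
        String base = fileName.endsWith(".geojson")
                ? fileName.substring(0, fileName.length() - ".geojson".length())
                : fileName;
        String[] parts = base.split("_");
        if (parts.length < 4) {
            return Optional.empty(); // same shape the processor rejects with IllegalArgumentException
        }
        try {
            return Optional.of(new LearnFileMeta(
                    Integer.parseInt(parts[1]),   // e.g. 2021
                    Integer.parseInt(parts[2]),   // e.g. 2022
                    Long.parseLong(parts[3])));   // e.g. 35813023
        } catch (NumberFormatException e) {
            return Optional.empty();
        }
    }
}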

View File

@@ -191,12 +191,9 @@ public class LabelAllocateApiController {
@RequestBody
LabelAllocateDto.AllocateMoveDto dto) {
-int compareYyyy = Integer.parseInt(dto.getYyyy().split("-")[0]);
-int targetYyyy = Integer.parseInt(dto.getYyyy().split("-")[1]);
return ApiResponseDto.okObject(
labelAllocateService.allocateMove(
-dto.getStage(), dto.getLabelers(), compareYyyy, targetYyyy, dto.getUserId()));
+dto.getTotalCnt(), dto.getUuid(), dto.getLabelers(), dto.getUserId()));
}
@Operation(
@@ -232,7 +229,7 @@ public class LabelAllocateApiController {
@ApiResponse(responseCode = "500", description = "서버 오류")
})
@GetMapping("/move-user")
-public ApiResponseDto<List<LabelAllocateDto.MoveUserList>> availMoveUserList(
+public ApiResponseDto<LabelAllocateDto.MoveInfo> availMoveUserList(
@Parameter(description = "해당 사용자 사번", example = "01022223333") @RequestParam String userId,
@Parameter(description = "회차 마스터 key", example = "f97dc186-e6d3-4645-9737-3173dde8dc64")
@RequestParam

View File

@@ -229,34 +229,20 @@ public class LabelAllocateDto {
@AllArgsConstructor
public static class AllocateMoveDto {
-@Schema(description = "회차", example = "4")
+@Schema(description = "총 잔여 건수", example = "5061")
-private Integer stage;
+private Integer totalCnt;
@Schema(
-description = "이관할 라벨러 할당량",
+description = "이관할 라벨러",
-example =
-"""
+example = """
[
-{
-"userId": "123456",
-"demand": 10
-},
-{
-"userId": "010222297501",
-"demand": 5
-}
+"87654321"
]
""")
-private List<TargetUser> labelers;
+private List<String> labelers;
-@Schema(description = "비교년도-기준년도", example = "2022-2024")
+@Schema(description = "회차 마스터 key", example = "f97dc186-e6d3-4645-9737-3173dde8dc64")
-private String yyyy;
+private String uuid;
-// @Schema(description = "비교년도", example = "2022")
-// private Integer compareYyyy;
-//
-// @Schema(description = "기준년도", example = "2024")
-// private Integer targetYyyy;
@Schema(description = "대상 사번", example = "01022223333")
private String userId;
@@ -269,6 +255,7 @@ public class LabelAllocateDto {
private Long geoUid;
private Long mapSheetNum;
+private Long pnu;
}
@Getter
@@ -320,4 +307,13 @@ public class LabelAllocateDto {
private Long remainCnt;
private Double percent;
}
+@Getter
+@Setter
+@AllArgsConstructor
+public static class MoveInfo {
+private Long totalCnt;
+private List<MoveUserList> moveUserList;
+}
}
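The reworked AllocateMoveDto drops the per-labeler demand list and the year pair: the caller now sends the total remaining count, plain employee numbers, and the round master key, and the server derives each labeler's share itself. An illustrative request body under the new shape (values taken from the @Schema examples above, not real data):

/** Illustrative payload for the reworked allocate-move endpoint. */
class AllocateMoveRequestExample {
    static final String BODY = """
        {
          "totalCnt": 5061,
          "labelers": ["87654321"],
          "uuid": "f97dc186-e6d3-4645-9737-3173dde8dc64",
          "userId": "01022223333"
        }
        """;
}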

View File

@@ -0,0 +1,27 @@
package com.kamco.cd.kamcoback.label.dto;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
public class LabelLabelerDto {
@Getter
@Setter
@AllArgsConstructor
public static class Basic {
private UUID lbUsrUid;
private Long analUid;
private String workerUid;
private Long allocateCnt;
private Boolean deleted;
private Boolean reAllocateYn;
private Long reAllocateCnt;
private String reAllocateWorkerUid;
private ZonedDateTime createdDttm;
private ZonedDateTime updatedDttm;
}
}

View File

@@ -7,12 +7,14 @@ import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.AllocateInfoDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
-import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveUserList;
+import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveInfo;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.TargetUser;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerListResponse;
import com.kamco.cd.kamcoback.postgres.core.LabelAllocateCoreService;
+import java.util.LinkedHashMap;
import java.util.List;
+import java.util.Map;
import java.util.Objects;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.Page;
@@ -73,6 +75,9 @@ public class LabelAllocateService {
labelAllocateCoreService.assignOwner(sub, target.getUserId(), analUid);
index = end;
+// 라벨러 유저 테이블에 insert
+labelAllocateCoreService.insertLabelerUser(analUid, target.getUserId(), target.getDemand());
}
// 검수자 할당 테이블에 insert. TODO: 익일 배치로 라벨링 완료된 내역을 검수자에게 할당해야 함
@@ -116,31 +121,51 @@ public class LabelAllocateService {
}
public ApiResponseDto.ResponseObj allocateMove(
-Integer stage,
-List<TargetUser> targetUsers,
-Integer compareYyyy,
-Integer targetYyyy,
-String userId) {
-Long lastId = null;
-Long chargeCnt = targetUsers.stream().mapToLong(TargetUser::getDemand).sum();
-if (chargeCnt <= 0) {
-return new ApiResponseDto.ResponseObj(ApiResponseCode.BAD_REQUEST, "이관할 데이터를 입력해주세요.");
-}
-List<Long> allIds =
-labelAllocateCoreService.fetchNextMoveIds(
-lastId, chargeCnt, compareYyyy, targetYyyy, stage, userId);
-int index = 0;
-for (TargetUser target : targetUsers) {
-int end = index + target.getDemand();
-List<Long> sub = allIds.subList(index, end);
-labelAllocateCoreService.assignOwnerMove(sub, target.getUserId());
-index = end;
+Integer totalCnt, String uuid, List<String> targetUsers, String userId) {
+Map<String, Integer> result = new LinkedHashMap<>();
+int userCount = targetUsers.size();
+if (userCount <= 0) {
+return new ApiResponseDto.ResponseObj(ApiResponseCode.BAD_REQUEST, "재할당할 라벨러를 선택해주세요.");
+}
+int base = totalCnt / userCount;
+int remainder = totalCnt % userCount;
+for (int i = 0; i < userCount; i++) {
+int assignCount = base;
+// 마지막 사람에게 나머지 몰아주기
+if (i == userCount - 1) {
+assignCount += remainder;
+}
+result.put(targetUsers.get(i), assignCount);
+// TODO: 재할당 테이블에 update 까지만 하고 나머지는 배치에서 처리하기?
+labelAllocateCoreService.assignOwnerReAllocate(
+uuid, userId, targetUsers.get(i), (long) assignCount);
}
+// Long lastId = null;
+// Long chargeCnt = targetUsers.stream().mapToLong(TargetUser::getDemand).sum();
+//
+// if (chargeCnt <= 0) {
+// return new ApiResponseDto.ResponseObj(ApiResponseCode.BAD_REQUEST, "이관할 데이터를 입력해주세요.");
+// }
+//
+// List<Long> allIds =
+// labelAllocateCoreService.fetchNextMoveIds(
+// lastId, chargeCnt, compareYyyy, targetYyyy, stage, userId);
+// int index = 0;
+// for (TargetUser target : targetUsers) {
+// int end = index + target.getDemand();
+// List<Long> sub = allIds.subList(index, end);
+//
+// labelAllocateCoreService.assignOwnerMove(sub, target.getUserId());
+// index = end;
+// }
return new ApiResponseDto.ResponseObj(ApiResponseCode.OK, "이관을 완료하였습니다.");
}
@@ -161,7 +186,7 @@ public class LabelAllocateService {
}
}
-public List<MoveUserList> moveAvailUserList(String userId, String uuid) {
+public MoveInfo moveAvailUserList(String userId, String uuid) {
return labelAllocateCoreService.moveAvailUserList(userId, uuid);
}
}
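The new allocateMove distributes totalCnt with integer division and folds the remainder into the last labeler, so the shares always sum back exactly to totalCnt: 5061 items over two labelers gives base 2530, remainder 1, hence 2530 and 2531. The arithmetic in isolation:

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/** Mirrors allocateMove's split: even shares, remainder absorbed by the last labeler. */
class EvenSplit {
    static Map<String, Integer> split(int totalCnt, List<String> labelers) {
        int base = totalCnt / labelers.size();
        int remainder = totalCnt % labelers.size();
        Map<String, Integer> result = new LinkedHashMap<>();
        for (int i = 0; i < labelers.size(); i++) {
            boolean last = i == labelers.size() - 1;
            result.put(labelers.get(i), base + (last ? remainder : 0));
        }
        return result; // split(5061, List.of("a", "b")) -> {a=2530, b=2531}
    }
}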

View File

@@ -5,7 +5,7 @@ import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.AllocateInfoDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
-import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveUserList;
+import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveInfo;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.searchReq;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo;
@@ -128,7 +128,16 @@ public class LabelAllocateCoreService {
return labelAllocateRepository.findInspectorDetail(userId, uuid);
}
-public List<MoveUserList> moveAvailUserList(String userId, String uuid) {
+public MoveInfo moveAvailUserList(String userId, String uuid) {
return labelAllocateRepository.moveAvailUserList(userId, uuid);
}
+public void insertLabelerUser(Long analUid, String userId, int demand) {
+labelAllocateRepository.insertLabelerUser(analUid, userId, demand);
+}
+public void assignOwnerReAllocate(
+String uuid, String userId, String paramUserId, Long assignCount) {
+labelAllocateRepository.assignOwnerReAllocate(uuid, userId, paramUserId, assignCount);
+}
}

View File

@@ -0,0 +1,48 @@
package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.label.dto.LabelLabelerDto;
import com.kamco.cd.kamcoback.postgres.CommonDateEntity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import java.util.UUID;
@Entity
@Table(name = "tb_labeling_labeler")
public class LabelingLabelerEntity extends CommonDateEntity {
@Id
@Column(name = "lb_usr_uid")
private UUID lbUsrUid;
@Column(name = "anal_uid")
private Long analUid;
@Column(name = "worker_uid")
private String workerUid;
private Long allocateCnt;
private Boolean deleted;
private Boolean reAllocateYn;
private Long reAllocateCnt;
private String reAllocateWorkerUid;
public LabelLabelerDto.Basic toDto() {
return new LabelLabelerDto.Basic(
this.lbUsrUid,
this.analUid,
this.workerUid,
this.allocateCnt,
this.deleted,
this.reAllocateYn,
this.reAllocateCnt,
this.reAllocateWorkerUid,
super.getCreatedDate(),
super.getModifiedDate());
}
}

View File

@@ -1,66 +0,0 @@
package com.kamco.cd.kamcoback.postgres.repository;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
@Repository
public interface MapSheetLearnDataGeomRepository
extends JpaRepository<MapSheetLearnDataGeomEntity, Long> {
/** 데이터 UID로 지오메트리 정보 조회 */
List<MapSheetLearnDataGeomEntity> findByDataUid(Long dataUid);
/** 도엽 번호로 지오메트리 정보 조회 */
List<MapSheetLearnDataGeomEntity> findByMapSheetNum(Long mapSheetNum);
/** 연도 범위로 지오메트리 정보 조회 */
List<MapSheetLearnDataGeomEntity> findByBeforeYyyyAndAfterYyyy(
Integer beforeYyyy, Integer afterYyyy);
/** 지오메트리 타입별 조회 */
List<MapSheetLearnDataGeomEntity> findByGeoType(String geoType);
/** 데이터 UID로 기존 지오메트리 데이터 삭제 (재생성 전에 사용) */
void deleteByDataUid(Long dataUid);
/** PostGIS 함수를 사용하여 geometry 데이터를 직접 삽입 ST_SetSRID(ST_GeomFromGeoJSON(...), 5186) 형식으로 저장 */
@Modifying
@Transactional
@Query(
value =
"""
INSERT INTO tb_map_sheet_learn_data_geom (
geo_uid, cd_prob, class_before_name, class_before_prob,
class_after_name, class_after_prob, map_sheet_num,
before_yyyy, after_yyyy, area, geom, geo_type, data_uid,
created_dttm, updated_dttm
) VALUES (
:geoUid, :cdProb, :classBeforeName, :classBeforeProb,
:classAfterName, :classAfterProb, :mapSheetNum,
:beforeYyyy, :afterYyyy, :area,
ST_SetSRID(ST_GeomFromGeoJSON(CAST(:geometryJson AS TEXT)), 5186),
:geoType, :dataUid, NOW(), NOW()
) ON CONFLICT (geo_uid) DO NOTHING
""",
nativeQuery = true)
void insertWithPostGISGeometry(
@Param("geoUid") Long geoUid,
@Param("cdProb") Double cdProb,
@Param("classBeforeName") String classBeforeName,
@Param("classBeforeProb") Double classBeforeProb,
@Param("classAfterName") String classAfterName,
@Param("classAfterProb") Double classAfterProb,
@Param("mapSheetNum") Long mapSheetNum,
@Param("beforeYyyy") Integer beforeYyyy,
@Param("afterYyyy") Integer afterYyyy,
@Param("area") Double area,
@Param("geometryJson") String geometryJson,
@Param("geoType") String geoType,
@Param("dataUid") Long dataUid);
}
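The one non-derived query in this deleted repository leans on two PostGIS idioms: ST_SetSRID(ST_GeomFromGeoJSON(...), 5186) parses the raw GeoJSON geometry and stamps it with the Korean planar SRID 5186, and ON CONFLICT (geo_uid) DO NOTHING makes replays of the same feature idempotent. The same insert as plain JDBC, reduced to a few columns for brevity (table and column names as in the query above; connection handling left to the caller):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

/** JDBC sketch of the PostGIS insert the deleted repository ran as a native query. */
class GeomInsertSketch {
    static void insert(Connection conn, long geoUid, long dataUid, String geometryJson)
            throws SQLException {
        String sql = """
            INSERT INTO tb_map_sheet_learn_data_geom
                (geo_uid, data_uid, geom, created_dttm, updated_dttm)
            VALUES (?, ?, ST_SetSRID(ST_GeomFromGeoJSON(?), 5186), NOW(), NOW())
            ON CONFLICT (geo_uid) DO NOTHING
            """;
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setLong(1, geoUid);
            ps.setLong(2, dataUid);
            ps.setString(3, geometryJson); // the feature's "geometry" object, serialized as text
            ps.executeUpdate();
        }
    }
}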

View File

@@ -1,32 +0,0 @@
package com.kamco.cd.kamcoback.postgres.repository;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import java.util.List;
import java.util.Optional;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
@Repository
public interface MapSheetLearnDataRepository extends JpaRepository<MapSheetLearnDataEntity, Long> {
/** 데이터 이름으로 조회 */
Optional<MapSheetLearnDataEntity> findByDataName(String dataName);
/** 데이터 경로로 조회 */
Optional<MapSheetLearnDataEntity> findByDataPath(String dataPath);
/** 처리 상태별 조회 */
List<MapSheetLearnDataEntity> findByDataState(String dataState);
/** 데이터 타입별 조회 */
List<MapSheetLearnDataEntity> findByDataType(String dataType);
/** 분석 상태별 조회 */
List<MapSheetLearnDataEntity> findByAnalState(String analState);
/** 분석 상태별 개수 조회 */
long countByAnalState(String analState);
/** 처리되지 않은 데이터 조회 (data_state가 'PENDING' 또는 null인 것들) */
List<MapSheetLearnDataEntity> findByDataStateIsNullOrDataState(String dataState);
}
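One naming subtlety in this deleted interface: findByDataStateIsNullOrDataState(String dataState) is a single derived query, data_state IS NULL OR data_state = ?, where the IsNull half takes no argument and the method's one parameter binds only to the second predicate. Spelled out as explicit JPQL it would read as below; a sketch, assuming the entity field is dataState as the method names imply:

import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

/** Explicit form of the derived method findByDataStateIsNullOrDataState. */
interface LearnDataQuerySketch extends JpaRepository<MapSheetLearnDataEntity, Long> {

    @Query("select d from MapSheetLearnDataEntity d"
            + " where d.dataState is null or d.dataState = :dataState")
    List<MapSheetLearnDataEntity> findUnprocessed(@Param("dataState") String dataState);
}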

View File

@@ -5,7 +5,7 @@ import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.AllocateInfoDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveUserList; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveInfo;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo; import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo; import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
@@ -78,5 +78,9 @@ public interface LabelAllocateRepositoryCustom {
LabelerDetail findInspectorDetail(String userId, String uuid); LabelerDetail findInspectorDetail(String userId, String uuid);
List<MoveUserList> moveAvailUserList(String userId, String uuid); MoveInfo moveAvailUserList(String userId, String uuid);
void insertLabelerUser(Long analUid, String userId, int demand);
void assignOwnerReAllocate(String uuid, String userId, String paramUserId, Long assignCount);
} }

View File

@@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.postgres.repository.label;
import static com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity.labelingAssignmentEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QLabelingInspectorEntity.labelingInspectorEntity;
+import static com.kamco.cd.kamcoback.postgres.entity.QLabelingLabelerEntity.labelingLabelerEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMemberEntity.memberEntity;
@@ -13,6 +14,7 @@ import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InspectState;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelState;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
+import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveInfo;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveUserList;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo;
@@ -68,7 +70,8 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
Projections.constructor(
AllocateInfoDto.class,
mapSheetAnalDataInferenceGeomEntity.geoUid,
-mapSheetAnalDataInferenceGeomEntity.mapSheetNum))
+mapSheetAnalDataInferenceGeomEntity.mapSheetNum,
+mapSheetAnalDataInferenceGeomEntity.pnu))
.from(mapSheetAnalDataInferenceGeomEntity)
.where(
lastId == null ? null : mapSheetAnalDataInferenceGeomEntity.geoUid.gt(lastId),
@@ -116,8 +119,8 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
"""
insert into tb_labeling_assignment
(assignment_uid, inference_geom_uid, worker_uid,
-work_state, assign_group_id, anal_uid)
-values (?, ?, ?, ?, ?, ?)
+work_state, assign_group_id, anal_uid, pnu)
+values (?, ?, ?, ?, ?, ?, ?)
""";
try (PreparedStatement ps = connection.prepareStatement(sql)) {
@@ -130,6 +133,7 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
ps.setString(4, LabelState.ASSIGNED.getId());
ps.setString(5, String.valueOf(info.getMapSheetNum()));
ps.setLong(6, analEntity.getId());
+ps.setLong(7, info.getPnu());
ps.addBatch();
batchSize++;
@@ -1038,12 +1042,16 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
}
@Override
-public List<MoveUserList> moveAvailUserList(String userId, String uuid) {
+public MoveInfo moveAvailUserList(String userId, String uuid) {
NumberExpression<Long> totalCnt = labelingAssignmentEntity.count();
NumberExpression<Long> completeCnt =
new CaseBuilder()
-.when(labelingAssignmentEntity.workState.eq(LabelState.COMPLETE.getId()))
+.when(
+labelingAssignmentEntity
+.workState
+.eq(LabelState.COMPLETE.getId())
+.or(labelingAssignmentEntity.workState.eq(LabelState.SKIP.getId())))
.then(1L)
.otherwise(0L)
.sum();
@@ -1073,24 +1081,78 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
throw new EntityNotFoundException("MapSheetAnalInferenceEntity not found for analUid: ");
}
-return queryFactory
-.select(
-Projections.constructor(
-MoveUserList.class,
-memberEntity.userRole,
-memberEntity.employeeNo,
-memberEntity.name,
-remainCnt,
-percent))
-.from(labelingAssignmentEntity)
-.innerJoin(memberEntity)
-.on(labelingAssignmentEntity.workerUid.eq(memberEntity.employeeNo))
-.where(
-labelingAssignmentEntity.analUid.eq(analEntity.getId()),
-labelingAssignmentEntity.workerUid.ne(userId))
-.groupBy(memberEntity.userRole, memberEntity.employeeNo, memberEntity.name)
-.having(completeCnt.multiply(2).goe(totalCnt)) // 진행률 평균 이상인 것들만 조회 => percent 를 바로 쓰면
-.orderBy(completeCnt.desc())
-.fetch();
+Long userChargeCnt =
+queryFactory
+.select(labelingAssignmentEntity.inferenceGeomUid.count())
+.from(labelingAssignmentEntity)
+.where(
+labelingAssignmentEntity.analUid.eq(analEntity.getId()),
+labelingAssignmentEntity.workerUid.eq(userId),
+labelingAssignmentEntity.workState.eq(LabelState.ASSIGNED.getId()))
+.fetchOne();
+List<MoveUserList> list =
+queryFactory
+.select(
+Projections.constructor(
+MoveUserList.class,
+memberEntity.userRole,
+memberEntity.employeeNo,
+memberEntity.name,
+remainCnt,
+percent))
+.from(labelingLabelerEntity)
+.innerJoin(memberEntity)
+.on(labelingAssignmentEntity.workerUid.eq(memberEntity.employeeNo))
+.where(
+labelingAssignmentEntity.analUid.eq(analEntity.getId()),
+labelingAssignmentEntity.workerUid.ne(userId))
+.groupBy(memberEntity.userRole, memberEntity.employeeNo, memberEntity.name)
+.having(
+completeCnt
+.multiply(2)
+.goe(totalCnt)) // 진행률 평균 이상인 것들만 조회 => percent 를 바로 쓰면 having절에 무리가 갈 수 있다고 함
+.orderBy(completeCnt.desc()) // TODO: 도엽번호? PNU? 로 정렬하여 보여주기?
+.fetch();
+return new MoveInfo(userChargeCnt, list);
}
+@Override
+public void insertLabelerUser(Long analUid, String userId, int demand) {
+queryFactory
+.insert(labelingLabelerEntity)
+.columns(
+labelingLabelerEntity.lbUsrUid,
+labelingLabelerEntity.analUid,
+labelingLabelerEntity.workerUid,
+labelingLabelerEntity.allocateCnt)
+.values(UUID.randomUUID(), analUid, userId, demand)
+.execute();
+}
+@Override
+public void assignOwnerReAllocate(
+String uuid, String userId, String paramUserId, Long assignCount) {
+// analUid로 분석 정보 조회
+MapSheetAnalInferenceEntity analEntity =
+queryFactory
+.selectFrom(mapSheetAnalInferenceEntity)
+.where(mapSheetAnalInferenceEntity.uuid.eq(UUID.fromString(uuid)))
+.fetchOne();
+if (Objects.isNull(analEntity)) {
+throw new EntityNotFoundException("MapSheetAnalInferenceEntity not found for analUid: ");
+}
+queryFactory
+.update(labelingLabelerEntity)
+.set(labelingLabelerEntity.reAllocateYn, true)
+.set(labelingLabelerEntity.reAllocateWorkerUid, userId)
+.set(labelingLabelerEntity.reAllocateCnt, assignCount)
+.where(
+labelingLabelerEntity.analUid.eq(analEntity.getId()),
+labelingLabelerEntity.workerUid.eq(paramUserId))
+.execute();
+}
}
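The having(completeCnt.multiply(2).goe(totalCnt)) filter above is what the inline comment alludes to: rather than computing percent per group, the progress condition complete/total >= 1/2 is rearranged into the pure integer comparison 2 * complete >= total, which needs no division, no floating point, and cannot divide by zero inside the HAVING clause. The equivalence in isolation:

/** Integer-only form of the "at least half done" check used in the HAVING clause. */
class ProgressFilter {
    static boolean atLeastHalfDone(long completeCnt, long totalCnt) {
        // complete / total >= 0.5  <=>  2 * complete >= total  (for total > 0)
        return 2 * completeCnt >= totalCnt;
    }
}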