./gradlew :spotlessApply 실행

This commit is contained in:
2025-11-26 10:13:11 +09:00
parent 51a9f18af2
commit e96cff6ea5
54 changed files with 2095 additions and 2214 deletions

View File

@@ -5,13 +5,12 @@ import com.kamco.cd.kamcoback.changedetection.service.ChangeDetectionService;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import io.swagger.v3.oas.annotations.tags.Tag; import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.transaction.Transactional; import jakarta.transaction.Transactional;
import java.util.List;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController; import org.springframework.web.bind.annotation.RestController;
import java.util.List;
@Tag(name = "변화탐지", description = "변화탐지 API") @Tag(name = "변화탐지", description = "변화탐지 API")
@RequiredArgsConstructor @RequiredArgsConstructor
@RestController @RestController

View File

@@ -2,9 +2,4 @@ package com.kamco.cd.kamcoback.changedetection.dto;
import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Geometry;
public record ChangeDetectionDto( public record ChangeDetectionDto(Long id, Geometry polygon, double centroidX, double centroidY) {}
Long id,
Geometry polygon,
double centroidX,
double centroidY
) {}

View File

@@ -1,13 +1,11 @@
package com.kamco.cd.kamcoback.changedetection.service; package com.kamco.cd.kamcoback.changedetection.service;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto; import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.postgres.core.ChangeDetectionCoreService; import com.kamco.cd.kamcoback.postgres.core.ChangeDetectionCoreService;
import java.util.List;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.util.List;
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
public class ChangeDetectionService { public class ChangeDetectionService {

View File

@@ -207,7 +207,7 @@ public class GlobalExceptionHandler {
@ResponseStatus(HttpStatus.UNPROCESSABLE_ENTITY) @ResponseStatus(HttpStatus.UNPROCESSABLE_ENTITY)
@ExceptionHandler(IllegalStateException.class) @ExceptionHandler(IllegalStateException.class)
public ApiResponseDto<String> handlerIllegalStateException( public ApiResponseDto<String> handlerIllegalStateException(
IllegalStateException e, HttpServletRequest request) { IllegalStateException e, HttpServletRequest request) {
log.warn("[IllegalStateException] resource :{} ", e.getMessage()); log.warn("[IllegalStateException] resource :{} ", e.getMessage());
String codeName = "UNPROCESSABLE_ENTITY"; String codeName = "UNPROCESSABLE_ENTITY";
@@ -291,11 +291,11 @@ public class GlobalExceptionHandler {
// TODO : stackTrace limit 20줄? 확인 필요 // TODO : stackTrace limit 20줄? 확인 필요
String stackTraceStr = String stackTraceStr =
Arrays.stream(stackTrace) Arrays.stream(stackTrace)
// .limit(20) // .limit(20)
.map(StackTraceElement::toString) .map(StackTraceElement::toString)
.collect(Collectors.joining("\n")) .collect(Collectors.joining("\n"))
.substring(0, Math.min(stackTrace.length, 255)); .substring(0, Math.min(stackTrace.length, 255));
; ;
ErrorLogEntity errorLogEntity = ErrorLogEntity errorLogEntity =
new ErrorLogEntity( new ErrorLogEntity(

View File

@@ -1,72 +1,52 @@
package com.kamco.cd.kamcoback.geojson.config; package com.kamco.cd.kamcoback.geojson.config;
import jakarta.annotation.PostConstruct;
import lombok.Getter; import lombok.Getter;
import lombok.Setter; import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import jakarta.annotation.PostConstruct; /** GeoJSON 파일 모니터링 설정 */
/**
* GeoJSON 파일 모니터링 설정
*/
@Component @Component
@ConfigurationProperties(prefix = "geojson.monitor") @ConfigurationProperties(prefix = "geojson.monitor")
@Getter @Getter
@Setter @Setter
public class GeoJsonMonitorConfig { public class GeoJsonMonitorConfig {
/** /** 모니터링할 폴더 경로 */
* 모니터링할 폴더 경로 private String watchDirectory = "~/geojson/upload";
*/
private String watchDirectory = "~/geojson/upload";
/** /** 처리 완료 후 파일을 이동할 폴더 경로 */
* 처리 완료 후 파일을 이동할 폴더 경로 private String processedDirectory = "~/geojson/processed";
*/
private String processedDirectory = "~/geojson/processed";
/** /** 처리 실패 파일을 이동할 폴더 경로 */
* 처리 실패 파일을 이동할 폴더 경로 private String errorDirectory = "~/geojson/error";
*/
private String errorDirectory = "~/geojson/error";
/** /** 파일 모니터링 스케줄 (cron 표현식) 기본값: 매 30초마다 실행 */
* 파일 모니터링 스케줄 (cron 표현식) private String cronExpression = "0/30 * * * * *";
* 기본값: 매 30초마다 실행
*/
private String cronExpression = "0/30 * * * * *";
/** /** 지원하는 압축파일 확장자 */
* 지원하는 압축파일 확장자 private String[] supportedExtensions = {"zip", "tar", "tar.gz", "tgz"};
*/
private String[] supportedExtensions = {"zip", "tar", "tar.gz", "tgz"};
/** /** 처리할 최대 파일 크기 (바이트) */
* 처리할 최대 파일 크기 (바이트) private long maxFileSize = 100 * 1024 * 1024; // 100MB
*/
private long maxFileSize = 100 * 1024 * 1024; // 100MB
/** /** 임시 압축해제 폴더 */
* 임시 압축해제 폴더 private String tempDirectory = "/tmp/geojson_extract";
*/
private String tempDirectory = "/tmp/geojson_extract";
/** /** 홈 디렉토리 경로 확장 */
* 홈 디렉토리 경로 확장 @PostConstruct
*/ public void expandPaths() {
@PostConstruct watchDirectory = expandPath(watchDirectory);
public void expandPaths() { processedDirectory = expandPath(processedDirectory);
watchDirectory = expandPath(watchDirectory); errorDirectory = expandPath(errorDirectory);
processedDirectory = expandPath(processedDirectory); tempDirectory = expandPath(tempDirectory);
errorDirectory = expandPath(errorDirectory); }
tempDirectory = expandPath(tempDirectory);
} private String expandPath(String path) {
if (path.startsWith("~")) {
private String expandPath(String path) { return path.replace("~", System.getProperty("user.home"));
if (path.startsWith("~")) {
return path.replace("~", System.getProperty("user.home"));
}
return path;
} }
return path;
}
} }

View File

@@ -4,200 +4,195 @@ import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository; import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository; import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.PageRequest;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*; import org.springframework.web.bind.annotation.*;
import java.util.HashMap; /** GeoJSON 데이터 조회 및 테스트용 API 컨트롤러 */
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* GeoJSON 데이터 조회 및 테스트용 API 컨트롤러
*/
@Slf4j @Slf4j
@RestController @RestController
@RequestMapping("/api/geojson/data") @RequestMapping("/api/geojson/data")
@RequiredArgsConstructor @RequiredArgsConstructor
public class GeoJsonDataController { public class GeoJsonDataController {
private final MapSheetLearnDataRepository mapSheetLearnDataRepository; private final MapSheetLearnDataRepository mapSheetLearnDataRepository;
private final MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository; private final MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
/** /** 학습 데이터 목록 조회 */
* 학습 데이터 목록 조회 @GetMapping("/learn-data")
*/ public ResponseEntity<Map<String, Object>> getLearnDataList(
@GetMapping("/learn-data") @RequestParam(defaultValue = "0") int page,
public ResponseEntity<Map<String, Object>> getLearnDataList( @RequestParam(defaultValue = "10") int size,
@RequestParam(defaultValue = "0") int page, @RequestParam(required = false) String dataState,
@RequestParam(defaultValue = "10") int size, @RequestParam(required = false) String analState) {
@RequestParam(required = false) String dataState, try {
@RequestParam(required = false) String analState) { PageRequest pageRequest = PageRequest.of(page, size);
try { List<MapSheetLearnDataEntity> learnDataList;
PageRequest pageRequest = PageRequest.of(page, size);
List<MapSheetLearnDataEntity> learnDataList;
if (dataState != null) { if (dataState != null) {
learnDataList = mapSheetLearnDataRepository.findByDataState(dataState); learnDataList = mapSheetLearnDataRepository.findByDataState(dataState);
} else if (analState != null) { } else if (analState != null) {
learnDataList = mapSheetLearnDataRepository.findByAnalState(analState); learnDataList = mapSheetLearnDataRepository.findByAnalState(analState);
} else { } else {
learnDataList = mapSheetLearnDataRepository.findAll(pageRequest).getContent(); learnDataList = mapSheetLearnDataRepository.findAll(pageRequest).getContent();
} }
Map<String, Object> response = new HashMap<>(); Map<String, Object> response = new HashMap<>();
response.put("data", learnDataList); response.put("data", learnDataList);
response.put("totalCount", learnDataList.size()); response.put("totalCount", learnDataList.size());
response.put("page", page); response.put("page", page);
response.put("size", size); response.put("size", size);
return ResponseEntity.ok(response); return ResponseEntity.ok(response);
} catch (Exception e) { } catch (Exception e) {
log.error("학습 데이터 목록 조회 실패", e); log.error("학습 데이터 목록 조회 실패", e);
return ResponseEntity.internalServerError() return ResponseEntity.internalServerError()
.body(Map.of("error", "데이터 조회 실패: " + e.getMessage())); .body(Map.of("error", "데이터 조회 실패: " + e.getMessage()));
}
} }
}
/** /** 특정 학습 데이터 상세 조회 */
* 특정 학습 데이터 상세 조회 @GetMapping("/learn-data/{id}")
*/ public ResponseEntity<Map<String, Object>> getLearnDataDetail(@PathVariable Long id) {
@GetMapping("/learn-data/{id}") try {
public ResponseEntity<Map<String, Object>> getLearnDataDetail(@PathVariable Long id) { if (id == null) {
try { return ResponseEntity.badRequest().body(Map.of("error", "ID가 필요합니다."));
if (id == null) { }
return ResponseEntity.badRequest()
.body(Map.of("error", "ID가 필요합니다."));
}
Optional<MapSheetLearnDataEntity> learnDataOpt = mapSheetLearnDataRepository.findById(id); Optional<MapSheetLearnDataEntity> learnDataOpt = mapSheetLearnDataRepository.findById(id);
if (learnDataOpt.isEmpty()) { if (learnDataOpt.isEmpty()) {
return ResponseEntity.notFound().build(); return ResponseEntity.notFound().build();
} }
MapSheetLearnDataEntity learnData = learnDataOpt.get(); MapSheetLearnDataEntity learnData = learnDataOpt.get();
List<MapSheetLearnDataGeomEntity> geometryList = mapSheetLearnDataGeomRepository.findByDataUid(id); List<MapSheetLearnDataGeomEntity> geometryList =
mapSheetLearnDataGeomRepository.findByDataUid(id);
Map<String, Object> response = new HashMap<>(); Map<String, Object> response = new HashMap<>();
response.put("learnData", learnData); response.put("learnData", learnData);
response.put("geometryData", geometryList); response.put("geometryData", geometryList);
response.put("geometryCount", geometryList.size()); response.put("geometryCount", geometryList.size());
return ResponseEntity.ok(response); return ResponseEntity.ok(response);
} catch (Exception e) { } catch (Exception e) {
log.error("학습 데이터 상세 조회 실패: {}", id, e); log.error("학습 데이터 상세 조회 실패: {}", id, e);
return ResponseEntity.internalServerError() return ResponseEntity.internalServerError()
.body(Map.of("error", "데이터 조회 실패: " + e.getMessage())); .body(Map.of("error", "데이터 조회 실패: " + e.getMessage()));
}
} }
}
/** /** Geometry 데이터 목록 조회 */
* Geometry 데이터 목록 조회 @GetMapping("/geometry")
*/ public ResponseEntity<Map<String, Object>> getGeometryDataList(
@GetMapping("/geometry") @RequestParam(defaultValue = "0") int page,
public ResponseEntity<Map<String, Object>> getGeometryDataList( @RequestParam(defaultValue = "10") int size,
@RequestParam(defaultValue = "0") int page, @RequestParam(required = false) Long dataUid,
@RequestParam(defaultValue = "10") int size, @RequestParam(required = false) String geoType) {
@RequestParam(required = false) Long dataUid, try {
@RequestParam(required = false) String geoType) { List<MapSheetLearnDataGeomEntity> geometryList;
try {
List<MapSheetLearnDataGeomEntity> geometryList;
if (dataUid != null) { if (dataUid != null) {
geometryList = mapSheetLearnDataGeomRepository.findByDataUid(dataUid); geometryList = mapSheetLearnDataGeomRepository.findByDataUid(dataUid);
} else if (geoType != null) { } else if (geoType != null) {
geometryList = mapSheetLearnDataGeomRepository.findByGeoType(geoType); geometryList = mapSheetLearnDataGeomRepository.findByGeoType(geoType);
} else { } else {
PageRequest pageRequest = PageRequest.of(page, size); PageRequest pageRequest = PageRequest.of(page, size);
geometryList = mapSheetLearnDataGeomRepository.findAll(pageRequest).getContent(); geometryList = mapSheetLearnDataGeomRepository.findAll(pageRequest).getContent();
} }
Map<String, Object> response = new HashMap<>(); Map<String, Object> response = new HashMap<>();
response.put("data", geometryList); response.put("data", geometryList);
response.put("totalCount", geometryList.size()); response.put("totalCount", geometryList.size());
response.put("page", page); response.put("page", page);
response.put("size", size); response.put("size", size);
return ResponseEntity.ok(response); return ResponseEntity.ok(response);
} catch (Exception e) { } catch (Exception e) {
log.error("Geometry 데이터 목록 조회 실패", e); log.error("Geometry 데이터 목록 조회 실패", e);
return ResponseEntity.internalServerError() return ResponseEntity.internalServerError()
.body(Map.of("error", "데이터 조회 실패: " + e.getMessage())); .body(Map.of("error", "데이터 조회 실패: " + e.getMessage()));
}
} }
}
/** /** 시스템 통계 정보 조회 */
* 시스템 통계 정보 조회 @GetMapping("/statistics")
*/ public ResponseEntity<Map<String, Object>> getStatistics() {
@GetMapping("/statistics") try {
public ResponseEntity<Map<String, Object>> getStatistics() { long totalLearnData = mapSheetLearnDataRepository.count();
try { long totalGeometryData = mapSheetLearnDataGeomRepository.count();
long totalLearnData = mapSheetLearnDataRepository.count();
long totalGeometryData = mapSheetLearnDataGeomRepository.count();
List<MapSheetLearnDataEntity> processedData = mapSheetLearnDataRepository.findByDataState("PROCESSED"); List<MapSheetLearnDataEntity> processedData =
List<MapSheetLearnDataEntity> pendingAnalysis = mapSheetLearnDataRepository.findByAnalState("PENDING"); mapSheetLearnDataRepository.findByDataState("PROCESSED");
List<MapSheetLearnDataEntity> completedAnalysis = mapSheetLearnDataRepository.findByAnalState("COMPLETED"); List<MapSheetLearnDataEntity> pendingAnalysis =
List<MapSheetLearnDataEntity> errorAnalysis = mapSheetLearnDataRepository.findByAnalState("ERROR"); mapSheetLearnDataRepository.findByAnalState("PENDING");
List<MapSheetLearnDataEntity> completedAnalysis =
mapSheetLearnDataRepository.findByAnalState("COMPLETED");
List<MapSheetLearnDataEntity> errorAnalysis =
mapSheetLearnDataRepository.findByAnalState("ERROR");
Map<String, Object> statistics = new HashMap<>(); Map<String, Object> statistics = new HashMap<>();
statistics.put("totalLearnData", totalLearnData); statistics.put("totalLearnData", totalLearnData);
statistics.put("totalGeometryData", totalGeometryData); statistics.put("totalGeometryData", totalGeometryData);
statistics.put("processedDataCount", processedData.size()); statistics.put("processedDataCount", processedData.size());
statistics.put("pendingAnalysisCount", pendingAnalysis.size()); statistics.put("pendingAnalysisCount", pendingAnalysis.size());
statistics.put("completedAnalysisCount", completedAnalysis.size()); statistics.put("completedAnalysisCount", completedAnalysis.size());
statistics.put("errorAnalysisCount", errorAnalysis.size()); statistics.put("errorAnalysisCount", errorAnalysis.size());
// 처리 완료율 계산 // 처리 완료율 계산
if (totalLearnData > 0) { if (totalLearnData > 0) {
double completionRate = (double) completedAnalysis.size() / totalLearnData * 100; double completionRate = (double) completedAnalysis.size() / totalLearnData * 100;
statistics.put("completionRate", Math.round(completionRate * 100.0) / 100.0); statistics.put("completionRate", Math.round(completionRate * 100.0) / 100.0);
} else { } else {
statistics.put("completionRate", 0.0); statistics.put("completionRate", 0.0);
} }
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(
"statistics", statistics, Map.of("statistics", statistics, "timestamp", java.time.Instant.now()));
"timestamp", java.time.Instant.now() } catch (Exception e) {
)); log.error("통계 정보 조회 실패", e);
} catch (Exception e) { return ResponseEntity.internalServerError()
log.error("통계 정보 조회 실패", e); .body(Map.of("error", "통계 조회 실패: " + e.getMessage()));
return ResponseEntity.internalServerError()
.body(Map.of("error", "통계 조회 실패: " + e.getMessage()));
}
} }
}
/** /** 데이터 상태별 카운트 조회 */
* 데이터 상태별 카운트 조회 @GetMapping("/status-counts")
*/ public ResponseEntity<Map<String, Object>> getStatusCounts() {
@GetMapping("/status-counts") try {
public ResponseEntity<Map<String, Object>> getStatusCounts() { Map<String, Long> dataStateCounts = new HashMap<>();
try { Map<String, Long> analStateCounts = new HashMap<>();
Map<String, Long> dataStateCounts = new HashMap<>();
Map<String, Long> analStateCounts = new HashMap<>();
// 데이터 상태별 카운트 // 데이터 상태별 카운트
dataStateCounts.put("PROCESSED", mapSheetLearnDataRepository.findByDataState("PROCESSED").size() + 0L); dataStateCounts.put(
dataStateCounts.put("PENDING", mapSheetLearnDataRepository.findByDataStateIsNullOrDataState("PENDING").size() + 0L); "PROCESSED", mapSheetLearnDataRepository.findByDataState("PROCESSED").size() + 0L);
dataStateCounts.put(
"PENDING",
mapSheetLearnDataRepository.findByDataStateIsNullOrDataState("PENDING").size() + 0L);
// 분석 상태별 카운트 // 분석 상태별 카운트
analStateCounts.put("PENDING", mapSheetLearnDataRepository.findByAnalState("PENDING").size() + 0L); analStateCounts.put(
analStateCounts.put("COMPLETED", mapSheetLearnDataRepository.findByAnalState("COMPLETED").size() + 0L); "PENDING", mapSheetLearnDataRepository.findByAnalState("PENDING").size() + 0L);
analStateCounts.put("ERROR", mapSheetLearnDataRepository.findByAnalState("ERROR").size() + 0L); analStateCounts.put(
"COMPLETED", mapSheetLearnDataRepository.findByAnalState("COMPLETED").size() + 0L);
analStateCounts.put(
"ERROR", mapSheetLearnDataRepository.findByAnalState("ERROR").size() + 0L);
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(
"dataStateCounts", dataStateCounts, Map.of(
"analStateCounts", analStateCounts, "dataStateCounts", dataStateCounts,
"timestamp", java.time.Instant.now() "analStateCounts", analStateCounts,
)); "timestamp", java.time.Instant.now()));
} catch (Exception e) { } catch (Exception e) {
log.error("상태별 카운트 조회 실패", e); log.error("상태별 카운트 조회 실패", e);
return ResponseEntity.internalServerError() return ResponseEntity.internalServerError()
.body(Map.of("error", "카운트 조회 실패: " + e.getMessage())); .body(Map.of("error", "카운트 조회 실패: " + e.getMessage()));
}
} }
}
} }

View File

@@ -2,153 +2,133 @@ package com.kamco.cd.kamcoback.geojson.controller;
import com.kamco.cd.kamcoback.geojson.service.GeoJsonFileMonitorService; import com.kamco.cd.kamcoback.geojson.service.GeoJsonFileMonitorService;
import com.kamco.cd.kamcoback.geojson.service.GeometryConversionService; import com.kamco.cd.kamcoback.geojson.service.GeometryConversionService;
import java.util.List;
import java.util.Map;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*; import org.springframework.web.bind.annotation.*;
import java.util.List; /** GeoJSON 파일 모니터링 및 처리 API 컨트롤러 */
import java.util.Map;
/**
* GeoJSON 파일 모니터링 및 처리 API 컨트롤러
*/
@Slf4j @Slf4j
@RestController @RestController
@RequestMapping("/api/geojson") @RequestMapping("/api/geojson")
@RequiredArgsConstructor @RequiredArgsConstructor
public class GeoJsonMonitorController { public class GeoJsonMonitorController {
private final GeoJsonFileMonitorService monitorService; private final GeoJsonFileMonitorService monitorService;
private final GeometryConversionService geometryConversionService; private final GeometryConversionService geometryConversionService;
/** /** 모니터링 상태 조회 */
* 모니터링 상태 조회 @GetMapping("/monitor/status")
*/ public Map<String, Object> getMonitorStatus() {
@GetMapping("/monitor/status") return monitorService.getMonitorStatus();
public Map<String, Object> getMonitorStatus() { }
return monitorService.getMonitorStatus();
/** 시스템 통계 정보 조회 */
@GetMapping("/monitor/stats")
public ResponseEntity<Map<String, Object>> getSystemStats() {
try {
Map<String, Object> stats = monitorService.getSystemStats();
return ResponseEntity.ok(stats);
} catch (Exception e) {
log.error("시스템 통계 조회 실패", e);
return ResponseEntity.internalServerError()
.body(Map.of("error", "시스템 통계 조회 실패: " + e.getMessage(), "status", "error"));
} }
}
/** /** 디렉토리 초기화 (수동 실행) */
* 시스템 통계 정보 조회 @PostMapping("/monitor/init-directories")
*/ public ResponseEntity<Map<String, Object>> initializeDirectories() {
@GetMapping("/monitor/stats") try {
public ResponseEntity<Map<String, Object>> getSystemStats() { log.info("디렉토리 초기화 수동 실행 요청");
try { monitorService.initializeDirectoriesManually();
Map<String, Object> stats = monitorService.getSystemStats();
return ResponseEntity.ok(stats); return ResponseEntity.ok(
} catch (Exception e) { Map.of(
log.error("시스템 통계 조회 실패", e); "message", "디렉토리 초기화가 완료되었습니다.",
return ResponseEntity.internalServerError() "status", "success"));
.body(Map.of( } catch (Exception e) {
"error", "시스템 통계 조회 실패: " + e.getMessage(), log.error("디렉토리 초기화 실패", e);
"status", "error" return ResponseEntity.internalServerError()
)); .body(Map.of("error", "디렉토리 초기화 실패: " + e.getMessage(), "status", "error"));
}
} }
}
/** /** 수동으로 특정 파일 처리 */
* 디렉토리 초기화 (수동 실행) @PostMapping("/process/file")
*/ public ResponseEntity<Map<String, Object>> processFileManually(@RequestParam String filePath) {
@PostMapping("/monitor/init-directories") try {
public ResponseEntity<Map<String, Object>> initializeDirectories() { log.info("수동 파일 처리 요청: {}", filePath);
try { monitorService.processFileManually(filePath);
log.info("디렉토리 초기화 수동 실행 요청");
monitorService.initializeDirectoriesManually();
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(
"message", "디렉토리 초기화가 완료되었습니다.", Map.of(
"status", "success" "message", "파일 처리가 완료되었습니다.",
)); "filePath", filePath,
} catch (Exception e) { "status", "success"));
log.error("디렉토리 초기화 실패", e); } catch (Exception e) {
return ResponseEntity.internalServerError() log.error("수동 파일 처리 실패: {}", filePath, e);
.body(Map.of( return ResponseEntity.internalServerError()
"error", "디렉토리 초기화 실패: " + e.getMessage(), .body(
"status", "error" Map.of(
)); "error", "파일 처리 실패: " + e.getMessage(), "filePath", filePath, "status", "error"));
}
} }
}
/** /** 미처리된 Geometry 데이터 수동 변환 */
* 수동으로 특정 파일 처리 @PostMapping("/process/geometry")
*/ public ResponseEntity<Map<String, Object>> processUnprocessedGeometry() {
@PostMapping("/process/file") try {
public ResponseEntity<Map<String, Object>> processFileManually(@RequestParam String filePath) { log.info("미처리 Geometry 변환 수동 실행 요청");
try { List<Long> processedIds = geometryConversionService.processUnprocessedLearnData();
log.info("수동 파일 처리 요청: {}", filePath);
monitorService.processFileManually(filePath);
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(
"message", "파일 처리가 완료되었습니다.", Map.of(
"filePath", filePath, "message",
"status", "success" "Geometry 변환이 완료되었습니다.",
)); "processedCount",
} catch (Exception e) { processedIds.size(),
log.error("수동 파일 처리 실패: {}", filePath, e); "processedIds",
return ResponseEntity.internalServerError() processedIds,
.body(Map.of( "status",
"error", "파일 처리 실패: " + e.getMessage(), "success"));
"filePath", filePath, } catch (Exception e) {
"status", "error" log.error("Geometry 변환 실패", e);
)); return ResponseEntity.internalServerError()
} .body(Map.of("error", "Geometry 변환 실패: " + e.getMessage(), "status", "error"));
} }
}
/** /** 특정 학습 데이터의 Geometry 변환 */
* 미처리된 Geometry 데이터 수동 변환 @PostMapping("/process/geometry/convert")
*/ public ResponseEntity<Map<String, Object>> convertSpecificGeometry(
@PostMapping("/process/geometry") @RequestBody List<Long> learnDataIds) {
public ResponseEntity<Map<String, Object>> processUnprocessedGeometry() { try {
try { if (learnDataIds == null || learnDataIds.isEmpty()) {
log.info("미처리 Geometry 변환 수동 실행 요청"); return ResponseEntity.badRequest().body(Map.of("error", "변환할 학습 데이터 ID가 없습니다."));
List<Long> processedIds = geometryConversionService.processUnprocessedLearnData(); }
return ResponseEntity.ok(Map.of( log.info("특정 학습 데이터 Geometry 변환 요청: {}", learnDataIds);
"message", "Geometry 변환이 완료되었습니다.", List<Long> geometryIds = geometryConversionService.convertToGeometryData(learnDataIds);
"processedCount", processedIds.size(),
"processedIds", processedIds, return ResponseEntity.ok(
"status", "success" Map.of(
)); "message",
} catch (Exception e) { "Geometry 변환이 완료되었습니다.",
log.error("Geometry 변환 실패", e); "inputCount",
return ResponseEntity.internalServerError() learnDataIds.size(),
.body(Map.of( "outputCount",
"error", "Geometry 변환 실패: " + e.getMessage(), geometryIds.size(),
"status", "error" "geometryIds",
)); geometryIds,
} "status",
} "success"));
} catch (Exception e) {
/** log.error("특정 Geometry 변환 실패: {}", learnDataIds, e);
* 특정 학습 데이터의 Geometry 변환 return ResponseEntity.internalServerError()
*/ .body(Map.of("error", "Geometry 변환 실패: " + e.getMessage(), "status", "error"));
@PostMapping("/process/geometry/convert")
public ResponseEntity<Map<String, Object>> convertSpecificGeometry(@RequestBody List<Long> learnDataIds) {
try {
if (learnDataIds == null || learnDataIds.isEmpty()) {
return ResponseEntity.badRequest()
.body(Map.of("error", "변환할 학습 데이터 ID가 없습니다."));
}
log.info("특정 학습 데이터 Geometry 변환 요청: {}", learnDataIds);
List<Long> geometryIds = geometryConversionService.convertToGeometryData(learnDataIds);
return ResponseEntity.ok(Map.of(
"message", "Geometry 변환이 완료되었습니다.",
"inputCount", learnDataIds.size(),
"outputCount", geometryIds.size(),
"geometryIds", geometryIds,
"status", "success"
));
} catch (Exception e) {
log.error("특정 Geometry 변환 실패: {}", learnDataIds, e);
return ResponseEntity.internalServerError()
.body(Map.of(
"error", "Geometry 변환 실패: " + e.getMessage(),
"status", "error"
));
}
} }
}
} }

View File

@@ -1,6 +1,9 @@
package com.kamco.cd.kamcoback.geojson.service; package com.kamco.cd.kamcoback.geojson.service;
import com.kamco.cd.kamcoback.geojson.config.GeoJsonMonitorConfig; import com.kamco.cd.kamcoback.geojson.config.GeoJsonMonitorConfig;
import java.io.*;
import java.nio.file.*;
import java.util.*;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveEntry;
@@ -10,158 +13,143 @@ import org.apache.commons.compress.archivers.zip.ZipFile;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.io.*; /** 압축파일 처리 서비스 */
import java.nio.file.*;
import java.util.*;
import java.util.stream.Stream;
import java.util.zip.ZipInputStream;
/**
* 압축파일 처리 서비스
*/
@Slf4j @Slf4j
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
public class ArchiveExtractorService { public class ArchiveExtractorService {
private final GeoJsonMonitorConfig config; private final GeoJsonMonitorConfig config;
/** /** 압축파일에서 GeoJSON 파일들을 추출 */
* 압축파일에서 GeoJSON 파일들을 추출 public Map<String, String> extractGeoJsonFiles(Path archiveFile) throws IOException {
*/ Map<String, String> geoJsonContents = new HashMap<>();
public Map<String, String> extractGeoJsonFiles(Path archiveFile) throws IOException { String fileName = archiveFile.getFileName().toString().toLowerCase();
Map<String, String> geoJsonContents = new HashMap<>();
String fileName = archiveFile.getFileName().toString().toLowerCase();
log.info("압축파일 추출 시작: {}", archiveFile); log.info("압축파일 추출 시작: {}", archiveFile);
try { try {
if (fileName.endsWith(".zip")) { if (fileName.endsWith(".zip")) {
extractFromZip(archiveFile, geoJsonContents); extractFromZip(archiveFile, geoJsonContents);
} else if (fileName.endsWith(".tar") || fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) { } else if (fileName.endsWith(".tar")
extractFromTar(archiveFile, geoJsonContents); || fileName.endsWith(".tar.gz")
} else { || fileName.endsWith(".tgz")) {
throw new IllegalArgumentException("지원하지 않는 압축파일 형식: " + fileName); extractFromTar(archiveFile, geoJsonContents);
} } else {
} catch (Exception e) { throw new IllegalArgumentException("지원하지 않는 압축파일 형식: " + fileName);
log.error("압축파일 추출 실패: {}", archiveFile, e); }
throw e; } catch (Exception e) {
log.error("압축파일 추출 실패: {}", archiveFile, e);
throw e;
}
log.info("압축파일에서 {}개의 GeoJSON 파일을 추출했습니다: {}", geoJsonContents.size(), archiveFile);
return geoJsonContents;
}
/** ZIP 파일에서 GeoJSON 추출 */
private void extractFromZip(Path zipFile, Map<String, String> geoJsonContents)
throws IOException {
try (ZipFile zip = new ZipFile(zipFile.toFile())) {
Enumeration<ZipArchiveEntry> entries = zip.getEntries();
while (entries.hasMoreElements()) {
ZipArchiveEntry entry = entries.nextElement();
if (!entry.isDirectory() && isGeoJsonFile(entry.getName())) {
try (InputStream inputStream = zip.getInputStream(entry)) {
String content = readInputStream(inputStream);
geoJsonContents.put(entry.getName(), content);
log.debug("ZIP에서 추출: {}", entry.getName());
}
} }
}
log.info("압축파일에서 {}개의 GeoJSON 파일을 추출했습니다: {}", geoJsonContents.size(), archiveFile);
return geoJsonContents;
} }
}
/** /** TAR 파일에서 GeoJSON 추출 */
* ZIP 파일에서 GeoJSON 추출 private void extractFromTar(Path tarFile, Map<String, String> geoJsonContents)
*/ throws IOException {
private void extractFromZip(Path zipFile, Map<String, String> geoJsonContents) throws IOException { String fileName = tarFile.getFileName().toString().toLowerCase();
try (ZipFile zip = new ZipFile(zipFile.toFile())) { InputStream fileInputStream = Files.newInputStream(tarFile);
Enumeration<ZipArchiveEntry> entries = zip.getEntries();
while (entries.hasMoreElements()) { try {
ZipArchiveEntry entry = entries.nextElement(); // GZIP 압축된 TAR 파일인지 확인
if (fileName.endsWith(".gz") || fileName.endsWith(".tgz")) {
fileInputStream = new GzipCompressorInputStream(fileInputStream);
}
if (!entry.isDirectory() && isGeoJsonFile(entry.getName())) { try (TarArchiveInputStream tarInputStream = new TarArchiveInputStream(fileInputStream)) {
try (InputStream inputStream = zip.getInputStream(entry)) { ArchiveEntry entry;
String content = readInputStream(inputStream);
geoJsonContents.put(entry.getName(), content); while ((entry = tarInputStream.getNextEntry()) != null) {
log.debug("ZIP에서 추출: {}", entry.getName()); if (!entry.isDirectory() && isGeoJsonFile(entry.getName())) {
} String content = readInputStream(tarInputStream);
} geoJsonContents.put(entry.getName(), content);
} log.debug("TAR에서 추출: {}", entry.getName());
}
} }
}
} finally {
try {
fileInputStream.close();
} catch (IOException e) {
log.warn("파일 스트림 종료 실패", e);
}
}
}
/** InputStream에서 문자열 읽기 */
private String readInputStream(InputStream inputStream) throws IOException {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"))) {
StringBuilder content = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
content.append(line).append("\n");
}
return content.toString();
}
}
/** 파일이 GeoJSON 파일인지 확인 */
private boolean isGeoJsonFile(String fileName) {
String lowerFileName = fileName.toLowerCase();
return lowerFileName.endsWith(".geojson") || lowerFileName.endsWith(".json");
}
/** 지원하는 압축파일인지 확인 */
public boolean isSupportedArchive(Path file) {
String fileName = file.getFileName().toString().toLowerCase();
for (String extension : config.getSupportedExtensions()) {
if (fileName.endsWith("." + extension)) {
return true;
}
} }
/** return false;
* TAR 파일에서 GeoJSON 추출 }
*/
private void extractFromTar(Path tarFile, Map<String, String> geoJsonContents) throws IOException {
String fileName = tarFile.getFileName().toString().toLowerCase();
InputStream fileInputStream = Files.newInputStream(tarFile);
try { /** 파일 크기가 제한 범위 내인지 확인 */
// GZIP 압축된 TAR 파일인지 확인 public boolean isFileSizeValid(Path file) {
if (fileName.endsWith(".gz") || fileName.endsWith(".tgz")) { try {
fileInputStream = new GzipCompressorInputStream(fileInputStream); long fileSize = Files.size(file);
} boolean isValid = fileSize <= config.getMaxFileSize();
try (TarArchiveInputStream tarInputStream = new TarArchiveInputStream(fileInputStream)) { if (!isValid) {
ArchiveEntry entry; log.warn(
"파일 크기가 제한을 초과했습니다: {} ({}MB > {}MB)",
file,
fileSize / 1024 / 1024,
config.getMaxFileSize() / 1024 / 1024);
}
while ((entry = tarInputStream.getNextEntry()) != null) { return isValid;
if (!entry.isDirectory() && isGeoJsonFile(entry.getName())) { } catch (IOException e) {
String content = readInputStream(tarInputStream); log.error("파일 크기 확인 실패: {}", file, e);
geoJsonContents.put(entry.getName(), content); return false;
log.debug("TAR에서 추출: {}", entry.getName());
}
}
}
} finally {
try {
fileInputStream.close();
} catch (IOException e) {
log.warn("파일 스트림 종료 실패", e);
}
}
}
/**
* InputStream에서 문자열 읽기
*/
private String readInputStream(InputStream inputStream) throws IOException {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"))) {
StringBuilder content = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
content.append(line).append("\n");
}
return content.toString();
}
}
/**
* 파일이 GeoJSON 파일인지 확인
*/
private boolean isGeoJsonFile(String fileName) {
String lowerFileName = fileName.toLowerCase();
return lowerFileName.endsWith(".geojson") || lowerFileName.endsWith(".json");
}
/**
* 지원하는 압축파일인지 확인
*/
public boolean isSupportedArchive(Path file) {
String fileName = file.getFileName().toString().toLowerCase();
for (String extension : config.getSupportedExtensions()) {
if (fileName.endsWith("." + extension)) {
return true;
}
}
return false;
}
/**
* 파일 크기가 제한 범위 내인지 확인
*/
public boolean isFileSizeValid(Path file) {
try {
long fileSize = Files.size(file);
boolean isValid = fileSize <= config.getMaxFileSize();
if (!isValid) {
log.warn("파일 크기가 제한을 초과했습니다: {} ({}MB > {}MB)",
file, fileSize / 1024 / 1024, config.getMaxFileSize() / 1024 / 1024);
}
return isValid;
} catch (IOException e) {
log.error("파일 크기 확인 실패: {}", file, e);
return false;
}
} }
}
} }

View File

@@ -5,241 +5,225 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository; import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.*;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.time.Instant; /** GeoJSON 데이터 처리 서비스 */
import java.util.*;
/**
* GeoJSON 데이터 처리 서비스
*/
@Slf4j @Slf4j
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
public class GeoJsonDataService { public class GeoJsonDataService {
private final MapSheetLearnDataRepository mapSheetLearnDataRepository; private final MapSheetLearnDataRepository mapSheetLearnDataRepository;
private final ObjectMapper objectMapper; private final ObjectMapper objectMapper;
/** /** GeoJSON 파일들을 데이터베이스에 저장 */
* GeoJSON 파일들을 데이터베이스에 저장 @Transactional
*/ public List<Long> processGeoJsonFiles(
@Transactional Map<String, String> geoJsonContents, String archiveFileName) {
public List<Long> processGeoJsonFiles(Map<String, String> geoJsonContents, String archiveFileName) { List<Long> savedIds = new ArrayList<>();
List<Long> savedIds = new ArrayList<>();
log.info("GeoJSON 파일 처리 시작: {} ({}개 파일)", archiveFileName, geoJsonContents.size()); log.info("GeoJSON 파일 처리 시작: {} ({}개 파일)", archiveFileName, geoJsonContents.size());
for (Map.Entry<String, String> entry : geoJsonContents.entrySet()) { for (Map.Entry<String, String> entry : geoJsonContents.entrySet()) {
String fileName = entry.getKey(); String fileName = entry.getKey();
String geoJsonContent = entry.getValue(); String geoJsonContent = entry.getValue();
try { try {
Long savedId = processGeoJsonFile(fileName, geoJsonContent, archiveFileName); Long savedId = processGeoJsonFile(fileName, geoJsonContent, archiveFileName);
if (savedId != null) { if (savedId != null) {
savedIds.add(savedId); savedIds.add(savedId);
log.debug("GeoJSON 파일 저장 성공: {} (ID: {})", fileName, savedId); log.debug("GeoJSON 파일 저장 성공: {} (ID: {})", fileName, savedId);
}
} catch (Exception e) {
log.error("GeoJSON 파일 처리 실패: {}", fileName, e);
// 개별 파일 처리 실패는 전체 처리를 중단시키지 않음
}
} }
} catch (Exception e) {
log.info("GeoJSON 파일 처리 완료: {} (성공: {}개, 전체: {}개)", log.error("GeoJSON 파일 처리 실패: {}", fileName, e);
archiveFileName, savedIds.size(), geoJsonContents.size()); // 개별 파일 처리 실패는 전체 처리를 중단시키지 않음
}
return savedIds;
} }
/** log.info(
* 개별 GeoJSON 파일을 MapSheetLearnDataEntity로 변환하여 저장 "GeoJSON 파일 처리 완료: {} (성공: {}개, 전체: {}개)",
*/ archiveFileName,
private Long processGeoJsonFile(String fileName, String geoJsonContent, String archiveFileName) { savedIds.size(),
geoJsonContents.size());
return savedIds;
}
/** 개별 GeoJSON 파일을 MapSheetLearnDataEntity로 변환하여 저장 */
private Long processGeoJsonFile(String fileName, String geoJsonContent, String archiveFileName) {
try {
// GeoJSON 파싱 및 검증
JsonNode geoJsonNode = objectMapper.readTree(geoJsonContent);
validateGeoJsonStructure(geoJsonNode);
// 파일이 이미 처리되었는지 확인
String dataPath = generateDataPath(archiveFileName, fileName);
Optional<MapSheetLearnDataEntity> existingData =
mapSheetLearnDataRepository.findByDataPath(dataPath);
if (existingData.isPresent()) {
log.warn("이미 처리된 파일입니다: {}", dataPath);
return existingData.get().getId();
}
// 새 엔티티 생성 및 저장
MapSheetLearnDataEntity entity =
createMapSheetLearnDataEntity(fileName, geoJsonContent, archiveFileName, geoJsonNode);
MapSheetLearnDataEntity savedEntity = mapSheetLearnDataRepository.save(entity);
return savedEntity.getId();
} catch (Exception e) {
log.error("GeoJSON 파일 처리 중 오류 발생: {}", fileName, e);
throw new RuntimeException("GeoJSON 파일 처리 실패: " + fileName, e);
}
}
/** GeoJSON 구조 검증 */
private void validateGeoJsonStructure(JsonNode geoJsonNode) {
if (!geoJsonNode.has("type")) {
throw new IllegalArgumentException("유효하지 않은 GeoJSON: 'type' 필드가 없습니다.");
}
String type = geoJsonNode.get("type").asText();
if (!"FeatureCollection".equals(type) && !"Feature".equals(type) && !"Geometry".equals(type)) {
throw new IllegalArgumentException("지원하지 않는 GeoJSON type: " + type);
}
}
/** MapSheetLearnDataEntity 생성 */
private MapSheetLearnDataEntity createMapSheetLearnDataEntity(
String fileName, String geoJsonContent, String archiveFileName, JsonNode geoJsonNode) {
MapSheetLearnDataEntity entity = new MapSheetLearnDataEntity();
// 기본 정보 설정
entity.setDataName(fileName);
entity.setDataPath(generateDataPath(archiveFileName, fileName));
entity.setDataType("GeoJSON");
entity.setDataTitle(extractTitle(fileName, geoJsonNode));
// CRS 정보 추출 및 설정
setCrsInformation(entity, geoJsonNode);
// JSON 데이터 저장
try {
@SuppressWarnings("unchecked")
Map<String, Object> jsonMap = objectMapper.readValue(geoJsonContent, Map.class);
entity.setDataJson(jsonMap);
} catch (Exception e) {
log.warn("JSON 파싱 실패, 원본 텍스트로 저장: {}", fileName, e);
// JSON 파싱이 실패하면 원본을 Map 형태로 저장
Map<String, Object> fallbackMap = new HashMap<>();
fallbackMap.put("raw_content", geoJsonContent);
fallbackMap.put("parse_error", e.getMessage());
entity.setDataJson(fallbackMap);
}
// 연도 정보 추출 (파일명에서 추출 시도)
setYearInformation(entity, fileName);
// 상태 정보 설정
entity.setDataState("PROCESSED");
entity.setAnalState("PENDING");
// 시간 정보 설정
ZonedDateTime now = ZonedDateTime.now();
entity.setCreatedDttm(now);
entity.setUpdatedDttm(now);
entity.setDataStateDttm(now);
return entity;
}
/** CRS 정보 설정 */
private void setCrsInformation(MapSheetLearnDataEntity entity, JsonNode geoJsonNode) {
if (geoJsonNode.has("crs")) {
JsonNode crsNode = geoJsonNode.get("crs");
if (crsNode.has("type") && crsNode.has("properties")) {
String crsType = crsNode.get("type").asText();
entity.setDataCrsType(crsType);
JsonNode propertiesNode = crsNode.get("properties");
if (propertiesNode.has("name")) {
String crsName = propertiesNode.get("name").asText();
entity.setDataCrsTypeName(crsName);
}
}
} else {
// CRS가 명시되지 않은 경우 기본값 설정 (WGS84)
entity.setDataCrsType("EPSG");
entity.setDataCrsTypeName("EPSG:4326");
}
}
/** 연도 정보 추출 */
private void setYearInformation(MapSheetLearnDataEntity entity, String fileName) {
// 파일명에서 연도 추출 시도 (예: kamco_2021_2022_35813023.geojson)
String[] parts = fileName.split("_");
for (String part : parts) {
if (part.matches("\\d{4}")) { // 4자리 숫자 (연도)
try { try {
// GeoJSON 파싱 및 검증 Integer year = Integer.parseInt(part);
JsonNode geoJsonNode = objectMapper.readTree(geoJsonContent); if (year >= 1900 && year <= 2100) {
validateGeoJsonStructure(geoJsonNode); if (entity.getDataYyyy() == null) {
entity.setDataYyyy(year);
// 파일이 이미 처리되었는지 확인 } else {
String dataPath = generateDataPath(archiveFileName, fileName); entity.setCompareYyyy(year);
Optional<MapSheetLearnDataEntity> existingData = mapSheetLearnDataRepository.findByDataPath(dataPath); break;
if (existingData.isPresent()) {
log.warn("이미 처리된 파일입니다: {}", dataPath);
return existingData.get().getId();
} }
}
// 새 엔티티 생성 및 저장 } catch (NumberFormatException ignored) {
MapSheetLearnDataEntity entity = createMapSheetLearnDataEntity(fileName, geoJsonContent, archiveFileName, geoJsonNode); // 무시
MapSheetLearnDataEntity savedEntity = mapSheetLearnDataRepository.save(entity);
return savedEntity.getId();
} catch (Exception e) {
log.error("GeoJSON 파일 처리 중 오류 발생: {}", fileName, e);
throw new RuntimeException("GeoJSON 파일 처리 실패: " + fileName, e);
} }
}
}
}
/** 제목 추출 */
private String extractTitle(String fileName, JsonNode geoJsonNode) {
// GeoJSON 메타데이터에서 제목 추출 시도
if (geoJsonNode.has("properties")) {
JsonNode properties = geoJsonNode.get("properties");
if (properties.has("title")) {
return properties.get("title").asText();
}
if (properties.has("name")) {
return properties.get("name").asText();
}
} }
/** // 파일명에서 확장자 제거하여 제목으로 사용
* GeoJSON 구조 검증 int lastDotIndex = fileName.lastIndexOf('.');
*/ if (lastDotIndex > 0) {
private void validateGeoJsonStructure(JsonNode geoJsonNode) { return fileName.substring(0, lastDotIndex);
if (!geoJsonNode.has("type")) {
throw new IllegalArgumentException("유효하지 않은 GeoJSON: 'type' 필드가 없습니다.");
}
String type = geoJsonNode.get("type").asText();
if (!"FeatureCollection".equals(type) && !"Feature".equals(type) && !"Geometry".equals(type)) {
throw new IllegalArgumentException("지원하지 않는 GeoJSON type: " + type);
}
} }
/** return fileName;
* MapSheetLearnDataEntity 생성 }
*/
private MapSheetLearnDataEntity createMapSheetLearnDataEntity(
String fileName, String geoJsonContent, String archiveFileName, JsonNode geoJsonNode) {
MapSheetLearnDataEntity entity = new MapSheetLearnDataEntity(); /** 데이터 경로 생성 */
private String generateDataPath(String archiveFileName, String fileName) {
return archiveFileName + "/" + fileName;
}
// 기본 정보 설정 /** 처리 가능한 파일 개수 확인 */
entity.setDataName(fileName); public boolean isProcessable(Map<String, String> geoJsonContents) {
entity.setDataPath(generateDataPath(archiveFileName, fileName)); if (geoJsonContents == null || geoJsonContents.isEmpty()) {
entity.setDataType("GeoJSON"); return false;
entity.setDataTitle(extractTitle(fileName, geoJsonNode));
// CRS 정보 추출 및 설정
setCrsInformation(entity, geoJsonNode);
// JSON 데이터 저장
try {
@SuppressWarnings("unchecked")
Map<String, Object> jsonMap = objectMapper.readValue(geoJsonContent, Map.class);
entity.setDataJson(jsonMap);
} catch (Exception e) {
log.warn("JSON 파싱 실패, 원본 텍스트로 저장: {}", fileName, e);
// JSON 파싱이 실패하면 원본을 Map 형태로 저장
Map<String, Object> fallbackMap = new HashMap<>();
fallbackMap.put("raw_content", geoJsonContent);
fallbackMap.put("parse_error", e.getMessage());
entity.setDataJson(fallbackMap);
}
// 연도 정보 추출 (파일명에서 추출 시도)
setYearInformation(entity, fileName);
// 상태 정보 설정
entity.setDataState("PROCESSED");
entity.setAnalState("PENDING");
// 시간 정보 설정
ZonedDateTime now = ZonedDateTime.now();
entity.setCreatedDttm(now);
entity.setUpdatedDttm(now);
entity.setDataStateDttm(now);
return entity;
} }
/** // 최대 처리 가능한 파일 수 제한 (성능 고려)
* CRS 정보 설정 int maxFiles = 50;
*/ if (geoJsonContents.size() > maxFiles) {
private void setCrsInformation(MapSheetLearnDataEntity entity, JsonNode geoJsonNode) { log.warn("처리 가능한 최대 파일 수를 초과했습니다: {} > {}", geoJsonContents.size(), maxFiles);
if (geoJsonNode.has("crs")) { return false;
JsonNode crsNode = geoJsonNode.get("crs");
if (crsNode.has("type") && crsNode.has("properties")) {
String crsType = crsNode.get("type").asText();
entity.setDataCrsType(crsType);
JsonNode propertiesNode = crsNode.get("properties");
if (propertiesNode.has("name")) {
String crsName = propertiesNode.get("name").asText();
entity.setDataCrsTypeName(crsName);
}
}
} else {
// CRS가 명시되지 않은 경우 기본값 설정 (WGS84)
entity.setDataCrsType("EPSG");
entity.setDataCrsTypeName("EPSG:4326");
}
} }
/** return true;
* 연도 정보 추출 }
*/
private void setYearInformation(MapSheetLearnDataEntity entity, String fileName) {
// 파일명에서 연도 추출 시도 (예: kamco_2021_2022_35813023.geojson)
String[] parts = fileName.split("_");
for (String part : parts) {
if (part.matches("\\d{4}")) { // 4자리 숫자 (연도)
try {
Integer year = Integer.parseInt(part);
if (year >= 1900 && year <= 2100) {
if (entity.getDataYyyy() == null) {
entity.setDataYyyy(year);
} else {
entity.setCompareYyyy(year);
break;
}
}
} catch (NumberFormatException ignored) {
// 무시
}
}
}
}
/**
* 제목 추출
*/
private String extractTitle(String fileName, JsonNode geoJsonNode) {
// GeoJSON 메타데이터에서 제목 추출 시도
if (geoJsonNode.has("properties")) {
JsonNode properties = geoJsonNode.get("properties");
if (properties.has("title")) {
return properties.get("title").asText();
}
if (properties.has("name")) {
return properties.get("name").asText();
}
}
// 파일명에서 확장자 제거하여 제목으로 사용
int lastDotIndex = fileName.lastIndexOf('.');
if (lastDotIndex > 0) {
return fileName.substring(0, lastDotIndex);
}
return fileName;
}
/**
* 데이터 경로 생성
*/
private String generateDataPath(String archiveFileName, String fileName) {
return archiveFileName + "/" + fileName;
}
/**
* 처리 가능한 파일 개수 확인
*/
public boolean isProcessable(Map<String, String> geoJsonContents) {
if (geoJsonContents == null || geoJsonContents.isEmpty()) {
return false;
}
// 최대 처리 가능한 파일 수 제한 (성능 고려)
int maxFiles = 50;
if (geoJsonContents.size() > maxFiles) {
log.warn("처리 가능한 최대 파일 수를 초과했습니다: {} > {}", geoJsonContents.size(), maxFiles);
return false;
}
return true;
}
} }

View File

@@ -1,434 +1,406 @@
package com.kamco.cd.kamcoback.geojson.service; package com.kamco.cd.kamcoback.geojson.service;
import com.kamco.cd.kamcoback.geojson.config.GeoJsonMonitorConfig; import com.kamco.cd.kamcoback.geojson.config.GeoJsonMonitorConfig;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository; import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import lombok.RequiredArgsConstructor; import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import jakarta.annotation.PostConstruct; import jakarta.annotation.PostConstruct;
import java.io.IOException; import java.io.IOException;
import java.nio.file.*; import java.nio.file.*;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.stream.Stream; import java.util.stream.Stream;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
/** /** GeoJSON 파일 모니터링 서비스 지정된 폴더를 주기적으로 모니터링하여 압축파일을 자동으로 처리합니다. */
* GeoJSON 파일 모니터링 서비스
* 지정된 폴더를 주기적으로 모니터링하여 압축파일을 자동으로 처리합니다.
*/
@Slf4j @Slf4j
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
public class GeoJsonFileMonitorService { public class GeoJsonFileMonitorService {
private final GeoJsonMonitorConfig config; private final GeoJsonMonitorConfig config;
private final ArchiveExtractorService archiveExtractorService; private final ArchiveExtractorService archiveExtractorService;
private final GeoJsonDataService geoJsonDataService; private final GeoJsonDataService geoJsonDataService;
private final GeometryConversionService geometryConversionService; private final GeometryConversionService geometryConversionService;
private final MapSheetLearnDataRepository learnDataRepository; private final MapSheetLearnDataRepository learnDataRepository;
private final MapSheetLearnDataGeomRepository geomRepository; private final MapSheetLearnDataGeomRepository geomRepository;
/** /** 애플리케이션 시작 시 필요한 디렉토리들을 미리 생성 */
* 애플리케이션 시작 시 필요한 디렉토리들을 미리 생성 @PostConstruct
*/ public void initializeDirectories() {
@PostConstruct try {
public void initializeDirectories() { log.info("GeoJSON 모니터링 시스템 초기화 중...");
log.info(
"설정된 경로 - Watch: {}, Processed: {}, Error: {}, Temp: {}",
config.getWatchDirectory(),
config.getProcessedDirectory(),
config.getErrorDirectory(),
config.getTempDirectory());
ensureDirectoriesExist();
log.info("GeoJSON 모니터링 시스템 초기화 완료");
} catch (Exception e) {
log.warn("GeoJSON 모니터링 시스템 초기화 실패 - 스케줄러 실행 시 재시도됩니다", e);
// 초기화 실패해도 애플리케이션은 시작되도록 함 (RuntimeException 던지지 않음)
}
}
/** 스케줄러를 통한 파일 모니터링 설정된 cron 표현식에 따라 주기적으로 실행 */
// @Scheduled(cron = "#{@geoJsonMonitorConfig.cronExpression}")
public void monitorFiles() {
log.debug("파일 모니터링 시작");
try {
// 모니터링 폴더 존재 확인 및 생성
ensureDirectoriesExist();
// 압축파일 검색 및 처리
processArchiveFiles();
// 미처리된 Geometry 변환 작업 수행
processUnprocessedGeometryData();
} catch (RuntimeException e) {
log.error("파일 모니터링 중 치명적 오류 발생 - 이번 주기 건너뜀", e);
} catch (Exception e) {
log.error("파일 모니터링 중 오류 발생", e);
}
log.debug("파일 모니터링 완료");
}
/** 필요한 디렉토리들이 존재하는지 확인하고 생성 */
private void ensureDirectoriesExist() {
boolean hasError = false;
try {
createDirectoryIfNotExists(config.getWatchDirectory());
} catch (IOException e) {
log.error("Watch 디렉토리 생성 실패: {} - {}", config.getWatchDirectory(), e.getMessage());
hasError = true;
}
try {
createDirectoryIfNotExists(config.getProcessedDirectory());
} catch (IOException e) {
log.error("Processed 디렉토리 생성 실패: {} - {}", config.getProcessedDirectory(), e.getMessage());
hasError = true;
}
try {
createDirectoryIfNotExists(config.getErrorDirectory());
} catch (IOException e) {
log.error("Error 디렉토리 생성 실패: {} - {}", config.getErrorDirectory(), e.getMessage());
hasError = true;
}
try {
createDirectoryIfNotExists(config.getTempDirectory());
} catch (IOException e) {
log.error("Temp 디렉토리 생성 실패: {} - {}", config.getTempDirectory(), e.getMessage());
hasError = true;
}
if (hasError) {
log.warn("일부 디렉토리 생성에 실패했습니다. 해당 기능은 제한될 수 있습니다.");
log.info("수동으로 다음 디렉토리들을 생성해주세요:");
log.info(" - {}", config.getWatchDirectory());
log.info(" - {}", config.getProcessedDirectory());
log.info(" - {}", config.getErrorDirectory());
log.info(" - {}", config.getTempDirectory());
} else {
log.info("모든 필요한 디렉토리가 준비되었습니다.");
}
}
/** 디렉토리가 존재하지 않으면 생성 */
private void createDirectoryIfNotExists(String directory) throws IOException {
if (directory == null || directory.trim().isEmpty()) {
throw new IllegalArgumentException("디렉토리 경로가 비어있습니다.");
}
Path path = Paths.get(directory);
if (!Files.exists(path)) {
try {
Files.createDirectories(path);
log.info("디렉토리 생성 완료: {}", directory);
// 디렉토리 권한 설정 (Unix/Linux 환경에서)
try { try {
log.info("GeoJSON 모니터링 시스템 초기화 중..."); if (!System.getProperty("os.name").toLowerCase().contains("windows")) {
log.info("설정된 경로 - Watch: {}, Processed: {}, Error: {}, Temp: {}", // rwxrwxr-x 권한 설정
config.getWatchDirectory(), config.getProcessedDirectory(), java.nio.file.attribute.PosixFilePermissions.asFileAttribute(
config.getErrorDirectory(), config.getTempDirectory()); java.nio.file.attribute.PosixFilePermissions.fromString("rwxrwxr-x"));
}
ensureDirectoriesExist(); } catch (Exception permissionException) {
log.info("GeoJSON 모니터링 시스템 초기화 완료"); log.debug("권한 설정 실패 (무시됨): {}", permissionException.getMessage());
} catch (Exception e) {
log.warn("GeoJSON 모니터링 시스템 초기화 실패 - 스케줄러 실행 시 재시도됩니다", e);
// 초기화 실패해도 애플리케이션은 시작되도록 함 (RuntimeException 던지지 않음)
} }
} catch (IOException e) {
log.error("디렉토리 생성 실패: {} - {}", directory, e.getMessage());
throw new IOException("디렉토리를 생성할 수 없습니다: " + directory, e);
}
} else if (!Files.isDirectory(path)) {
throw new IOException("지정된 경로가 디렉토리가 아닙니다: " + directory);
} else if (!Files.isWritable(path)) {
log.warn("디렉토리에 쓰기 권한이 없습니다: {}", directory);
} else {
log.debug("디렉토리가 이미 존재합니다: {}", directory);
}
}
/** 모니터링 폴더에서 압축파일들을 찾아서 처리 */
private void processArchiveFiles() {
Path watchDir = Paths.get(config.getWatchDirectory());
// 디렉토리 존재 확인
if (!Files.exists(watchDir)) {
log.debug("Watch 디렉토리가 존재하지 않습니다: {}", watchDir);
return;
} }
/** if (!Files.isDirectory(watchDir)) {
* 스케줄러를 통한 파일 모니터링 log.warn("Watch 경로가 디렉토리가 아닙니다: {}", watchDir);
* 설정된 cron 표현식에 따라 주기적으로 실행 return;
*/
// @Scheduled(cron = "#{@geoJsonMonitorConfig.cronExpression}")
public void monitorFiles() {
log.debug("파일 모니터링 시작");
try {
// 모니터링 폴더 존재 확인 및 생성
ensureDirectoriesExist();
// 압축파일 검색 및 처리
processArchiveFiles();
// 미처리된 Geometry 변환 작업 수행
processUnprocessedGeometryData();
} catch (RuntimeException e) {
log.error("파일 모니터링 중 치명적 오류 발생 - 이번 주기 건너뜀", e);
} catch (Exception e) {
log.error("파일 모니터링 중 오류 발생", e);
}
log.debug("파일 모니터링 완료");
} }
/** if (!Files.isReadable(watchDir)) {
* 필요한 디렉토리들이 존재하는지 확인하고 생성 log.warn("Watch 디렉토리에 읽기 권한이 없습니다: {}", watchDir);
*/ return;
private void ensureDirectoriesExist() {
boolean hasError = false;
try {
createDirectoryIfNotExists(config.getWatchDirectory());
} catch (IOException e) {
log.error("Watch 디렉토리 생성 실패: {} - {}", config.getWatchDirectory(), e.getMessage());
hasError = true;
}
try {
createDirectoryIfNotExists(config.getProcessedDirectory());
} catch (IOException e) {
log.error("Processed 디렉토리 생성 실패: {} - {}", config.getProcessedDirectory(), e.getMessage());
hasError = true;
}
try {
createDirectoryIfNotExists(config.getErrorDirectory());
} catch (IOException e) {
log.error("Error 디렉토리 생성 실패: {} - {}", config.getErrorDirectory(), e.getMessage());
hasError = true;
}
try {
createDirectoryIfNotExists(config.getTempDirectory());
} catch (IOException e) {
log.error("Temp 디렉토리 생성 실패: {} - {}", config.getTempDirectory(), e.getMessage());
hasError = true;
}
if (hasError) {
log.warn("일부 디렉토리 생성에 실패했습니다. 해당 기능은 제한될 수 있습니다.");
log.info("수동으로 다음 디렉토리들을 생성해주세요:");
log.info(" - {}", config.getWatchDirectory());
log.info(" - {}", config.getProcessedDirectory());
log.info(" - {}", config.getErrorDirectory());
log.info(" - {}", config.getTempDirectory());
} else {
log.info("모든 필요한 디렉토리가 준비되었습니다.");
}
} }
/** try (Stream<Path> files = Files.list(watchDir)) {
* 디렉토리가 존재하지 않으면 생성 files
*/ .filter(Files::isRegularFile)
private void createDirectoryIfNotExists(String directory) throws IOException { .filter(archiveExtractorService::isSupportedArchive)
if (directory == null || directory.trim().isEmpty()) { .filter(archiveExtractorService::isFileSizeValid)
throw new IllegalArgumentException("디렉토리 경로가 비어있습니다."); .forEach(this::processArchiveFile);
}
Path path = Paths.get(directory); } catch (IOException e) {
log.error("파일 목록 조회 실패: {}", watchDir, e);
}
}
if (!Files.exists(path)) { /** 개별 압축파일 처리 */
try { private void processArchiveFile(Path archiveFile) {
Files.createDirectories(path); String fileName = archiveFile.getFileName().toString();
log.info("디렉토리 생성 완료: {}", directory); log.info("압축파일 처리 시작: {}", fileName);
// 디렉토리 권한 설정 (Unix/Linux 환경에서) try {
try { // 1. 압축파일에서 GeoJSON 파일들 추출
if (!System.getProperty("os.name").toLowerCase().contains("windows")) { Map<String, String> geoJsonContents =
// rwxrwxr-x 권한 설정 archiveExtractorService.extractGeoJsonFiles(archiveFile);
java.nio.file.attribute.PosixFilePermissions.asFileAttribute(
java.nio.file.attribute.PosixFilePermissions.fromString("rwxrwxr-x")
);
}
} catch (Exception permissionException) {
log.debug("권한 설정 실패 (무시됨): {}", permissionException.getMessage());
}
} catch (IOException e) { if (geoJsonContents.isEmpty()) {
log.error("디렉토리 생성 실패: {} - {}", directory, e.getMessage()); log.warn("압축파일에서 GeoJSON 파일을 찾을 수 없습니다: {}", fileName);
throw new IOException("디렉토리를 생성할 수 없습니다: " + directory, e); moveFileToError(archiveFile, "GeoJSON 파일 없음");
} return;
} else if (!Files.isDirectory(path)) { }
throw new IOException("지정된 경로가 디렉토리가 아닙니다: " + directory);
} else if (!Files.isWritable(path)) { // 2. 처리 가능한 파일 수인지 확인
log.warn("디렉토리에 쓰기 권한이 없습니다: {}", directory); if (!geoJsonDataService.isProcessable(geoJsonContents)) {
} else { log.warn("처리할 수 없는 파일입니다: {}", fileName);
log.debug("디렉토리가 이미 존재합니다: {}", directory); moveFileToError(archiveFile, "처리 불가능한 파일");
} return;
}
// 3. GeoJSON 데이터를 데이터베이스에 저장
List<Long> savedLearnDataIds =
geoJsonDataService.processGeoJsonFiles(geoJsonContents, fileName);
if (savedLearnDataIds.isEmpty()) {
log.warn("저장된 학습 데이터가 없습니다: {}", fileName);
moveFileToError(archiveFile, "데이터 저장 실패");
return;
}
// 4. Geometry 데이터로 변환
List<Long> geometryIds = geometryConversionService.convertToGeometryData(savedLearnDataIds);
// 5. 처리 완료된 파일을 처리된 폴더로 이동
moveFileToProcessed(archiveFile);
log.info(
"압축파일 처리 완료: {} (학습 데이터: {}개, Geometry: {}개)",
fileName,
savedLearnDataIds.size(),
geometryIds.size());
} catch (Exception e) {
log.error("압축파일 처리 실패: {}", fileName, e);
try {
moveFileToError(archiveFile, "처리 중 오류 발생: " + e.getMessage());
} catch (IOException moveError) {
log.error("오류 파일 이동 실패: {}", fileName, moveError);
}
}
}
/** 미처리된 Geometry 변환 작업 수행 */
private void processUnprocessedGeometryData() {
try {
List<Long> processedIds = geometryConversionService.processUnprocessedLearnData();
if (!processedIds.isEmpty()) {
log.info("미처리 Geometry 변환 완료: {}개", processedIds.size());
}
} catch (Exception e) {
log.error("미처리 Geometry 변환 작업 실패", e);
}
}
/** 처리 완료된 파일을 processed 폴더로 이동 */
private void moveFileToProcessed(Path sourceFile) throws IOException {
String fileName = sourceFile.getFileName().toString();
String timestampedFileName = addTimestamp(fileName);
Path targetPath = Paths.get(config.getProcessedDirectory(), timestampedFileName);
Files.move(sourceFile, targetPath, StandardCopyOption.REPLACE_EXISTING);
log.info("파일을 처리된 폴더로 이동: {} -> {}", fileName, timestampedFileName);
}
/** 오류가 발생한 파일을 error 폴더로 이동 */
private void moveFileToError(Path sourceFile, String errorReason) throws IOException {
String fileName = sourceFile.getFileName().toString();
String errorFileName = addTimestamp(fileName) + ".error";
Path targetPath = Paths.get(config.getErrorDirectory(), errorFileName);
Files.move(sourceFile, targetPath, StandardCopyOption.REPLACE_EXISTING);
// 오류 정보를 별도 파일로 저장
String errorInfoFileName = errorFileName + ".info";
Path errorInfoPath = Paths.get(config.getErrorDirectory(), errorInfoFileName);
String errorInfo =
String.format(
"파일: %s%n오류 시간: %s%n오류 원인: %s%n", fileName, java.time.Instant.now(), errorReason);
Files.write(errorInfoPath, errorInfo.getBytes());
log.warn("파일을 오류 폴더로 이동: {} (원인: {})", fileName, errorReason);
}
/** 파일명에 타임스탬프 추가 */
private String addTimestamp(String fileName) {
int lastDotIndex = fileName.lastIndexOf('.');
String name = (lastDotIndex > 0) ? fileName.substring(0, lastDotIndex) : fileName;
String extension = (lastDotIndex > 0) ? fileName.substring(lastDotIndex) : "";
return String.format("%s_%d%s", name, System.currentTimeMillis(), extension);
}
/** 수동으로 특정 파일 처리 (테스트/관리 목적) */
public void processFileManually(String filePath) {
Path archiveFile = Paths.get(filePath);
if (!Files.exists(archiveFile)) {
log.error("파일이 존재하지 않습니다: {}", filePath);
return;
} }
/** if (!archiveExtractorService.isSupportedArchive(archiveFile)) {
* 모니터링 폴더에서 압축파일들을 찾아서 처리 log.error("지원하지 않는 압축파일 형식입니다: {}", filePath);
*/ return;
private void processArchiveFiles() {
Path watchDir = Paths.get(config.getWatchDirectory());
// 디렉토리 존재 확인
if (!Files.exists(watchDir)) {
log.debug("Watch 디렉토리가 존재하지 않습니다: {}", watchDir);
return;
}
if (!Files.isDirectory(watchDir)) {
log.warn("Watch 경로가 디렉토리가 아닙니다: {}", watchDir);
return;
}
if (!Files.isReadable(watchDir)) {
log.warn("Watch 디렉토리에 읽기 권한이 없습니다: {}", watchDir);
return;
}
try (Stream<Path> files = Files.list(watchDir)) {
files.filter(Files::isRegularFile)
.filter(archiveExtractorService::isSupportedArchive)
.filter(archiveExtractorService::isFileSizeValid)
.forEach(this::processArchiveFile);
} catch (IOException e) {
log.error("파일 목록 조회 실패: {}", watchDir, e);
}
} }
/** log.info("수동 파일 처리 시작: {}", filePath);
* 개별 압축파일 처리 processArchiveFile(archiveFile);
*/ }
private void processArchiveFile(Path archiveFile) {
String fileName = archiveFile.getFileName().toString();
log.info("압축파일 처리 시작: {}", fileName);
try { /** 디렉토리 초기화를 수동으로 실행 (API에서 호출 가능) */
// 1. 압축파일에서 GeoJSON 파일들 추출 public void initializeDirectoriesManually() {
Map<String, String> geoJsonContents = archiveExtractorService.extractGeoJsonFiles(archiveFile); log.info("디렉토리 수동 초기화 시작");
try {
ensureDirectoriesExist();
log.info("디렉토리 수동 초기화 완료");
} catch (Exception e) {
log.error("디렉토리 수동 초기화 실패", e);
throw new RuntimeException("디렉토리 초기화 실패", e);
}
}
if (geoJsonContents.isEmpty()) { /** 모니터링 상태 정보 반환 */
log.warn("압축파일에서 GeoJSON 파일을 찾을 수 없습니다: {}", fileName); public Map<String, Object> getMonitorStatus() {
moveFileToError(archiveFile, "GeoJSON 파일 없음"); return Map.of(
return; "watchDirectory", config.getWatchDirectory(),
} "processedDirectory", config.getProcessedDirectory(),
"errorDirectory", config.getErrorDirectory(),
"cronExpression", config.getCronExpression(),
"supportedExtensions", config.getSupportedExtensions(),
"maxFileSize", config.getMaxFileSize(),
"maxFileSizeMB", config.getMaxFileSize() / 1024 / 1024);
}
// 2. 처리 가능한 파일 수인지 확인 /** 시스템 통계 정보 조회 */
if (!geoJsonDataService.isProcessable(geoJsonContents)) { public Map<String, Object> getSystemStats() {
log.warn("처리할 수 없는 파일입니다: {}", fileName); Map<String, Object> stats = new HashMap<>();
moveFileToError(archiveFile, "처리 불가능한 파일");
return;
}
// 3. GeoJSON 데이터를 데이터베이스에 저장 try {
List<Long> savedLearnDataIds = geoJsonDataService.processGeoJsonFiles(geoJsonContents, fileName); // 데이터베이스 통계
long totalLearnData = learnDataRepository.count();
long totalGeomData = geomRepository.count();
long pendingAnalysis = learnDataRepository.countByAnalState("PENDING");
if (savedLearnDataIds.isEmpty()) { stats.put(
log.warn("저장된 학습 데이터가 없습니다: {}", fileName); "database",
moveFileToError(archiveFile, "데이터 저장 실패"); Map.of(
return; "totalLearnData", totalLearnData,
} "totalGeomData", totalGeomData,
"pendingAnalysis", pendingAnalysis));
// 4. Geometry 데이터로 변환 // 파일 시스템 통계
List<Long> geometryIds = geometryConversionService.convertToGeometryData(savedLearnDataIds); stats.put("fileSystem", getFileSystemStats());
// 5. 처리 완료된 파일을 처리된 폴더로 이동 // 모니터링 설정
moveFileToProcessed(archiveFile); stats.put(
"monitoring",
Map.of(
"isActive", true,
"cronExpression", "0/30 * * * * *",
"watchDirectory", config.getWatchDirectory(),
"processedDirectory", config.getProcessedDirectory(),
"errorDirectory", config.getErrorDirectory()));
log.info("압축파일 처리 완료: {} (학습 데이터: {}개, Geometry: {}개)", } catch (Exception e) {
fileName, savedLearnDataIds.size(), geometryIds.size()); log.error("통계 정보 조회 실패", e);
stats.put("error", e.getMessage());
} catch (Exception e) {
log.error("압축파일 처리 실패: {}", fileName, e);
try {
moveFileToError(archiveFile, "처리 중 오류 발생: " + e.getMessage());
} catch (IOException moveError) {
log.error("오류 파일 이동 실패: {}", fileName, moveError);
}
}
} }
/** return stats;
* 미처리된 Geometry 변환 작업 수행 }
*/
private void processUnprocessedGeometryData() { /** 파일 시스템 통계 조회 */
try { private Map<String, Object> getFileSystemStats() {
List<Long> processedIds = geometryConversionService.processUnprocessedLearnData(); Map<String, Object> fileStats = new HashMap<>();
if (!processedIds.isEmpty()) {
log.info("미처리 Geometry 변환 완료: {}개", processedIds.size()); try {
} // 각 디렉토리의 파일 수 계산
} catch (Exception e) { Path watchDir = Paths.get(config.getWatchDirectory());
log.error("미처리 Geometry 변환 작업 실패", e); Path processedDir = Paths.get(config.getProcessedDirectory());
} Path errorDir = Paths.get(config.getErrorDirectory());
fileStats.put("watchDirectoryCount", countFilesInDirectory(watchDir));
fileStats.put("processedDirectoryCount", countFilesInDirectory(processedDir));
fileStats.put("errorDirectoryCount", countFilesInDirectory(errorDir));
} catch (Exception e) {
log.warn("파일 시스템 통계 조회 실패: {}", e.getMessage());
fileStats.put("error", e.getMessage());
} }
/** return fileStats;
* 처리 완료된 파일을 processed 폴더로 이동 }
*/
private void moveFileToProcessed(Path sourceFile) throws IOException {
String fileName = sourceFile.getFileName().toString();
String timestampedFileName = addTimestamp(fileName);
Path targetPath = Paths.get(config.getProcessedDirectory(), timestampedFileName);
Files.move(sourceFile, targetPath, StandardCopyOption.REPLACE_EXISTING); /** 디렉토리 내 파일 개수 계산 */
log.info("파일을 처리된 폴더로 이동: {} -> {}", fileName, timestampedFileName); private long countFilesInDirectory(Path directory) {
if (!Files.exists(directory) || !Files.isDirectory(directory)) {
return 0;
} }
/** try (Stream<Path> files = Files.list(directory)) {
* 오류가 발생한 파일을 error 폴더로 이동 return files.filter(Files::isRegularFile).count();
*/ } catch (IOException e) {
private void moveFileToError(Path sourceFile, String errorReason) throws IOException { log.warn("디렉토리 파일 계산 실패: {}", directory, e);
String fileName = sourceFile.getFileName().toString(); return 0;
String errorFileName = addTimestamp(fileName) + ".error";
Path targetPath = Paths.get(config.getErrorDirectory(), errorFileName);
Files.move(sourceFile, targetPath, StandardCopyOption.REPLACE_EXISTING);
// 오류 정보를 별도 파일로 저장
String errorInfoFileName = errorFileName + ".info";
Path errorInfoPath = Paths.get(config.getErrorDirectory(), errorInfoFileName);
String errorInfo = String.format("파일: %s%n오류 시간: %s%n오류 원인: %s%n",
fileName, java.time.Instant.now(), errorReason);
Files.write(errorInfoPath, errorInfo.getBytes());
log.warn("파일을 오류 폴더로 이동: {} (원인: {})", fileName, errorReason);
}
/**
* 파일명에 타임스탬프 추가
*/
private String addTimestamp(String fileName) {
int lastDotIndex = fileName.lastIndexOf('.');
String name = (lastDotIndex > 0) ? fileName.substring(0, lastDotIndex) : fileName;
String extension = (lastDotIndex > 0) ? fileName.substring(lastDotIndex) : "";
return String.format("%s_%d%s", name, System.currentTimeMillis(), extension);
}
/**
* 수동으로 특정 파일 처리 (테스트/관리 목적)
*/
public void processFileManually(String filePath) {
Path archiveFile = Paths.get(filePath);
if (!Files.exists(archiveFile)) {
log.error("파일이 존재하지 않습니다: {}", filePath);
return;
}
if (!archiveExtractorService.isSupportedArchive(archiveFile)) {
log.error("지원하지 않는 압축파일 형식입니다: {}", filePath);
return;
}
log.info("수동 파일 처리 시작: {}", filePath);
processArchiveFile(archiveFile);
}
/**
* 디렉토리 초기화를 수동으로 실행 (API에서 호출 가능)
*/
public void initializeDirectoriesManually() {
log.info("디렉토리 수동 초기화 시작");
try {
ensureDirectoriesExist();
log.info("디렉토리 수동 초기화 완료");
} catch (Exception e) {
log.error("디렉토리 수동 초기화 실패", e);
throw new RuntimeException("디렉토리 초기화 실패", e);
}
}
/**
* 모니터링 상태 정보 반환
*/
public Map<String, Object> getMonitorStatus() {
return Map.of(
"watchDirectory", config.getWatchDirectory(),
"processedDirectory", config.getProcessedDirectory(),
"errorDirectory", config.getErrorDirectory(),
"cronExpression", config.getCronExpression(),
"supportedExtensions", config.getSupportedExtensions(),
"maxFileSize", config.getMaxFileSize(),
"maxFileSizeMB", config.getMaxFileSize() / 1024 / 1024
);
}
/**
* 시스템 통계 정보 조회
*/
public Map<String, Object> getSystemStats() {
Map<String, Object> stats = new HashMap<>();
try {
// 데이터베이스 통계
long totalLearnData = learnDataRepository.count();
long totalGeomData = geomRepository.count();
long pendingAnalysis = learnDataRepository.countByAnalState("PENDING");
stats.put("database", Map.of(
"totalLearnData", totalLearnData,
"totalGeomData", totalGeomData,
"pendingAnalysis", pendingAnalysis
));
// 파일 시스템 통계
stats.put("fileSystem", getFileSystemStats());
// 모니터링 설정
stats.put("monitoring", Map.of(
"isActive", true,
"cronExpression", "0/30 * * * * *",
"watchDirectory", config.getWatchDirectory(),
"processedDirectory", config.getProcessedDirectory(),
"errorDirectory", config.getErrorDirectory()
));
} catch (Exception e) {
log.error("통계 정보 조회 실패", e);
stats.put("error", e.getMessage());
}
return stats;
}
/**
* 파일 시스템 통계 조회
*/
private Map<String, Object> getFileSystemStats() {
Map<String, Object> fileStats = new HashMap<>();
try {
// 각 디렉토리의 파일 수 계산
Path watchDir = Paths.get(config.getWatchDirectory());
Path processedDir = Paths.get(config.getProcessedDirectory());
Path errorDir = Paths.get(config.getErrorDirectory());
fileStats.put("watchDirectoryCount", countFilesInDirectory(watchDir));
fileStats.put("processedDirectoryCount", countFilesInDirectory(processedDir));
fileStats.put("errorDirectoryCount", countFilesInDirectory(errorDir));
} catch (Exception e) {
log.warn("파일 시스템 통계 조회 실패: {}", e.getMessage());
fileStats.put("error", e.getMessage());
}
return fileStats;
}
/**
* 디렉토리 내 파일 개수 계산
*/
private long countFilesInDirectory(Path directory) {
if (!Files.exists(directory) || !Files.isDirectory(directory)) {
return 0;
}
try (Stream<Path> files = Files.list(directory)) {
return files.filter(Files::isRegularFile).count();
} catch (IOException e) {
log.warn("디렉토리 파일 계산 실패: {}", directory, e);
return 0;
}
} }
}
} }

View File

@@ -7,442 +7,430 @@ import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository; import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository; import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.*;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.locationtech.jts.geom.*; import org.locationtech.jts.geom.*;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.time.Instant; /** Geometry 데이터 변환 서비스 */
import java.util.*;
/**
* Geometry 데이터 변환 서비스
*/
@Slf4j @Slf4j
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
public class GeometryConversionService { public class GeometryConversionService {
private final MapSheetLearnDataRepository mapSheetLearnDataRepository; private final MapSheetLearnDataRepository mapSheetLearnDataRepository;
private final MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository; private final MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
private final ObjectMapper objectMapper; private final ObjectMapper objectMapper;
private final GeometryFactory geometryFactory = new GeometryFactory(); private final GeometryFactory geometryFactory = new GeometryFactory();
/** /** MapSheetLearnData의 JSON 데이터를 기반으로 Geometry 테이블에 저장 */
* MapSheetLearnData의 JSON 데이터를 기반으로 Geometry 테이블에 저장 @Transactional
*/ public List<Long> convertToGeometryData(List<Long> learnDataIds) {
@Transactional List<Long> processedIds = new ArrayList<>();
public List<Long> convertToGeometryData(List<Long> learnDataIds) {
List<Long> processedIds = new ArrayList<>();
log.info("Geometry 변환 시작: {} 개의 학습 데이터", learnDataIds.size()); log.info("Geometry 변환 시작: {} 개의 학습 데이터", learnDataIds.size());
for (Long dataId : learnDataIds) { for (Long dataId : learnDataIds) {
try { try {
if (dataId != null) { if (dataId != null) {
Optional<MapSheetLearnDataEntity> learnDataOpt = mapSheetLearnDataRepository.findById(dataId); Optional<MapSheetLearnDataEntity> learnDataOpt =
if (learnDataOpt.isPresent()) { mapSheetLearnDataRepository.findById(dataId);
List<Long> geometryIds = processLearnDataToGeometry(learnDataOpt.get()); if (learnDataOpt.isPresent()) {
processedIds.addAll(geometryIds); List<Long> geometryIds = processLearnDataToGeometry(learnDataOpt.get());
log.debug("학습 데이터 {} 에서 {} 개의 geometry 데이터 생성", dataId, geometryIds.size()); processedIds.addAll(geometryIds);
} else { log.debug("학습 데이터 {} 에서 {} 개의 geometry 데이터 생성", dataId, geometryIds.size());
log.warn("학습 데이터를 찾을 수 없습니다: {}", dataId); } else {
} log.warn("학습 데이터를 찾을 수 없습니다: {}", dataId);
} }
} catch (Exception e) {
log.error("Geometry 변환 실패 - 학습 데이터 ID: {}", dataId, e);
// 개별 변환 실패는 전체 처리를 중단시키지 않음
}
} }
} catch (Exception e) {
log.info("Geometry 변환 완료: {} 개 처리, {} 개의 geometry 생성", learnDataIds.size(), processedIds.size()); log.error("Geometry 변환 실패 - 학습 데이터 ID: {}", dataId, e);
return processedIds; // 개별 변환 실패는 전체 처리를 중단시키지 않음
}
} }
/** log.info(
* 개별 학습 데이터를 Geometry 데이터로 변환 "Geometry 변환 완료: {} 개 처리, {} 개의 geometry 생성", learnDataIds.size(), processedIds.size());
*/ return processedIds;
private List<Long> processLearnDataToGeometry(MapSheetLearnDataEntity learnData) { }
List<Long> geometryIds = new ArrayList<>();
try { /** 개별 학습 데이터를 Geometry 데이터로 변환 */
// 기존 geometry 데이터 삭제 (재생성) private List<Long> processLearnDataToGeometry(MapSheetLearnDataEntity learnData) {
mapSheetLearnDataGeomRepository.deleteByDataUid(learnData.getId()); List<Long> geometryIds = new ArrayList<>();
// JSON 데이터에서 GeoJSON 추출 try {
Map<String, Object> dataJson = learnData.getDataJson(); // 기존 geometry 데이터 삭제 (재생성)
if (dataJson == null || dataJson.isEmpty()) { mapSheetLearnDataGeomRepository.deleteByDataUid(learnData.getId());
log.warn("JSON 데이터가 없습니다: {}", learnData.getId());
return geometryIds;
}
// JSON을 GeoJSON으로 파싱
String geoJsonString = objectMapper.writeValueAsString(dataJson);
JsonNode geoJsonNode = objectMapper.readTree(geoJsonString);
// GeoJSON 타입에 따라 처리
String type = geoJsonNode.get("type").asText();
switch (type) {
case "FeatureCollection":
geometryIds.addAll(processFeatureCollection(geoJsonNode, learnData));
break;
case "Feature":
Long geometryId = processFeature(geoJsonNode, learnData);
if (geometryId != null) {
geometryIds.add(geometryId);
}
break;
case "Point":
case "LineString":
case "Polygon":
case "MultiPoint":
case "MultiLineString":
case "MultiPolygon":
Long directGeometryId = processDirectGeometry(geoJsonNode, learnData);
if (directGeometryId != null) {
geometryIds.add(directGeometryId);
}
break;
default:
log.warn("지원하지 않는 GeoJSON type: {} (데이터 ID: {})", type, learnData.getId());
}
} catch (Exception e) {
log.error("Geometry 변환 실패: 학습 데이터 ID {}", learnData.getId(), e);
throw new RuntimeException("Geometry 변환 실패", e);
}
// JSON 데이터에서 GeoJSON 추출
Map<String, Object> dataJson = learnData.getDataJson();
if (dataJson == null || dataJson.isEmpty()) {
log.warn("JSON 데이터가 없습니다: {}", learnData.getId());
return geometryIds; return geometryIds;
}
// JSON을 GeoJSON으로 파싱
String geoJsonString = objectMapper.writeValueAsString(dataJson);
JsonNode geoJsonNode = objectMapper.readTree(geoJsonString);
// GeoJSON 타입에 따라 처리
String type = geoJsonNode.get("type").asText();
switch (type) {
case "FeatureCollection":
geometryIds.addAll(processFeatureCollection(geoJsonNode, learnData));
break;
case "Feature":
Long geometryId = processFeature(geoJsonNode, learnData);
if (geometryId != null) {
geometryIds.add(geometryId);
}
break;
case "Point":
case "LineString":
case "Polygon":
case "MultiPoint":
case "MultiLineString":
case "MultiPolygon":
Long directGeometryId = processDirectGeometry(geoJsonNode, learnData);
if (directGeometryId != null) {
geometryIds.add(directGeometryId);
}
break;
default:
log.warn("지원하지 않는 GeoJSON type: {} (데이터 ID: {})", type, learnData.getId());
}
} catch (Exception e) {
log.error("Geometry 변환 실패: 학습 데이터 ID {}", learnData.getId(), e);
throw new RuntimeException("Geometry 변환 실패", e);
} }
/** return geometryIds;
* FeatureCollection 처리 }
*/
private List<Long> processFeatureCollection(JsonNode featureCollectionNode, MapSheetLearnDataEntity learnData) {
List<Long> geometryIds = new ArrayList<>();
if (!featureCollectionNode.has("features")) { /** FeatureCollection 처리 */
log.warn("FeatureCollection에 features 배열이 없습니다: {}", learnData.getId()); private List<Long> processFeatureCollection(
return geometryIds; JsonNode featureCollectionNode, MapSheetLearnDataEntity learnData) {
} List<Long> geometryIds = new ArrayList<>();
JsonNode featuresNode = featureCollectionNode.get("features"); if (!featureCollectionNode.has("features")) {
if (featuresNode.isArray()) { log.warn("FeatureCollection에 features 배열이 없습니다: {}", learnData.getId());
for (JsonNode featureNode : featuresNode) { return geometryIds;
try {
Long geometryId = processFeature(featureNode, learnData);
if (geometryId != null) {
geometryIds.add(geometryId);
}
} catch (Exception e) {
log.error("Feature 처리 실패 (학습 데이터 ID: {})", learnData.getId(), e);
}
}
}
return geometryIds;
} }
/** JsonNode featuresNode = featureCollectionNode.get("features");
* Feature 처리 if (featuresNode.isArray()) {
*/ for (JsonNode featureNode : featuresNode) {
private Long processFeature(JsonNode featureNode, MapSheetLearnDataEntity learnData) {
try { try {
if (!featureNode.has("geometry")) { Long geometryId = processFeature(featureNode, learnData);
log.warn("Feature에 geometry가 없습니다: {}", learnData.getId()); if (geometryId != null) {
return null; geometryIds.add(geometryId);
} }
JsonNode geometryNode = featureNode.get("geometry");
JsonNode propertiesNode = featureNode.has("properties") ? featureNode.get("properties") : null;
return createGeometryEntity(geometryNode, propertiesNode, learnData);
} catch (Exception e) { } catch (Exception e) {
log.error("Feature 처리 중 오류 (학습 데이터 ID: {})", learnData.getId(), e); log.error("Feature 처리 실패 (학습 데이터 ID: {})", learnData.getId(), e);
return null;
} }
}
} }
/** return geometryIds;
* 직접 Geometry 처리 }
*/
private Long processDirectGeometry(JsonNode geometryNode, MapSheetLearnDataEntity learnData) { /** Feature 처리 */
return createGeometryEntity(geometryNode, null, learnData); private Long processFeature(JsonNode featureNode, MapSheetLearnDataEntity learnData) {
try {
if (!featureNode.has("geometry")) {
log.warn("Feature에 geometry가 없습니다: {}", learnData.getId());
return null;
}
JsonNode geometryNode = featureNode.get("geometry");
JsonNode propertiesNode =
featureNode.has("properties") ? featureNode.get("properties") : null;
return createGeometryEntity(geometryNode, propertiesNode, learnData);
} catch (Exception e) {
log.error("Feature 처리 중 오류 (학습 데이터 ID: {})", learnData.getId(), e);
return null;
}
}
/** 직접 Geometry 처리 */
private Long processDirectGeometry(JsonNode geometryNode, MapSheetLearnDataEntity learnData) {
return createGeometryEntity(geometryNode, null, learnData);
}
/** GeometryEntity 생성 및 저장 */
private Long createGeometryEntity(
JsonNode geometryNode, JsonNode propertiesNode, MapSheetLearnDataEntity learnData) {
try {
MapSheetLearnDataGeomEntity geometryEntity = new MapSheetLearnDataGeomEntity();
// 기본 정보 설정
geometryEntity.setDataUid(learnData.getId());
geometryEntity.setBeforeYyyy(learnData.getDataYyyy());
geometryEntity.setAfterYyyy(learnData.getCompareYyyy());
// Geometry 변환 및 설정
Geometry geometry = parseGeometryFromGeoJson(geometryNode);
if (geometry != null) {
geometryEntity.setGeom(geometry);
geometryEntity.setGeoType(geometry.getGeometryType());
// 면적 계산 (Polygon인 경우)
if (geometry instanceof Polygon || geometry.getGeometryType().contains("Polygon")) {
double area = geometry.getArea();
geometryEntity.setArea(area);
}
} else {
log.warn("Geometry 변환 실패: {}", geometryNode);
return null;
}
// Properties에서 추가 정보 추출
if (propertiesNode != null) {
extractPropertiesData(geometryEntity, propertiesNode, learnData);
}
// 시간 정보 설정
ZonedDateTime now = ZonedDateTime.now();
geometryEntity.setCreatedDttm(now);
geometryEntity.setUpdatedDttm(now);
// 저장
MapSheetLearnDataGeomEntity savedEntity =
mapSheetLearnDataGeomRepository.save(geometryEntity);
return savedEntity.getId();
} catch (Exception e) {
log.error("GeometryEntity 생성 실패 (학습 데이터 ID: {})", learnData.getId(), e);
return null;
}
}
/** GeoJSON 노드에서 JTS Geometry 객체 생성 */
private Geometry parseGeometryFromGeoJson(JsonNode geometryNode) {
try {
if (!geometryNode.has("type") || !geometryNode.has("coordinates")) {
log.warn("유효하지 않은 Geometry 형식: type 또는 coordinates가 없습니다.");
return null;
}
String geometryType = geometryNode.get("type").asText();
JsonNode coordinatesNode = geometryNode.get("coordinates");
switch (geometryType.toLowerCase()) {
case "point":
return createPoint(coordinatesNode);
case "linestring":
return createLineString(coordinatesNode);
case "polygon":
return createPolygon(coordinatesNode);
case "multipoint":
return createMultiPoint(coordinatesNode);
case "multilinestring":
return createMultiLineString(coordinatesNode);
case "multipolygon":
return createMultiPolygon(coordinatesNode);
default:
log.warn("지원하지 않는 Geometry 타입: {}", geometryType);
return null;
}
} catch (Exception e) {
log.error("Geometry 파싱 실패", e);
return null;
}
}
private Point createPoint(JsonNode coordinatesNode) {
if (coordinatesNode.size() < 2) return null;
double x = coordinatesNode.get(0).asDouble();
double y = coordinatesNode.get(1).asDouble();
return geometryFactory.createPoint(new Coordinate(x, y));
}
private LineString createLineString(JsonNode coordinatesNode) {
List<Coordinate> coords = new ArrayList<>();
for (JsonNode coordNode : coordinatesNode) {
if (coordNode.size() >= 2) {
coords.add(new Coordinate(coordNode.get(0).asDouble(), coordNode.get(1).asDouble()));
}
}
return geometryFactory.createLineString(coords.toArray(new Coordinate[0]));
}
private Polygon createPolygon(JsonNode coordinatesNode) {
if (coordinatesNode.size() == 0) return null;
// Exterior ring
JsonNode exteriorRing = coordinatesNode.get(0);
List<Coordinate> coords = new ArrayList<>();
for (JsonNode coordNode : exteriorRing) {
if (coordNode.size() >= 2) {
coords.add(new Coordinate(coordNode.get(0).asDouble(), coordNode.get(1).asDouble()));
}
} }
/** if (coords.size() < 3) return null;
* GeometryEntity 생성 및 저장
*/
private Long createGeometryEntity(JsonNode geometryNode, JsonNode propertiesNode, MapSheetLearnDataEntity learnData) {
try {
MapSheetLearnDataGeomEntity geometryEntity = new MapSheetLearnDataGeomEntity();
// 기본 정보 설정 // Close ring if not already closed
geometryEntity.setDataUid(learnData.getId()); if (!coords.get(0).equals2D(coords.get(coords.size() - 1))) {
geometryEntity.setBeforeYyyy(learnData.getDataYyyy()); coords.add(new Coordinate(coords.get(0)));
geometryEntity.setAfterYyyy(learnData.getCompareYyyy());
// Geometry 변환 및 설정
Geometry geometry = parseGeometryFromGeoJson(geometryNode);
if (geometry != null) {
geometryEntity.setGeom(geometry);
geometryEntity.setGeoType(geometry.getGeometryType());
// 면적 계산 (Polygon인 경우)
if (geometry instanceof Polygon || geometry.getGeometryType().contains("Polygon")) {
double area = geometry.getArea();
geometryEntity.setArea(area);
}
} else {
log.warn("Geometry 변환 실패: {}", geometryNode);
return null;
}
// Properties에서 추가 정보 추출
if (propertiesNode != null) {
extractPropertiesData(geometryEntity, propertiesNode, learnData);
}
// 시간 정보 설정
ZonedDateTime now = ZonedDateTime.now();
geometryEntity.setCreatedDttm(now);
geometryEntity.setUpdatedDttm(now);
// 저장
MapSheetLearnDataGeomEntity savedEntity = mapSheetLearnDataGeomRepository.save(geometryEntity);
return savedEntity.getId();
} catch (Exception e) {
log.error("GeometryEntity 생성 실패 (학습 데이터 ID: {})", learnData.getId(), e);
return null;
}
} }
/** LinearRing shell = geometryFactory.createLinearRing(coords.toArray(new Coordinate[0]));
* GeoJSON 노드에서 JTS Geometry 객체 생성
*/
private Geometry parseGeometryFromGeoJson(JsonNode geometryNode) {
try {
if (!geometryNode.has("type") || !geometryNode.has("coordinates")) {
log.warn("유효하지 않은 Geometry 형식: type 또는 coordinates가 없습니다.");
return null;
}
String geometryType = geometryNode.get("type").asText(); // Interior rings (holes)
JsonNode coordinatesNode = geometryNode.get("coordinates"); LinearRing[] holes = new LinearRing[coordinatesNode.size() - 1];
for (int i = 1; i < coordinatesNode.size(); i++) {
switch (geometryType.toLowerCase()) { JsonNode holeRing = coordinatesNode.get(i);
case "point": List<Coordinate> holeCoords = new ArrayList<>();
return createPoint(coordinatesNode); for (JsonNode coordNode : holeRing) {
case "linestring": if (coordNode.size() >= 2) {
return createLineString(coordinatesNode); holeCoords.add(new Coordinate(coordNode.get(0).asDouble(), coordNode.get(1).asDouble()));
case "polygon":
return createPolygon(coordinatesNode);
case "multipoint":
return createMultiPoint(coordinatesNode);
case "multilinestring":
return createMultiLineString(coordinatesNode);
case "multipolygon":
return createMultiPolygon(coordinatesNode);
default:
log.warn("지원하지 않는 Geometry 타입: {}", geometryType);
return null;
}
} catch (Exception e) {
log.error("Geometry 파싱 실패", e);
return null;
} }
}
if (holeCoords.size() >= 3) {
if (!holeCoords.get(0).equals2D(holeCoords.get(holeCoords.size() - 1))) {
holeCoords.add(new Coordinate(holeCoords.get(0)));
}
holes[i - 1] = geometryFactory.createLinearRing(holeCoords.toArray(new Coordinate[0]));
}
} }
private Point createPoint(JsonNode coordinatesNode) { return geometryFactory.createPolygon(shell, holes);
if (coordinatesNode.size() < 2) return null; }
double x = coordinatesNode.get(0).asDouble();
double y = coordinatesNode.get(1).asDouble(); private MultiPoint createMultiPoint(JsonNode coordinatesNode) {
return geometryFactory.createPoint(new Coordinate(x, y)); List<Point> points = new ArrayList<>();
for (JsonNode pointNode : coordinatesNode) {
Point point = createPoint(pointNode);
if (point != null) {
points.add(point);
}
}
return geometryFactory.createMultiPoint(points.toArray(new Point[0]));
}
private MultiLineString createMultiLineString(JsonNode coordinatesNode) {
List<LineString> lineStrings = new ArrayList<>();
for (JsonNode lineNode : coordinatesNode) {
LineString line = createLineString(lineNode);
if (line != null) {
lineStrings.add(line);
}
}
return geometryFactory.createMultiLineString(lineStrings.toArray(new LineString[0]));
}
private MultiPolygon createMultiPolygon(JsonNode coordinatesNode) {
List<Polygon> polygons = new ArrayList<>();
for (JsonNode polygonNode : coordinatesNode) {
Polygon polygon = createPolygon(polygonNode);
if (polygon != null) {
polygons.add(polygon);
}
}
return geometryFactory.createMultiPolygon(polygons.toArray(new Polygon[0]));
}
/** Properties에서 추가 정보 추출 */
private void extractPropertiesData(
MapSheetLearnDataGeomEntity geometryEntity,
JsonNode propertiesNode,
MapSheetLearnDataEntity learnData) {
// CD 정확도 정보
if (propertiesNode.has("cd_prob")) {
try {
double cdProb = propertiesNode.get("cd_prob").asDouble();
geometryEntity.setCdProb(cdProb);
} catch (Exception e) {
log.debug("cd_prob 파싱 실패", e);
}
} }
private LineString createLineString(JsonNode coordinatesNode) { // Before class 정보
List<Coordinate> coords = new ArrayList<>(); if (propertiesNode.has("class_before_name")) {
for (JsonNode coordNode : coordinatesNode) { geometryEntity.setClassBeforeName(propertiesNode.get("class_before_name").asText());
if (coordNode.size() >= 2) { }
coords.add(new Coordinate(coordNode.get(0).asDouble(), coordNode.get(1).asDouble())); if (propertiesNode.has("class_before_prob")) {
} try {
} double beforeProb = propertiesNode.get("class_before_prob").asDouble();
return geometryFactory.createLineString(coords.toArray(new Coordinate[0])); geometryEntity.setClassBeforeProb(beforeProb);
} catch (Exception e) {
log.debug("class_before_prob 파싱 실패", e);
}
} }
private Polygon createPolygon(JsonNode coordinatesNode) { // After class 정보
if (coordinatesNode.size() == 0) return null; if (propertiesNode.has("class_after_name")) {
geometryEntity.setClassAfterName(propertiesNode.get("class_after_name").asText());
// Exterior ring }
JsonNode exteriorRing = coordinatesNode.get(0); if (propertiesNode.has("class_after_prob")) {
List<Coordinate> coords = new ArrayList<>(); try {
for (JsonNode coordNode : exteriorRing) { double afterProb = propertiesNode.get("class_after_prob").asDouble();
if (coordNode.size() >= 2) { geometryEntity.setClassAfterProb(afterProb);
coords.add(new Coordinate(coordNode.get(0).asDouble(), coordNode.get(1).asDouble())); } catch (Exception e) {
} log.debug("class_after_prob 파싱 실패", e);
} }
if (coords.size() < 3) return null;
// Close ring if not already closed
if (!coords.get(0).equals2D(coords.get(coords.size() - 1))) {
coords.add(new Coordinate(coords.get(0)));
}
LinearRing shell = geometryFactory.createLinearRing(coords.toArray(new Coordinate[0]));
// Interior rings (holes)
LinearRing[] holes = new LinearRing[coordinatesNode.size() - 1];
for (int i = 1; i < coordinatesNode.size(); i++) {
JsonNode holeRing = coordinatesNode.get(i);
List<Coordinate> holeCoords = new ArrayList<>();
for (JsonNode coordNode : holeRing) {
if (coordNode.size() >= 2) {
holeCoords.add(new Coordinate(coordNode.get(0).asDouble(), coordNode.get(1).asDouble()));
}
}
if (holeCoords.size() >= 3) {
if (!holeCoords.get(0).equals2D(holeCoords.get(holeCoords.size() - 1))) {
holeCoords.add(new Coordinate(holeCoords.get(0)));
}
holes[i - 1] = geometryFactory.createLinearRing(holeCoords.toArray(new Coordinate[0]));
}
}
return geometryFactory.createPolygon(shell, holes);
} }
private MultiPoint createMultiPoint(JsonNode coordinatesNode) { // 도엽 번호
List<Point> points = new ArrayList<>(); if (propertiesNode.has("map_sheet_num")) {
for (JsonNode pointNode : coordinatesNode) { try {
Point point = createPoint(pointNode); long mapSheetNum = propertiesNode.get("map_sheet_num").asLong();
if (point != null) { geometryEntity.setMapSheetNum(mapSheetNum);
points.add(point); } catch (Exception e) {
} log.debug("map_sheet_num 파싱 실패", e);
} }
return geometryFactory.createMultiPoint(points.toArray(new Point[0]));
} }
private MultiLineString createMultiLineString(JsonNode coordinatesNode) { // 면적 (properties에서 제공되는 경우)
List<LineString> lineStrings = new ArrayList<>(); if (propertiesNode.has("area")) {
for (JsonNode lineNode : coordinatesNode) { try {
LineString line = createLineString(lineNode); double area = propertiesNode.get("area").asDouble();
if (line != null) { geometryEntity.setArea(area);
lineStrings.add(line); } catch (Exception e) {
} log.debug("area 파싱 실패", e);
} }
return geometryFactory.createMultiLineString(lineStrings.toArray(new LineString[0])); }
}
/** 미처리된 학습 데이터들을 찾아서 자동으로 geometry 변환 수행 */
@Transactional
public List<Long> processUnprocessedLearnData() {
// 분석 상태가 PENDING인 학습 데이터 조회
List<MapSheetLearnDataEntity> unprocessedData =
mapSheetLearnDataRepository.findByAnalState("PENDING");
if (unprocessedData.isEmpty()) {
log.debug("처리할 미완료 학습 데이터가 없습니다.");
return new ArrayList<>();
} }
private MultiPolygon createMultiPolygon(JsonNode coordinatesNode) { log.info("미처리 학습 데이터 {}개에 대해 geometry 변환을 수행합니다.", unprocessedData.size());
List<Polygon> polygons = new ArrayList<>();
for (JsonNode polygonNode : coordinatesNode) { List<Long> processedIds = new ArrayList<>();
Polygon polygon = createPolygon(polygonNode); for (MapSheetLearnDataEntity data : unprocessedData) {
if (polygon != null) { try {
polygons.add(polygon); List<Long> geometryIds = processLearnDataToGeometry(data);
} processedIds.addAll(geometryIds);
}
return geometryFactory.createMultiPolygon(polygons.toArray(new Polygon[0])); // 처리 완료 상태로 업데이트
data.setAnalState("COMPLETED");
mapSheetLearnDataRepository.save(data);
} catch (Exception e) {
log.error("미처리 학습 데이터 처리 실패: {}", data.getId(), e);
// 실패한 경우 ERROR 상태로 설정
data.setAnalState("ERROR");
mapSheetLearnDataRepository.save(data);
}
} }
/** return processedIds;
* Properties에서 추가 정보 추출 }
*/
private void extractPropertiesData(MapSheetLearnDataGeomEntity geometryEntity, JsonNode propertiesNode, MapSheetLearnDataEntity learnData) {
// CD 정확도 정보
if (propertiesNode.has("cd_prob")) {
try {
double cdProb = propertiesNode.get("cd_prob").asDouble();
geometryEntity.setCdProb(cdProb);
} catch (Exception e) {
log.debug("cd_prob 파싱 실패", e);
}
}
// Before class 정보
if (propertiesNode.has("class_before_name")) {
geometryEntity.setClassBeforeName(propertiesNode.get("class_before_name").asText());
}
if (propertiesNode.has("class_before_prob")) {
try {
double beforeProb = propertiesNode.get("class_before_prob").asDouble();
geometryEntity.setClassBeforeProb(beforeProb);
} catch (Exception e) {
log.debug("class_before_prob 파싱 실패", e);
}
}
// After class 정보
if (propertiesNode.has("class_after_name")) {
geometryEntity.setClassAfterName(propertiesNode.get("class_after_name").asText());
}
if (propertiesNode.has("class_after_prob")) {
try {
double afterProb = propertiesNode.get("class_after_prob").asDouble();
geometryEntity.setClassAfterProb(afterProb);
} catch (Exception e) {
log.debug("class_after_prob 파싱 실패", e);
}
}
// 도엽 번호
if (propertiesNode.has("map_sheet_num")) {
try {
long mapSheetNum = propertiesNode.get("map_sheet_num").asLong();
geometryEntity.setMapSheetNum(mapSheetNum);
} catch (Exception e) {
log.debug("map_sheet_num 파싱 실패", e);
}
}
// 면적 (properties에서 제공되는 경우)
if (propertiesNode.has("area")) {
try {
double area = propertiesNode.get("area").asDouble();
geometryEntity.setArea(area);
} catch (Exception e) {
log.debug("area 파싱 실패", e);
}
}
}
/**
 * Finds learn-data records still awaiting analysis and runs the geometry
 * conversion for each of them automatically.
 *
 * <p>Each record is handled independently: a failure on one record marks that
 * record as {@code ERROR} and processing continues with the next one. The
 * whole pass runs in a single transaction.
 *
 * @return ids of every geometry row created during this pass; empty when
 *     there was nothing pending
 */
@Transactional
public List<Long> processUnprocessedLearnData() {
    // Only records whose analysis state is still PENDING are candidates.
    List<MapSheetLearnDataEntity> pendingEntries =
            mapSheetLearnDataRepository.findByAnalState("PENDING");
    if (pendingEntries.isEmpty()) {
        log.debug("처리할 미완료 학습 데이터가 없습니다.");
        return new ArrayList<>();
    }
    log.info("미처리 학습 데이터 {}개에 대해 geometry 변환을 수행합니다.", pendingEntries.size());

    List<Long> createdGeometryIds = new ArrayList<>();
    for (MapSheetLearnDataEntity entry : pendingEntries) {
        try {
            // Convert this record's payload into geometry rows.
            createdGeometryIds.addAll(processLearnDataToGeometry(entry));
            // Conversion succeeded: flag the record as done.
            entry.setAnalState("COMPLETED");
        } catch (Exception e) {
            // Keep going on failure; only this record is flagged as ERROR.
            log.error("미처리 학습 데이터 처리 실패: {}", entry.getId(), e);
            entry.setAnalState("ERROR");
        }
        mapSheetLearnDataRepository.save(entry);
    }
    return createdGeometryIds;
}
} }

View File

@@ -4,7 +4,6 @@ import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Detail; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Detail;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchReq;
import com.kamco.cd.kamcoback.inference.service.InferenceResultService; import com.kamco.cd.kamcoback.inference.service.InferenceResultService;
import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.Parameter;
@@ -29,132 +28,125 @@ public class InferenceResultApiController {
private final InferenceResultService inferenceResultService; private final InferenceResultService inferenceResultService;
@Operation( @Operation(summary = "추론관리 분석결과 목록 조회", description = "분석상태, 제목으로 분석결과를 조회 합니다.")
summary = "추론관리 분석결과 목록 조회",
description =
"분석상태, 제목으로 분석결과를 조회 합니다.")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
responseCode = "200", responseCode = "200",
description = "검색 성공", description = "검색 성공",
content = content =
@Content( @Content(
mediaType = "application/json", mediaType = "application/json",
schema = @Schema(implementation = Page.class))), schema = @Schema(implementation = Page.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content), @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
}) })
@GetMapping("/list") @GetMapping("/list")
public ApiResponseDto<Page<InferenceResultDto.AnalResList>> getInferenceResultList( public ApiResponseDto<Page<InferenceResultDto.AnalResList>> getInferenceResultList(
@Parameter(description = "분석상태", example = "0000") @Parameter(description = "분석상태", example = "0000") @RequestParam(required = false)
@RequestParam(required = false) String statCode,
String statCode, @Parameter(description = "제목", example = "2023_2024년도") @RequestParam(required = false)
@Parameter(description = "제목", example = "2023_2024년도") @RequestParam(required = false) String title,
String title, @Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0")
@Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0") int page,
int page, @Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20")
@Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20") int size,
int size, @Parameter(description = "정렬 조건 (형식: 필드명,방향)", example = "name,asc")
@Parameter(description = "정렬 조건 (형식: 필드명,방향)", example = "name,asc") @RequestParam(required = false)
@RequestParam(required = false) String sort) {
String sort InferenceResultDto.SearchReq searchReq =
) { new InferenceResultDto.SearchReq(statCode, title, page, size, sort);
InferenceResultDto.SearchReq searchReq = new InferenceResultDto.SearchReq(statCode, title, page, size, sort); Page<InferenceResultDto.AnalResList> analResList =
Page<InferenceResultDto.AnalResList> analResList = inferenceResultService.getInferenceResultList(searchReq); inferenceResultService.getInferenceResultList(searchReq);
return ApiResponseDto.ok(analResList); return ApiResponseDto.ok(analResList);
} }
@Operation( @Operation(summary = "추론관리 분석결과 요약정보", description = "분석결과 요약정보를 조회합니다.")
summary = "추론관리 분석결과 요약정보",
description =
"분석결과 요약정보를 조회합니다.")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
responseCode = "200", responseCode = "200",
description = "검색 성공", description = "검색 성공",
content = content =
@Content( @Content(
mediaType = "application/json", mediaType = "application/json",
schema = @Schema(implementation = InferenceResultDto.AnalResSummary.class))), schema = @Schema(implementation = InferenceResultDto.AnalResSummary.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content), @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
}) })
@GetMapping("/summary") @GetMapping("/summary")
public ApiResponseDto<InferenceResultDto.AnalResSummary> getInferenceResultSummary( public ApiResponseDto<InferenceResultDto.AnalResSummary> getInferenceResultSummary(
@Parameter(description = "목록 id", example = "1") @Parameter(description = "목록 id", example = "1") @RequestParam Long id) {
@RequestParam Long id) {
return ApiResponseDto.ok(inferenceResultService.getInferenceResultSummary(id)); return ApiResponseDto.ok(inferenceResultService.getInferenceResultSummary(id));
} }
@Operation(summary = "추론관리 분석결과 상세", description = "분석결과 상제 정보 Summary, DashBoard")
@Operation(
summary = "추론관리 분석결과 상세",
description =
"분석결과 상제 정보 Summary, DashBoard")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
responseCode = "200", responseCode = "200",
description = "검색 성공", description = "검색 성공",
content = content =
@Content( @Content(
mediaType = "application/json", mediaType = "application/json",
schema = @Schema(implementation = InferenceResultDto.Detail.class))), schema = @Schema(implementation = InferenceResultDto.Detail.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content), @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
}) })
@GetMapping("/detail") @GetMapping("/detail")
public ApiResponseDto<InferenceResultDto.Detail> getInferenceDetail( public ApiResponseDto<InferenceResultDto.Detail> getInferenceDetail(
@Parameter(description = "목록 id", example = "1") @Parameter(description = "목록 id", example = "1") @RequestParam Long id) {
@RequestParam Long id) {
// summary // summary
InferenceResultDto.AnalResSummary summary = inferenceResultService.getInferenceResultSummary(id); InferenceResultDto.AnalResSummary summary =
//dashBoard inferenceResultService.getInferenceResultSummary(id);
// dashBoard
List<InferenceResultDto.Dashboard> dashboardList = this.getInferenceResultDashboard(id); List<InferenceResultDto.Dashboard> dashboardList = this.getInferenceResultDashboard(id);
return ApiResponseDto.ok(new Detail(summary, dashboardList)); return ApiResponseDto.ok(new Detail(summary, dashboardList));
} }
@Operation( @Operation(summary = "추론관리 분석결과 상세 목록", description = "추론관리 분석결과 상세 목록 geojson 데이터 조회")
summary = "추론관리 분석결과 상세 목록",
description =
"추론관리 분석결과 상세 목록 geojson 데이터 조회")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
responseCode = "200", responseCode = "200",
description = "검색 성공", description = "검색 성공",
content = content =
@Content( @Content(
mediaType = "application/json", mediaType = "application/json",
schema = @Schema(implementation = Page.class))), schema = @Schema(implementation = Page.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content), @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
}) })
@GetMapping("/geom") @GetMapping("/geom")
public ApiResponseDto<Page<InferenceResultDto.Geom>> getInferenceResultGeomList( public ApiResponseDto<Page<InferenceResultDto.Geom>> getInferenceResultGeomList(
@Parameter(description = "기준년도 분류", example = "0001") @RequestParam(required = false) String targetClass, @Parameter(description = "기준년도 분류", example = "0001") @RequestParam(required = false)
@Parameter(description = "비교년도 분류", example = "0002") @RequestParam(required = false) String compareClass, String targetClass,
@Parameter(description = "5000:1 도협번호 37801011,37801012") @RequestParam(required = false) List<Long> mapSheetNum, @Parameter(description = "비교년도 분류", example = "0002") @RequestParam(required = false)
@Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0") int page, String compareClass,
@Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20") int size, @Parameter(description = "5000:1 도협번호 37801011,37801012") @RequestParam(required = false)
@Parameter(description = "정렬 조건 (형식: 필드명,방향)", example = "name,asc") @RequestParam(required = false) String sort List<Long> mapSheetNum,
) { @Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0")
InferenceResultDto.SearchGeoReq searchGeoReq = new InferenceResultDto.SearchGeoReq(targetClass, compareClass, mapSheetNum, page, size, sort); int page,
Page<InferenceResultDto.Geom> geomList = inferenceResultService.getInferenceResultGeomList(searchGeoReq); @Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20")
int size,
@Parameter(description = "정렬 조건 (형식: 필드명,방향)", example = "name,asc")
@RequestParam(required = false)
String sort) {
InferenceResultDto.SearchGeoReq searchGeoReq =
new InferenceResultDto.SearchGeoReq(
targetClass, compareClass, mapSheetNum, page, size, sort);
Page<InferenceResultDto.Geom> geomList =
inferenceResultService.getInferenceResultGeomList(searchGeoReq);
return ApiResponseDto.ok(geomList); return ApiResponseDto.ok(geomList);
} }
/** /**
* 분석결과 상세 대시보드 조회 * 분석결과 상세 대시보드 조회
*
* @param id * @param id
* @return * @return
*/ */
private List<Dashboard> getInferenceResultDashboard(Long id) { private List<Dashboard> getInferenceResultDashboard(Long id) {
return inferenceResultService.getInferenceResultBasic(id); return inferenceResultService.getInferenceResultBasic(id);
} }
} }

View File

@@ -8,7 +8,6 @@ import lombok.AllArgsConstructor;
import lombok.Getter; import lombok.Getter;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;
import lombok.Setter; import lombok.Setter;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort;
@@ -23,23 +22,20 @@ public class InferenceResultDto {
private String dataName; private String dataName;
private Long mapSheepNum; private Long mapSheepNum;
private Long detectingCnt; private Long detectingCnt;
@JsonFormatDttm @JsonFormatDttm private ZonedDateTime analStrtDttm;
private ZonedDateTime analStrtDttm; @JsonFormatDttm private ZonedDateTime analEndDttm;
@JsonFormatDttm
private ZonedDateTime analEndDttm;
private Long analSec; private Long analSec;
private String analState; private String analState;
public Basic( public Basic(
Long id, Long id,
String dataName, String dataName,
Long mapSheepNum, Long mapSheepNum,
Long detectingCnt, Long detectingCnt,
ZonedDateTime analStrtDttm, ZonedDateTime analStrtDttm,
ZonedDateTime analEndDttm, ZonedDateTime analEndDttm,
Long analSec, Long analSec,
String analState String analState) {
) {
this.id = id; this.id = id;
this.dataName = dataName; this.dataName = dataName;
this.mapSheepNum = mapSheepNum; this.mapSheepNum = mapSheepNum;
@@ -58,10 +54,8 @@ public class InferenceResultDto {
private String analTitle; private String analTitle;
private String analMapSheet; private String analMapSheet;
private Long detectingCnt; private Long detectingCnt;
@JsonFormatDttm @JsonFormatDttm private ZonedDateTime analStrtDttm;
private ZonedDateTime analStrtDttm; @JsonFormatDttm private ZonedDateTime analEndDttm;
@JsonFormatDttm
private ZonedDateTime analEndDttm;
private Long analSec; private Long analSec;
private Long analPredSec; private Long analPredSec;
private String analState; private String analState;
@@ -69,18 +63,17 @@ public class InferenceResultDto {
private String gukyuinUsed; private String gukyuinUsed;
public AnalResList( public AnalResList(
Long id, Long id,
String analTitle, String analTitle,
String analMapSheet, String analMapSheet,
Long detectingCnt, Long detectingCnt,
ZonedDateTime analStrtDttm, ZonedDateTime analStrtDttm,
ZonedDateTime analEndDttm, ZonedDateTime analEndDttm,
Long analSec, Long analSec,
Long analPredSec, Long analPredSec,
String analState, String analState,
String analStateNm, String analStateNm,
String gukyuinUsed String gukyuinUsed) {
) {
this.id = id; this.id = id;
this.analTitle = analTitle; this.analTitle = analTitle;
this.analMapSheet = analMapSheet; this.analMapSheet = analMapSheet;
@@ -103,10 +96,8 @@ public class InferenceResultDto {
private Integer targetYyyy; private Integer targetYyyy;
private Integer compareYyyy; private Integer compareYyyy;
private String analMapSheet; private String analMapSheet;
@JsonFormatDttm @JsonFormatDttm private ZonedDateTime analStrtDttm;
private ZonedDateTime analStrtDttm; @JsonFormatDttm private ZonedDateTime analEndDttm;
@JsonFormatDttm
private ZonedDateTime analEndDttm;
private Long analSec; private Long analSec;
private Long analPredSec; private Long analPredSec;
private String resultUrl; private String resultUrl;
@@ -116,21 +107,20 @@ public class InferenceResultDto {
private String analStateNm; private String analStateNm;
public AnalResSummary( public AnalResSummary(
Long id, Long id,
String modelInfo, String modelInfo,
Integer targetYyyy, Integer targetYyyy,
Integer compareYyyy, Integer compareYyyy,
String analMapSheet, String analMapSheet,
ZonedDateTime analStrtDttm, ZonedDateTime analStrtDttm,
ZonedDateTime analEndDttm, ZonedDateTime analEndDttm,
Long analSec, Long analSec,
Long analPredSec, Long analPredSec,
String resultUrl, String resultUrl,
Long detectingCnt, Long detectingCnt,
Double accuracy, Double accuracy,
String analState, String analState,
String analStateNm String analStateNm) {
) {
this.id = id; this.id = id;
this.modelInfo = modelInfo; this.modelInfo = modelInfo;
this.targetYyyy = targetYyyy; this.targetYyyy = targetYyyy;
@@ -157,30 +147,27 @@ public class InferenceResultDto {
String classAfterName; String classAfterName;
Long classBeforeCnt; Long classBeforeCnt;
Long classAfterCnt; Long classAfterCnt;
@JsonFormatDttm @JsonFormatDttm ZonedDateTime createdDttm;
ZonedDateTime createdDttm;
Long createdUid; Long createdUid;
@JsonFormatDttm @JsonFormatDttm ZonedDateTime updatedDttm;
ZonedDateTime updatedDttm;
Long updatedUid; Long updatedUid;
Long refMapSheetNum; Long refMapSheetNum;
Long dataUid; Long dataUid;
public Dashboard( public Dashboard(
Integer compareYyyy, Integer compareYyyy,
Integer targetYyyy, Integer targetYyyy,
Long mapSheetNum, Long mapSheetNum,
String classBeforeName, String classBeforeName,
String classAfterName, String classAfterName,
Long classBeforeCnt, Long classBeforeCnt,
Long classAfterCnt, Long classAfterCnt,
ZonedDateTime createdDttm, ZonedDateTime createdDttm,
Long createdUid, Long createdUid,
ZonedDateTime updatedDttm, ZonedDateTime updatedDttm,
Long updatedUid, Long updatedUid,
Long refMapSheetNum, Long refMapSheetNum,
Long dataUid Long dataUid) {
) {
this.compareYyyy = compareYyyy; this.compareYyyy = compareYyyy;
this.targetYyyy = targetYyyy; this.targetYyyy = targetYyyy;
this.mapSheetNum = mapSheetNum; this.mapSheetNum = mapSheetNum;
@@ -202,10 +189,7 @@ public class InferenceResultDto {
AnalResSummary summary; AnalResSummary summary;
List<Dashboard> dashboard; List<Dashboard> dashboard;
public Detail( public Detail(AnalResSummary summary, List<Dashboard> dashboard) {
AnalResSummary summary,
List<Dashboard> dashboard
) {
this.summary = summary; this.summary = summary;
this.dashboard = dashboard; this.dashboard = dashboard;
} }
@@ -224,16 +208,15 @@ public class InferenceResultDto {
Long mapSheetNum; Long mapSheetNum;
public Geom( public Geom(
Integer compareYyyy, Integer compareYyyy,
Integer targetYyyy, Integer targetYyyy,
String classBeforeCd, String classBeforeCd,
String classBeforeName, String classBeforeName,
Double classBeforeProb, Double classBeforeProb,
String classAfterCd, String classAfterCd,
String classAfterName, String classAfterName,
Double classAfterProb, Double classAfterProb,
Long mapSheetNum Long mapSheetNum) {
) {
this.compareYyyy = compareYyyy; this.compareYyyy = compareYyyy;
this.targetYyyy = targetYyyy; this.targetYyyy = targetYyyy;
this.classBeforeCd = classBeforeCd; this.classBeforeCd = classBeforeCd;
@@ -246,9 +229,6 @@ public class InferenceResultDto {
} }
} }
@Schema(name = "InferenceResultSearchReq", description = "분석결과 목록 요청 정보") @Schema(name = "InferenceResultSearchReq", description = "분석결과 목록 요청 정보")
@Getter @Getter
@Setter @Setter
@@ -270,7 +250,7 @@ public class InferenceResultDto {
String[] sortParams = sort.split(","); String[] sortParams = sort.split(",");
String property = sortParams[0]; String property = sortParams[0];
Sort.Direction direction = Sort.Direction direction =
sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC; sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC;
return PageRequest.of(page, size, Sort.by(direction, property)); return PageRequest.of(page, size, Sort.by(direction, property));
} }
return PageRequest.of(page, size); return PageRequest.of(page, size);
@@ -288,7 +268,7 @@ public class InferenceResultDto {
private String targetClass; private String targetClass;
// 비교년도 // 비교년도
private String compareClass; private String compareClass;
//분석도엽 // 분석도엽
private List<Long> mapSheetNum; private List<Long> mapSheetNum;
// 페이징 파라미터 // 페이징 파라미터
@@ -301,7 +281,7 @@ public class InferenceResultDto {
String[] sortParams = sort.split(","); String[] sortParams = sort.split(",");
String property = sortParams[0]; String property = sortParams[0];
Sort.Direction direction = Sort.Direction direction =
sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC; sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC;
return PageRequest.of(page, size, Sort.by(direction, property)); return PageRequest.of(page, size, Sort.by(direction, property));
} }
return PageRequest.of(page, size); return PageRequest.of(page, size);

View File

@@ -1,7 +1,6 @@
package com.kamco.cd.kamcoback.inference.service; package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Basic;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService; import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import java.util.List; import java.util.List;
@@ -19,15 +18,18 @@ public class InferenceResultService {
/** /**
* 추론관리 > 분석결과 목록 조회 * 추론관리 > 분석결과 목록 조회
*
* @param searchReq * @param searchReq
* @return * @return
*/ */
public Page<InferenceResultDto.AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq) { public Page<InferenceResultDto.AnalResList> getInferenceResultList(
InferenceResultDto.SearchReq searchReq) {
return inferenceResultCoreService.getInferenceResultList(searchReq); return inferenceResultCoreService.getInferenceResultList(searchReq);
} }
/** /**
* 분석결과 요약정보 * 분석결과 요약정보
*
* @param id * @param id
* @return * @return
*/ */
@@ -37,6 +39,7 @@ public class InferenceResultService {
/** /**
* 분석결과 대시보드 조회 * 분석결과 대시보드 조회
*
* @param id * @param id
* @return * @return
*/ */
@@ -46,12 +49,12 @@ public class InferenceResultService {
/** /**
* 분석결과 상세 목록 * 분석결과 상세 목록
*
* @param searchGeoReq * @param searchGeoReq
* @return * @return
*/ */
public Page<InferenceResultDto.Geom> getInferenceResultGeomList(InferenceResultDto.SearchGeoReq searchGeoReq) { public Page<InferenceResultDto.Geom> getInferenceResultGeomList(
InferenceResultDto.SearchGeoReq searchGeoReq) {
return inferenceResultCoreService.getInferenceResultGeomList(searchGeoReq); return inferenceResultCoreService.getInferenceResultGeomList(searchGeoReq);
} }
} }

View File

@@ -3,7 +3,6 @@ package com.kamco.cd.kamcoback.log;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto; import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.log.service.AuditLogService; import com.kamco.cd.kamcoback.log.service.AuditLogService;
import com.kamco.cd.kamcoback.postgres.core.AuditLogCoreService;
import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag; import io.swagger.v3.oas.annotations.tags.Tag;
import java.time.LocalDate; import java.time.LocalDate;
@@ -29,8 +28,7 @@ public class AuditLogApiController {
@RequestParam(required = false) LocalDate endDate, @RequestParam(required = false) LocalDate endDate,
@RequestParam int page, @RequestParam int page,
@RequestParam(defaultValue = "20") int size) { @RequestParam(defaultValue = "20") int size) {
AuditLogDto.searchReq searchReq = AuditLogDto.searchReq searchReq = new AuditLogDto.searchReq(page, size, "created_dttm,desc");
new AuditLogDto.searchReq(page, size, "created_dttm,desc");
Page<AuditLogDto.DailyAuditList> result = Page<AuditLogDto.DailyAuditList> result =
auditLogService.getLogByDaily(searchReq, startDate, endDate); auditLogService.getLogByDaily(searchReq, startDate, endDate);
@@ -44,10 +42,8 @@ public class AuditLogApiController {
@RequestParam LocalDate logDate, @RequestParam LocalDate logDate,
@RequestParam int page, @RequestParam int page,
@RequestParam(defaultValue = "20") int size) { @RequestParam(defaultValue = "20") int size) {
AuditLogDto.searchReq searchReq = AuditLogDto.searchReq searchReq = new AuditLogDto.searchReq(page, size, "created_dttm,desc");
new AuditLogDto.searchReq(page, size, "created_dttm,desc"); Page<AuditLogDto.DailyDetail> result = auditLogService.getLogByDailyResult(searchReq, logDate);
Page<AuditLogDto.DailyDetail> result =
auditLogService.getLogByDailyResult(searchReq, logDate);
return ApiResponseDto.ok(result); return ApiResponseDto.ok(result);
} }
@@ -58,8 +54,7 @@ public class AuditLogApiController {
@RequestParam(required = false) String searchValue, @RequestParam(required = false) String searchValue,
@RequestParam int page, @RequestParam int page,
@RequestParam(defaultValue = "20") int size) { @RequestParam(defaultValue = "20") int size) {
AuditLogDto.searchReq searchReq = AuditLogDto.searchReq searchReq = new AuditLogDto.searchReq(page, size, "created_dttm,desc");
new AuditLogDto.searchReq(page, size, "created_dttm,desc");
Page<AuditLogDto.MenuAuditList> result = auditLogService.getLogByMenu(searchReq, searchValue); Page<AuditLogDto.MenuAuditList> result = auditLogService.getLogByMenu(searchReq, searchValue);
return ApiResponseDto.ok(result); return ApiResponseDto.ok(result);
@@ -71,10 +66,8 @@ public class AuditLogApiController {
@RequestParam String menuId, @RequestParam String menuId,
@RequestParam int page, @RequestParam int page,
@RequestParam(defaultValue = "20") int size) { @RequestParam(defaultValue = "20") int size) {
AuditLogDto.searchReq searchReq = AuditLogDto.searchReq searchReq = new AuditLogDto.searchReq(page, size, "created_dttm,desc");
new AuditLogDto.searchReq(page, size, "created_dttm,desc"); Page<AuditLogDto.MenuDetail> result = auditLogService.getLogByMenuResult(searchReq, menuId);
Page<AuditLogDto.MenuDetail> result =
auditLogService.getLogByMenuResult(searchReq, menuId);
return ApiResponseDto.ok(result); return ApiResponseDto.ok(result);
} }
@@ -85,8 +78,7 @@ public class AuditLogApiController {
@RequestParam(required = false) String searchValue, @RequestParam(required = false) String searchValue,
@RequestParam int page, @RequestParam int page,
@RequestParam(defaultValue = "20") int size) { @RequestParam(defaultValue = "20") int size) {
AuditLogDto.searchReq searchReq = AuditLogDto.searchReq searchReq = new AuditLogDto.searchReq(page, size, "created_dttm,desc");
new AuditLogDto.searchReq(page, size, "created_dttm,desc");
Page<AuditLogDto.UserAuditList> result = Page<AuditLogDto.UserAuditList> result =
auditLogService.getLogByAccount(searchReq, searchValue); auditLogService.getLogByAccount(searchReq, searchValue);
@@ -99,10 +91,8 @@ public class AuditLogApiController {
@RequestParam Long userUid, @RequestParam Long userUid,
@RequestParam int page, @RequestParam int page,
@RequestParam(defaultValue = "20") int size) { @RequestParam(defaultValue = "20") int size) {
AuditLogDto.searchReq searchReq = AuditLogDto.searchReq searchReq = new AuditLogDto.searchReq(page, size, "created_dttm,desc");
new AuditLogDto.searchReq(page, size, "created_dttm,desc"); Page<AuditLogDto.UserDetail> result = auditLogService.getLogByAccountResult(searchReq, userUid);
Page<AuditLogDto.UserDetail> result =
auditLogService.getLogByAccountResult(searchReq, userUid);
return ApiResponseDto.ok(result); return ApiResponseDto.ok(result);
} }

View File

@@ -4,7 +4,6 @@ import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.log.dto.ErrorLogDto; import com.kamco.cd.kamcoback.log.dto.ErrorLogDto;
import com.kamco.cd.kamcoback.log.dto.EventType; import com.kamco.cd.kamcoback.log.dto.EventType;
import com.kamco.cd.kamcoback.log.service.ErrorLogService; import com.kamco.cd.kamcoback.log.service.ErrorLogService;
import com.kamco.cd.kamcoback.postgres.core.ErrorLogCoreService;
import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag; import io.swagger.v3.oas.annotations.tags.Tag;
import java.time.LocalDate; import java.time.LocalDate;

View File

@@ -3,8 +3,6 @@ package com.kamco.cd.kamcoback.log.dto;
import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnore;
import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm; import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Getter; import lombok.Getter;
@@ -72,7 +70,13 @@ public class AuditLogDto {
public static class DailyAuditList extends AuditCommon { public static class DailyAuditList extends AuditCommon {
private final String baseDate; private final String baseDate;
public DailyAuditList(int readCount, int cudCount, int printCount, int downloadCount, Long totalCount, String baseDate) { public DailyAuditList(
int readCount,
int cudCount,
int printCount,
int downloadCount,
Long totalCount,
String baseDate) {
super(readCount, cudCount, printCount, downloadCount, totalCount); super(readCount, cudCount, printCount, downloadCount, totalCount);
this.baseDate = baseDate; this.baseDate = baseDate;
} }
@@ -84,7 +88,14 @@ public class AuditLogDto {
private final String menuId; private final String menuId;
private final String menuName; private final String menuName;
public MenuAuditList(String menuId, String menuName, int readCount, int cudCount, int printCount, int downloadCount, Long totalCount) { public MenuAuditList(
String menuId,
String menuName,
int readCount,
int cudCount,
int printCount,
int downloadCount,
Long totalCount) {
super(readCount, cudCount, printCount, downloadCount, totalCount); super(readCount, cudCount, printCount, downloadCount, totalCount);
this.menuId = menuId; this.menuId = menuId;
this.menuName = menuName; this.menuName = menuName;
@@ -98,7 +109,15 @@ public class AuditLogDto {
private final String loginId; private final String loginId;
private final String username; private final String username;
public UserAuditList(Long accountId, String loginId, String username, int readCount, int cudCount, int printCount, int downloadCount, Long totalCount) { public UserAuditList(
Long accountId,
String loginId,
String username,
int readCount,
int cudCount,
int printCount,
int downloadCount,
Long totalCount) {
super(readCount, cudCount, printCount, downloadCount, totalCount); super(readCount, cudCount, printCount, downloadCount, totalCount);
this.accountId = accountId; this.accountId = accountId;
this.loginId = loginId; this.loginId = loginId;
@@ -122,13 +141,20 @@ public class AuditLogDto {
private final String loginId; private final String loginId;
private final String menuName; private final String menuName;
public DailyDetail(Long logId, String userName, String loginId, String menuName, EventType eventType, LogDetail detail){ public DailyDetail(
Long logId,
String userName,
String loginId,
String menuName,
EventType eventType,
LogDetail detail) {
super(logId, eventType, detail); super(logId, eventType, detail);
this.userName = userName; this.userName = userName;
this.loginId = loginId; this.loginId = loginId;
this.menuName = menuName; this.menuName = menuName;
} }
} }
@Schema(name = "MenuDetail", description = "메뉴별 로그 상세") @Schema(name = "MenuDetail", description = "메뉴별 로그 상세")
@Getter @Getter
public static class MenuDetail extends AuditDetail { public static class MenuDetail extends AuditDetail {
@@ -136,20 +162,28 @@ public class AuditLogDto {
private final String userName; private final String userName;
private final String loginId; private final String loginId;
public MenuDetail(Long logId, String logDateTime, String userName, String loginId, EventType eventType, LogDetail detail){ public MenuDetail(
Long logId,
String logDateTime,
String userName,
String loginId,
EventType eventType,
LogDetail detail) {
super(logId, eventType, detail); super(logId, eventType, detail);
this.logDateTime = logDateTime; this.logDateTime = logDateTime;
this.userName = userName; this.userName = userName;
this.loginId = loginId; this.loginId = loginId;
} }
} }
@Schema(name = "UserDetail", description = "사용자별 로그 상세") @Schema(name = "UserDetail", description = "사용자별 로그 상세")
@Getter @Getter
public static class UserDetail extends AuditDetail { public static class UserDetail extends AuditDetail {
private final String logDateTime; private final String logDateTime;
private final String menuNm; private final String menuNm;
public UserDetail(Long logId, String logDateTime, String menuNm, EventType eventType, LogDetail detail){ public UserDetail(
Long logId, String logDateTime, String menuNm, EventType eventType, LogDetail detail) {
super(logId, eventType, detail); super(logId, eventType, detail);
this.logDateTime = logDateTime; this.logDateTime = logDateTime;
this.menuNm = menuNm; this.menuNm = menuNm;

View File

@@ -2,13 +2,12 @@ package com.kamco.cd.kamcoback.log.service;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto; import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.postgres.core.AuditLogCoreService; import com.kamco.cd.kamcoback.postgres.core.AuditLogCoreService;
import java.time.LocalDate;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.time.LocalDate;
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
@Transactional(readOnly = true) @Transactional(readOnly = true)
@@ -16,34 +15,32 @@ public class AuditLogService {
private final AuditLogCoreService auditLogCoreService; private final AuditLogCoreService auditLogCoreService;
public Page<AuditLogDto.DailyAuditList> getLogByDaily( public Page<AuditLogDto.DailyAuditList> getLogByDaily(
AuditLogDto.searchReq searchRange, LocalDate startDate, LocalDate endDate) { AuditLogDto.searchReq searchRange, LocalDate startDate, LocalDate endDate) {
return auditLogCoreService.getLogByDaily(searchRange, startDate, endDate); return auditLogCoreService.getLogByDaily(searchRange, startDate, endDate);
} }
public Page<AuditLogDto.MenuAuditList> getLogByMenu( public Page<AuditLogDto.MenuAuditList> getLogByMenu(
AuditLogDto.searchReq searchRange, String searchValue) { AuditLogDto.searchReq searchRange, String searchValue) {
return auditLogCoreService.getLogByMenu(searchRange, searchValue); return auditLogCoreService.getLogByMenu(searchRange, searchValue);
} }
public Page<AuditLogDto.UserAuditList> getLogByAccount( public Page<AuditLogDto.UserAuditList> getLogByAccount(
AuditLogDto.searchReq searchRange, String searchValue) { AuditLogDto.searchReq searchRange, String searchValue) {
return auditLogCoreService.getLogByAccount(searchRange, searchValue); return auditLogCoreService.getLogByAccount(searchRange, searchValue);
} }
public Page<AuditLogDto.DailyDetail> getLogByDailyResult( public Page<AuditLogDto.DailyDetail> getLogByDailyResult(
AuditLogDto.searchReq searchRange, LocalDate logDate) { AuditLogDto.searchReq searchRange, LocalDate logDate) {
return auditLogCoreService.getLogByDailyResult(searchRange, logDate); return auditLogCoreService.getLogByDailyResult(searchRange, logDate);
} }
public Page<AuditLogDto.MenuDetail> getLogByMenuResult( public Page<AuditLogDto.MenuDetail> getLogByMenuResult(
AuditLogDto.searchReq searchRange, String menuId) { AuditLogDto.searchReq searchRange, String menuId) {
return auditLogCoreService.getLogByMenuResult(searchRange, menuId); return auditLogCoreService.getLogByMenuResult(searchRange, menuId);
} }
public Page<AuditLogDto.UserDetail> getLogByAccountResult( public Page<AuditLogDto.UserDetail> getLogByAccountResult(
AuditLogDto.searchReq searchRange, Long accountId) { AuditLogDto.searchReq searchRange, Long accountId) {
return auditLogCoreService.getLogByAccountResult(searchRange, accountId); return auditLogCoreService.getLogByAccountResult(searchRange, accountId);
} }
} }

View File

@@ -2,7 +2,6 @@ package com.kamco.cd.kamcoback.model;
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto; import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto; import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.model.dto.ModelVerDto; import com.kamco.cd.kamcoback.model.dto.ModelVerDto;
import com.kamco.cd.kamcoback.model.service.ModelMngService; import com.kamco.cd.kamcoback.model.service.ModelMngService;
@@ -13,13 +12,12 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag; import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.transaction.Transactional; import jakarta.transaction.Transactional;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.*;
import java.time.LocalDate; import java.time.LocalDate;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.*;
@Tag(name = "모델 관리", description = "모델 관리 API") @Tag(name = "모델 관리", description = "모델 관리 API")
@RequiredArgsConstructor @RequiredArgsConstructor
@@ -32,17 +30,17 @@ public class ModelMngApiController {
@Operation(summary = "목록 조회", description = "모든 모델 조회") @Operation(summary = "목록 조회", description = "모든 모델 조회")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
responseCode = "200", responseCode = "200",
description = "조회 성공", description = "조회 성공",
content = content =
@Content( @Content(
mediaType = "application/json", mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))), schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
}) })
@GetMapping @GetMapping
public ApiResponseDto<List<ModelMngDto.Basic>> getFindAll() { public ApiResponseDto<List<ModelMngDto.Basic>> getFindAll() {
return ApiResponseDto.ok(modelMngService.findModelMngAll()); return ApiResponseDto.ok(modelMngService.findModelMngAll());
@@ -50,6 +48,7 @@ public class ModelMngApiController {
/** /**
* 최종 등록 모델 정보 * 최종 등록 모델 정보
*
* @return ModelMngDto.FinalModelDto * @return ModelMngDto.FinalModelDto
*/ */
@Operation(summary = "최종 등록 모델 조회", description = "최종 등록 모델 조회") @Operation(summary = "최종 등록 모델 조회", description = "최종 등록 모델 조회")
@@ -60,6 +59,7 @@ public class ModelMngApiController {
/** /**
* 모델 등록 => 모델, 버전 동시 등록 (UI 상 따로 등록하는 곳 없음) * 모델 등록 => 모델, 버전 동시 등록 (UI 상 따로 등록하는 곳 없음)
*
* @param addReq 모델 입력 값 * @param addReq 모델 입력 값
* @return ModelVerDto.Basic * @return ModelVerDto.Basic
*/ */
@@ -71,7 +71,8 @@ public class ModelMngApiController {
@Operation(summary = "모델 수정", description = "모델 수정") @Operation(summary = "모델 수정", description = "모델 수정")
@PutMapping("/{id}") @PutMapping("/{id}")
public ApiResponseDto<Long> update(@PathVariable Long id, @RequestBody ModelMngDto.AddReq addReq) { public ApiResponseDto<Long> update(
@PathVariable Long id, @RequestBody ModelMngDto.AddReq addReq) {
return ApiResponseDto.ok(modelMngService.update(id, addReq)); return ApiResponseDto.ok(modelMngService.update(id, addReq));
} }
@@ -84,19 +85,19 @@ public class ModelMngApiController {
@Operation(summary = "모델 등록 이력", description = "모델 등록 이력") @Operation(summary = "모델 등록 이력", description = "모델 등록 이력")
@GetMapping("/reg-history") @GetMapping("/reg-history")
public ApiResponseDto<Page<ModelMngDto.ModelRegHistory>> getRegHistoryList( public ApiResponseDto<Page<ModelMngDto.ModelRegHistory>> getRegHistoryList(
@RequestParam(required = false) LocalDate startDate, @RequestParam(required = false) LocalDate startDate,
@RequestParam(required = false) LocalDate endDate, @RequestParam(required = false) LocalDate endDate,
@RequestParam int page, @RequestParam int page,
@RequestParam(defaultValue = "20") int size, @RequestParam(defaultValue = "20") int size,
@RequestParam(required = false) String searchVal, @RequestParam(required = false) String searchVal,
@RequestParam(required = false) String searchColumn @RequestParam(required = false) String searchColumn) {
) {
ModelMngDto.searchReq searchReq = ModelMngDto.searchReq searchReq =
new ModelMngDto.searchReq(page, size, Optional.ofNullable(searchColumn).orElse("createdDate") + ",desc"); new ModelMngDto.searchReq(
//searchColumn:: Entity 컬럼명칭으로 -> 기본값 = 등록일 createdDate, (선택) 배포일 deployDttm page, size, Optional.ofNullable(searchColumn).orElse("createdDate") + ",desc");
// searchColumn:: Entity 컬럼명칭으로 -> 기본값 = 등록일 createdDate, (선택) 배포일 deployDttm
Page<ModelMngDto.ModelRegHistory> result = Page<ModelMngDto.ModelRegHistory> result =
modelMngService.getRegHistoryList(searchReq, startDate, endDate, searchVal); modelMngService.getRegHistoryList(searchReq, startDate, endDate, searchVal);
return ApiResponseDto.ok(result); return ApiResponseDto.ok(result);
} }

View File

@@ -3,6 +3,7 @@ package com.kamco.cd.kamcoback.model.dto;
import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm; import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotEmpty; import jakarta.validation.constraints.NotEmpty;
import java.time.ZonedDateTime;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Getter; import lombok.Getter;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;
@@ -11,8 +12,6 @@ import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort;
import java.time.ZonedDateTime;
public class ModelMngDto { public class ModelMngDto {
@Schema(name = "ModelMng Basic", description = "모델관리 엔티티 기본 정보") @Schema(name = "ModelMng Basic", description = "모델관리 엔티티 기본 정보")
@@ -24,26 +23,23 @@ public class ModelMngDto {
private final String modelCate; private final String modelCate;
private final String modelPath; private final String modelPath;
@JsonFormatDttm @JsonFormatDttm private final ZonedDateTime createdDttm;
private final ZonedDateTime createdDttm;
private final Long createdUid; private final Long createdUid;
@JsonFormatDttm @JsonFormatDttm private final ZonedDateTime updatedDttm;
private final ZonedDateTime updatedDttm;
private final Long updatedUid; private final Long updatedUid;
private final String modelCntnt; private final String modelCntnt;
public Basic( public Basic(
Long id, Long id,
String modelNm, String modelNm,
String modelCate, String modelCate,
String modelPath, String modelPath,
ZonedDateTime createdDttm, ZonedDateTime createdDttm,
Long createdUid, Long createdUid,
ZonedDateTime updatedDttm, ZonedDateTime updatedDttm,
Long updatedUid, Long updatedUid,
String modelCntnt String modelCntnt) {
) {
this.id = id; this.id = id;
this.modelNm = modelNm; this.modelNm = modelNm;
this.modelCate = modelCate; this.modelCate = modelCate;
@@ -70,8 +66,17 @@ public class ModelMngDto {
private final String deployState; private final String deployState;
private final String modelPath; private final String modelPath;
public FinalModelDto(Long modelUid, String modelNm, String modelCate, Long modelVerUid, String modelVer, public FinalModelDto(
String usedState, String modelState, Double qualityProb, String deployState, String modelPath) { Long modelUid,
String modelNm,
String modelCate,
Long modelVerUid,
String modelVer,
String usedState,
String modelState,
Double qualityProb,
String deployState,
String modelPath) {
this.modelUid = modelUid; this.modelUid = modelUid;
this.modelNm = modelNm; this.modelNm = modelNm;
this.modelCate = modelCate; this.modelCate = modelCate;
@@ -117,7 +122,7 @@ public class ModelMngDto {
String[] sortParams = sort.split(","); String[] sortParams = sort.split(",");
String property = sortParams[0]; String property = sortParams[0];
Sort.Direction direction = Sort.Direction direction =
sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC; sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC;
return PageRequest.of(page, size, Sort.by(direction, property)); return PageRequest.of(page, size, Sort.by(direction, property));
} }
return PageRequest.of(page, size); return PageRequest.of(page, size);

View File

@@ -2,13 +2,8 @@ package com.kamco.cd.kamcoback.model.dto;
import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm; import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotEmpty;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import lombok.Getter;
public class ModelVerDto { public class ModelVerDto {
@@ -28,29 +23,26 @@ public class ModelVerDto {
private final String deployState; private final String deployState;
private final String modelPath; private final String modelPath;
@JsonFormatDttm @JsonFormatDttm private final ZonedDateTime createdDttm;
private final ZonedDateTime createdDttm;
private final Long createdUid; private final Long createdUid;
@JsonFormatDttm @JsonFormatDttm private final ZonedDateTime updatedDttm;
private final ZonedDateTime updatedDttm;
private final Long updatedUid; private final Long updatedUid;
public Basic( public Basic(
Long id, Long id,
Long modelUid, Long modelUid,
String modelCate, String modelCate,
String modelVer, String modelVer,
String usedState, String usedState,
String modelState, String modelState,
Double qualityProb, Double qualityProb,
String deployState, String deployState,
String modelPath, String modelPath,
ZonedDateTime createdDttm, ZonedDateTime createdDttm,
Long createdUid, Long createdUid,
ZonedDateTime updatedDttm, ZonedDateTime updatedDttm,
Long updatedUid Long updatedUid) {
) {
this.id = id; this.id = id;
this.modelUid = modelUid; this.modelUid = modelUid;
this.modelCate = modelCate; this.modelCate = modelCate;
@@ -66,5 +58,4 @@ public class ModelVerDto {
this.updatedUid = updatedUid; this.updatedUid = updatedUid;
} }
} }
} }

View File

@@ -1,18 +1,16 @@
package com.kamco.cd.kamcoback.model.service; package com.kamco.cd.kamcoback.model.service;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto; import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.model.dto.ModelVerDto; import com.kamco.cd.kamcoback.model.dto.ModelVerDto;
import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService; import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
import java.time.LocalDate;
import java.util.List;
import java.util.Optional;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.time.LocalDate;
import java.util.List;
import java.util.Optional;
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
@Transactional(readOnly = true) @Transactional(readOnly = true)
@@ -20,7 +18,7 @@ public class ModelMngService {
private final ModelMngCoreService modelMngCoreService; private final ModelMngCoreService modelMngCoreService;
public List<ModelMngDto.Basic> findModelMngAll(){ public List<ModelMngDto.Basic> findModelMngAll() {
return modelMngCoreService.findModelMngAll(); return modelMngCoreService.findModelMngAll();
} }
@@ -40,7 +38,8 @@ public class ModelMngService {
return modelMngCoreService.delete(id); return modelMngCoreService.delete(id);
} }
public Page<ModelMngDto.ModelRegHistory> getRegHistoryList(ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal) { public Page<ModelMngDto.ModelRegHistory> getRegHistoryList(
ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal) {
return modelMngCoreService.getRegHistoryList(searchReq, startDate, endDate, searchVal); return modelMngCoreService.getRegHistoryList(searchReq, startDate, endDate, searchVal);
} }
} }

View File

@@ -8,20 +8,22 @@ import org.springframework.data.domain.Pageable;
public class QuerydslOrderUtil { public class QuerydslOrderUtil {
/** /**
* Pageable의 Sort 정보를 QueryDSL OrderSpecifier 배열로 변환 * Pageable의 Sort 정보를 QueryDSL OrderSpecifier 배열로 변환
*
* @param pageable Spring Pageable * @param pageable Spring Pageable
* @param entityClass 엔티티 클래스 (예: User.class) * @param entityClass 엔티티 클래스 (예: User.class)
* @param alias Q 엔티티 alias (예: "user") * @param alias Q 엔티티 alias (예: "user")
*/ */
public static <T> OrderSpecifier<?>[] getOrderSpecifiers(Pageable pageable, Class<T> entityClass, String alias) { public static <T> OrderSpecifier<?>[] getOrderSpecifiers(
Pageable pageable, Class<T> entityClass, String alias) {
PathBuilder<T> entityPath = new PathBuilder<>(entityClass, alias); PathBuilder<T> entityPath = new PathBuilder<>(entityClass, alias);
return pageable.getSort() return pageable.getSort().stream()
.stream() .map(
.map(sort -> { sort -> {
Order order = sort.isAscending() ? Order.ASC : Order.DESC; Order order = sort.isAscending() ? Order.ASC : Order.DESC;
// PathBuilder.get()는 컬럼명(String)을 동적 Path로 반환 // PathBuilder.get()는 컬럼명(String)을 동적 Path로 반환
return new OrderSpecifier<>(order, entityPath.get(sort.getProperty(), String.class)); return new OrderSpecifier<>(order, entityPath.get(sort.getProperty(), String.class));
}) })
.toArray(OrderSpecifier[]::new); .toArray(OrderSpecifier[]::new);
} }
} }

View File

@@ -1,17 +1,15 @@
package com.kamco.cd.kamcoback.postgres.core; package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto; import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.changedetection.ChangeDetectionRepository; import com.kamco.cd.kamcoback.postgres.repository.changedetection.ChangeDetectionRepository;
import java.util.List;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Point;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
public class ChangeDetectionCoreService { public class ChangeDetectionCoreService {
@@ -21,18 +19,15 @@ public class ChangeDetectionCoreService {
public List<ChangeDetectionDto> getPolygonToPoint() { public List<ChangeDetectionDto> getPolygonToPoint() {
List<MapSheetAnalDataGeomEntity> list = changeDetectionRepository.findAll(); List<MapSheetAnalDataGeomEntity> list = changeDetectionRepository.findAll();
return list.stream().map(p -> { return list.stream()
Geometry polygon = p.getGeom(); .map(
// 중심 좌표 계산 p -> {
Point centroid = polygon.getCentroid(); Geometry polygon = p.getGeom();
// 중심 좌표 계산
Point centroid = polygon.getCentroid();
return new ChangeDetectionDto( return new ChangeDetectionDto(p.getId(), polygon, centroid.getX(), centroid.getY());
p.getId(), })
polygon, .collect(Collectors.toList());
centroid.getX(),
centroid.getY()
);
})
.collect(Collectors.toList());
} }
} }

View File

@@ -75,6 +75,7 @@ public class CommonCodeCoreService
/** /**
* 공통코드 이름 조회 * 공통코드 이름 조회
*
* @param parentCodeCd * @param parentCodeCd
* @param childCodeCd * @param childCodeCd
* @return * @return

View File

@@ -6,7 +6,6 @@ import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalSttcEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository; import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import jakarta.persistence.EntityNotFoundException; import jakarta.persistence.EntityNotFoundException;
import java.util.List; import java.util.List;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
@@ -19,41 +18,49 @@ public class InferenceResultCoreService {
/** /**
* 추론관리 > 분석결과 목록 조회 * 추론관리 > 분석결과 목록 조회
*
* @param searchReq * @param searchReq
* @return * @return
*/ */
public Page<InferenceResultDto.AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq) { public Page<InferenceResultDto.AnalResList> getInferenceResultList(
InferenceResultDto.SearchReq searchReq) {
return inferenceResultRepository.getInferenceResultList(searchReq); return inferenceResultRepository.getInferenceResultList(searchReq);
} }
/** /**
* 분석결과 요약정보 * 분석결과 요약정보
*
* @param id * @param id
* @return * @return
*/ */
public InferenceResultDto.AnalResSummary getInferenceResultSummary(Long id) { public InferenceResultDto.AnalResSummary getInferenceResultSummary(Long id) {
InferenceResultDto.AnalResSummary summary = inferenceResultRepository.getInferenceResultSummary(id).orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id)); InferenceResultDto.AnalResSummary summary =
inferenceResultRepository
.getInferenceResultSummary(id)
.orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
return summary; return summary;
} }
/** /**
* 분석결과 대시보드 조회 * 분석결과 대시보드 조회
*
* @param id * @param id
* @return * @return
*/ */
public List<Dashboard> getInferenceResultDashboard(Long id) { public List<Dashboard> getInferenceResultDashboard(Long id) {
return inferenceResultRepository.getInferenceResultDashboard(id) return inferenceResultRepository.getInferenceResultDashboard(id).stream()
.stream() .map(MapSheetAnalSttcEntity::toDto)
.map(MapSheetAnalSttcEntity::toDto) .toList();
.toList();
} }
/** /**
* 분석결과 상세 목록 * 분석결과 상세 목록
*
* @param searchGeoReq * @param searchGeoReq
* @return * @return
*/ */
public Page<InferenceResultDto.Geom> getInferenceResultGeomList(InferenceResultDto.SearchGeoReq searchGeoReq) { public Page<InferenceResultDto.Geom> getInferenceResultGeomList(
InferenceResultDto.SearchGeoReq searchGeoReq) {
return inferenceResultRepository.getInferenceGeomList(searchGeoReq); return inferenceResultRepository.getInferenceGeomList(searchGeoReq);
} }
} }

View File

@@ -7,13 +7,12 @@ import com.kamco.cd.kamcoback.postgres.entity.ModelVerEntity;
import com.kamco.cd.kamcoback.postgres.repository.model.ModelMngRepository; import com.kamco.cd.kamcoback.postgres.repository.model.ModelMngRepository;
import com.kamco.cd.kamcoback.postgres.repository.model.ModelVerRepository; import com.kamco.cd.kamcoback.postgres.repository.model.ModelVerRepository;
import jakarta.persistence.EntityNotFoundException; import jakarta.persistence.EntityNotFoundException;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
import java.time.LocalDate; import java.time.LocalDate;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
@@ -26,37 +25,57 @@ public class ModelMngCoreService {
return modelMngRepository.findModelMngAll().stream().map(ModelMngEntity::toDto).toList(); return modelMngRepository.findModelMngAll().stream().map(ModelMngEntity::toDto).toList();
} }
public Optional<ModelMngDto.FinalModelDto> getFinalModelInfo(){ public Optional<ModelMngDto.FinalModelDto> getFinalModelInfo() {
return modelMngRepository.getFinalModelInfo(); return modelMngRepository.getFinalModelInfo();
} }
public ModelVerDto.Basic save(ModelMngDto.AddReq addReq) { public ModelVerDto.Basic save(ModelMngDto.AddReq addReq) {
ModelMngEntity modelMngEntity = new ModelMngEntity(addReq.getModelNm(), addReq.getModelCate(), addReq.getModelPath(), ModelMngEntity modelMngEntity =
1L, 1L, addReq.getModelCntnt()); //TODO: 로그인 기능 붙이면 Uid 넣어야 함 new ModelMngEntity(
addReq.getModelNm(),
addReq.getModelCate(),
addReq.getModelPath(),
1L,
1L,
addReq.getModelCntnt()); // TODO: 로그인 기능 붙이면 Uid 넣어야 함
ModelMngEntity saved = modelMngRepository.save(modelMngEntity); ModelMngEntity saved = modelMngRepository.save(modelMngEntity);
ModelVerEntity modelVerEntity = new ModelVerEntity(saved.getId(), addReq.getModelCate(), addReq.getModelVer(), "NONE", "NONE", ModelVerEntity modelVerEntity =
0.0, "NONE", addReq.getModelPath(), 1L, 1L); new ModelVerEntity(
saved.getId(),
addReq.getModelCate(),
addReq.getModelVer(),
"NONE",
"NONE",
0.0,
"NONE",
addReq.getModelPath(),
1L,
1L);
return modelVerRepository.save(modelVerEntity).toDto(); return modelVerRepository.save(modelVerEntity).toDto();
} }
public Long update(Long id, ModelMngDto.AddReq addReq) { public Long update(Long id, ModelMngDto.AddReq addReq) {
//조회 // 조회
ModelVerEntity existData = modelVerRepository.findModelVerById(id) ModelVerEntity existData =
.orElseThrow(EntityNotFoundException::new); //데이터 없는 경우 exception modelVerRepository
.findModelVerById(id)
.orElseThrow(EntityNotFoundException::new); // 데이터 없는 경우 exception
existData.update(addReq); existData.update(addReq);
//TODO: 추후 수정 단계에서 도커파일 업로드하면 버전 업데이트 하는 로직 필요 // TODO: 추후 수정 단계에서 도커파일 업로드하면 버전 업데이트 하는 로직 필요
return existData.getId(); return existData.getId();
} }
public Long delete(Long id) { public Long delete(Long id) {
//조회 // 조회
ModelVerEntity verEntity = modelVerRepository.findModelVerById(id) ModelVerEntity verEntity =
.orElseThrow(() -> new EntityNotFoundException("버전 id 에 대한 정보를 찾을 수 없습니다. id : " + id)); modelVerRepository
.findModelVerById(id)
.orElseThrow(() -> new EntityNotFoundException("버전 id 에 대한 정보를 찾을 수 없습니다. id : " + id));
//usedState가 USED 이거나 이미 삭제된 상태이면 삭제 불가 // usedState가 USED 이거나 이미 삭제된 상태이면 삭제 불가
if (verEntity.getUsedState().equals("USED") || verEntity.isDeleted().equals(true)) { if (verEntity.getUsedState().equals("USED") || verEntity.isDeleted().equals(true)) {
throw new IllegalStateException("해당 모델이 사용중이라 삭제 불가"); //TODO: 추후 규칙 정의되면 수정 필요 throw new IllegalStateException("해당 모델이 사용중이라 삭제 불가"); // TODO: 추후 규칙 정의되면 수정 필요
} }
// id 코드 deleted = true 업데이트 // id 코드 deleted = true 업데이트
@@ -64,7 +83,8 @@ public class ModelMngCoreService {
return verEntity.getId(); return verEntity.getId();
} }
public Page<ModelMngDto.ModelRegHistory> getRegHistoryList(ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal) { public Page<ModelMngDto.ModelRegHistory> getRegHistoryList(
ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal) {
return modelMngRepository.getRegHistoryList(searchReq, startDate, endDate, searchVal); return modelMngRepository.getRegHistoryList(searchReq, startDate, endDate, searchVal);
} }
} }

View File

@@ -8,8 +8,6 @@ import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator; import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table; import jakarta.persistence.Table;
import jakarta.validation.constraints.Size; import jakarta.validation.constraints.Size;
import java.time.Instant;
import java.time.LocalTime;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.Map; import java.util.Map;
import lombok.Getter; import lombok.Getter;
@@ -26,7 +24,10 @@ public class MapSheetAnalDataEntity {
@Id @Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_anal_data_id_gen") @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_anal_data_id_gen")
@SequenceGenerator(name = "tb_map_sheet_anal_data_id_gen", sequenceName = "tb_map_sheet_learn_data_data_uid", allocationSize = 1) @SequenceGenerator(
name = "tb_map_sheet_anal_data_id_gen",
sequenceName = "tb_map_sheet_learn_data_data_uid",
allocationSize = 1)
@Column(name = "data_uid", nullable = false) @Column(name = "data_uid", nullable = false)
private Long id; private Long id;
@@ -103,5 +104,4 @@ public class MapSheetAnalDataEntity {
@Column(name = "detecting_cnt") @Column(name = "detecting_cnt")
private Long detectingCnt; private Long detectingCnt;
} }

View File

@@ -2,12 +2,11 @@ package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.*; import jakarta.persistence.*;
import jakarta.validation.constraints.Size; import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import lombok.Getter; import lombok.Getter;
import lombok.Setter; import lombok.Setter;
import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Geometry;
import java.time.ZonedDateTime;
@Getter @Getter
@Setter @Setter
@Entity @Entity
@@ -15,8 +14,13 @@ import java.time.ZonedDateTime;
public class MapSheetAnalDataGeomEntity { public class MapSheetAnalDataGeomEntity {
@Id @Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_anal_data_geom_id_gen") @GeneratedValue(
@SequenceGenerator(name = "tb_map_sheet_anal_data_geom_id_gen", sequenceName = "tb_map_sheet_learn_data_geom_geom_uid", allocationSize = 1) strategy = GenerationType.SEQUENCE,
generator = "tb_map_sheet_anal_data_geom_id_gen")
@SequenceGenerator(
name = "tb_map_sheet_anal_data_geom_id_gen",
sequenceName = "tb_map_sheet_learn_data_geom_geom_uid",
allocationSize = 1)
@Column(name = "geo_uid", nullable = false) @Column(name = "geo_uid", nullable = false)
private Long id; private Long id;

View File

@@ -2,16 +2,12 @@ package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column; import jakarta.persistence.Column;
import jakarta.persistence.Entity; import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.GeneratedValue; import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType; import jakarta.persistence.GenerationType;
import jakarta.persistence.Id; import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.SequenceGenerator; import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table; import jakarta.persistence.Table;
import jakarta.validation.constraints.Size; import jakarta.validation.constraints.Size;
import java.time.Instant;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import lombok.Getter; import lombok.Getter;
import lombok.Setter; import lombok.Setter;
@@ -25,7 +21,10 @@ public class MapSheetAnalEntity {
@Id @Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_anal_id_gen") @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_anal_id_gen")
@SequenceGenerator(name = "tb_map_sheet_anal_id_gen", sequenceName = "tb_map_sheet_anal_anal_uid", allocationSize = 1) @SequenceGenerator(
name = "tb_map_sheet_anal_id_gen",
sequenceName = "tb_map_sheet_anal_anal_uid",
allocationSize = 1)
@Column(name = "anal_uid", nullable = false) @Column(name = "anal_uid", nullable = false)
private Long id; private Long id;
@@ -92,5 +91,4 @@ public class MapSheetAnalEntity {
@Column(name = "detecting_cnt") @Column(name = "detecting_cnt")
private Long detectingCnt; private Long detectingCnt;
} }

View File

@@ -1,13 +1,11 @@
package com.kamco.cd.kamcoback.postgres.entity; package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import jakarta.persistence.Column; import jakarta.persistence.Column;
import jakarta.persistence.EmbeddedId; import jakarta.persistence.EmbeddedId;
import jakarta.persistence.Entity; import jakarta.persistence.Entity;
import jakarta.persistence.Table; import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.NotNull;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import lombok.Getter; import lombok.Getter;
import lombok.Setter; import lombok.Setter;
@@ -19,8 +17,7 @@ import org.hibernate.annotations.ColumnDefault;
@Table(name = "tb_map_sheet_anal_sttc") @Table(name = "tb_map_sheet_anal_sttc")
public class MapSheetAnalSttcEntity { public class MapSheetAnalSttcEntity {
@EmbeddedId @EmbeddedId private MapSheetAnalSttcEntityId id;
private MapSheetAnalSttcEntityId id;
@Column(name = "class_before_cnt") @Column(name = "class_before_cnt")
private Long classBeforeCnt; private Long classBeforeCnt;
@@ -52,19 +49,18 @@ public class MapSheetAnalSttcEntity {
public InferenceResultDto.Dashboard toDto() { public InferenceResultDto.Dashboard toDto() {
return new InferenceResultDto.Dashboard( return new InferenceResultDto.Dashboard(
id.getCompareYyyy(), id.getCompareYyyy(),
id.getTargetYyyy(), id.getTargetYyyy(),
id.getMapSheetNum(), id.getMapSheetNum(),
id.getClassBeforeName(), id.getClassBeforeName(),
id.getClassAfterName(), id.getClassAfterName(),
this.classBeforeCnt, this.classBeforeCnt,
this.classAfterCnt, this.classAfterCnt,
this.createdDttm, this.createdDttm,
this.createdUid, this.createdUid,
this.updatedDttm, this.updatedDttm,
this.updatedUid, this.updatedUid,
this.refMapSheetNum, this.refMapSheetNum,
this.dataUid this.dataUid);
);
} }
} }

View File

@@ -16,6 +16,7 @@ import org.hibernate.Hibernate;
public class MapSheetAnalSttcEntityId implements Serializable { public class MapSheetAnalSttcEntityId implements Serializable {
private static final long serialVersionUID = -8630519290255405042L; private static final long serialVersionUID = -8630519290255405042L;
@NotNull @NotNull
@Column(name = "compare_yyyy", nullable = false) @Column(name = "compare_yyyy", nullable = false)
private Integer compareYyyy; private Integer compareYyyy;
@@ -47,16 +48,15 @@ public class MapSheetAnalSttcEntityId implements Serializable {
return false; return false;
} }
MapSheetAnalSttcEntityId entity = (MapSheetAnalSttcEntityId) o; MapSheetAnalSttcEntityId entity = (MapSheetAnalSttcEntityId) o;
return Objects.equals(this.targetYyyy, entity.targetYyyy) && return Objects.equals(this.targetYyyy, entity.targetYyyy)
Objects.equals(this.classBeforeName, entity.classBeforeName) && && Objects.equals(this.classBeforeName, entity.classBeforeName)
Objects.equals(this.classAfterName, entity.classAfterName) && && Objects.equals(this.classAfterName, entity.classAfterName)
Objects.equals(this.compareYyyy, entity.compareYyyy) && && Objects.equals(this.compareYyyy, entity.compareYyyy)
Objects.equals(this.mapSheetNum, entity.mapSheetNum); && Objects.equals(this.mapSheetNum, entity.mapSheetNum);
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(targetYyyy, classBeforeName, classAfterName, compareYyyy, mapSheetNum); return Objects.hash(targetYyyy, classBeforeName, classAfterName, compareYyyy, mapSheetNum);
} }
} }

View File

@@ -5,8 +5,6 @@ import jakarta.persistence.Entity;
import jakarta.persistence.Id; import jakarta.persistence.Id;
import jakarta.persistence.Table; import jakarta.persistence.Table;
import jakarta.validation.constraints.Size; import jakarta.validation.constraints.Size;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.Map; import java.util.Map;
import lombok.Getter; import lombok.Getter;
@@ -104,5 +102,4 @@ public class MapSheetLearnDataEntity {
@Column(name = "updated_uid") @Column(name = "updated_uid")
private Long updatedUid; private Long updatedUid;
} }

View File

@@ -2,18 +2,12 @@ package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column; import jakarta.persistence.Column;
import jakarta.persistence.Entity; import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.Id; import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.Table; import jakarta.persistence.Table;
import jakarta.validation.constraints.Size; import jakarta.validation.constraints.Size;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import lombok.Getter; import lombok.Getter;
import lombok.Setter; import lombok.Setter;
import org.hibernate.annotations.OnDelete;
import org.hibernate.annotations.OnDeleteAction;
import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Geometry;
@Getter @Getter
@@ -75,5 +69,4 @@ public class MapSheetLearnDataGeomEntity {
@Column(name = "updated_uid") @Column(name = "updated_uid")
private Long updatedUid; private Long updatedUid;
} }

View File

@@ -2,11 +2,10 @@ package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.postgres.CommonDateEntity; import com.kamco.cd.kamcoback.postgres.CommonDateEntity;
import jakarta.persistence.*; import jakarta.persistence.*;
import java.time.ZonedDateTime;
import lombok.Getter; import lombok.Getter;
import lombok.Setter; import lombok.Setter;
import java.time.ZonedDateTime;
@Getter @Getter
@Setter @Setter
@Entity @Entity
@@ -14,7 +13,10 @@ import java.time.ZonedDateTime;
public class ModelDeployHstEntity extends CommonDateEntity { public class ModelDeployHstEntity extends CommonDateEntity {
@Id @Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_model_deploy_hst_id_gen") @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_model_deploy_hst_id_gen")
@SequenceGenerator(name = "tb_model_deploy_hst_id_gen", sequenceName = "tb_model_deploy_hst_deploy_uid", allocationSize = 1) @SequenceGenerator(
name = "tb_model_deploy_hst_id_gen",
sequenceName = "tb_model_deploy_hst_deploy_uid",
allocationSize = 1)
@Column(name = "deploy_uid", nullable = false) @Column(name = "deploy_uid", nullable = false)
private Long id; private Long id;

View File

@@ -8,8 +8,6 @@ import lombok.Getter;
import lombok.Setter; import lombok.Setter;
import org.hibernate.annotations.ColumnDefault; import org.hibernate.annotations.ColumnDefault;
import java.time.ZonedDateTime;
@Getter @Getter
@Setter @Setter
@Entity @Entity
@@ -18,7 +16,10 @@ public class ModelMngEntity extends CommonDateEntity {
@Id @Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_model_mng_id_gen") @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_model_mng_id_gen")
@SequenceGenerator(name = "tb_model_mng_id_gen", sequenceName = "tb_model_mng_model_uid", allocationSize = 1) @SequenceGenerator(
name = "tb_model_mng_id_gen",
sequenceName = "tb_model_mng_model_uid",
allocationSize = 1)
@Column(name = "model_uid", nullable = false) @Column(name = "model_uid", nullable = false)
private Long id; private Long id;
@@ -46,8 +47,13 @@ public class ModelMngEntity extends CommonDateEntity {
@Column(name = "model_cntnt", columnDefinition = "TEXT") @Column(name = "model_cntnt", columnDefinition = "TEXT")
private String modelCntnt; private String modelCntnt;
public ModelMngEntity(String modelNm, String modelCate, String modelPath, public ModelMngEntity(
Long createdUid, Long updatedUid, String modelCntnt) { String modelNm,
String modelCate,
String modelPath,
Long createdUid,
Long updatedUid,
String modelCntnt) {
this.modelNm = modelNm; this.modelNm = modelNm;
this.modelCate = modelCate; this.modelCate = modelCate;
this.modelPath = modelPath; this.modelPath = modelPath;
@@ -59,14 +65,14 @@ public class ModelMngEntity extends CommonDateEntity {
public ModelMngDto.Basic toDto() { public ModelMngDto.Basic toDto() {
return new ModelMngDto.Basic( return new ModelMngDto.Basic(
this.id, this.id,
this.modelNm, this.modelNm,
this.modelCate, this.modelCate,
this.modelPath, this.modelPath,
super.getCreatedDate(), super.getCreatedDate(),
this.createdUid, this.createdUid,
super.getModifiedDate(), super.getModifiedDate(),
this.updatedUid, this.updatedUid,
this.modelCntnt); this.modelCntnt);
} }
} }

View File

@@ -19,7 +19,10 @@ public class ModelVerEntity extends CommonDateEntity {
@Id @Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_model_ver_id_gen") @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_model_ver_id_gen")
@SequenceGenerator(name = "tb_model_ver_id_gen", sequenceName = "tb_model_ver_model_ver_uid", allocationSize = 1) @SequenceGenerator(
name = "tb_model_ver_id_gen",
sequenceName = "tb_model_ver_model_ver_uid",
allocationSize = 1)
@Column(name = "model_ver_uid", nullable = false) @Column(name = "model_ver_uid", nullable = false)
private Long id; private Long id;
@@ -62,8 +65,19 @@ public class ModelVerEntity extends CommonDateEntity {
private Boolean deleted = false; private Boolean deleted = false;
public ModelVerEntity(Long id, Long modelUid, String modelCate, String modelVer, String usedState, String modelState, public ModelVerEntity(
Double qualityProb, String deployState, String modelPath, Long createdUid, Long updatedUid, Boolean deleted) { Long id,
Long modelUid,
String modelCate,
String modelVer,
String usedState,
String modelState,
Double qualityProb,
String deployState,
String modelPath,
Long createdUid,
Long updatedUid,
Boolean deleted) {
this.id = id; this.id = id;
this.modelUid = modelUid; this.modelUid = modelUid;
this.modelCate = modelCate; this.modelCate = modelCate;
@@ -78,8 +92,17 @@ public class ModelVerEntity extends CommonDateEntity {
this.deleted = deleted; this.deleted = deleted;
} }
public ModelVerEntity(Long modelUid, String modelCate, String modelVer, String usedState, String modelState, public ModelVerEntity(
Double qualityProb, String deployState, String modelPath, Long createdUid, Long updatedUid) { Long modelUid,
String modelCate,
String modelVer,
String usedState,
String modelState,
Double qualityProb,
String deployState,
String modelPath,
Long createdUid,
Long updatedUid) {
this.modelUid = modelUid; this.modelUid = modelUid;
this.modelCate = modelCate; this.modelCate = modelCate;
this.modelVer = modelVer; this.modelVer = modelVer;
@@ -94,19 +117,19 @@ public class ModelVerEntity extends CommonDateEntity {
public ModelVerDto.Basic toDto() { public ModelVerDto.Basic toDto() {
return new ModelVerDto.Basic( return new ModelVerDto.Basic(
this.id, this.id,
this.modelUid, this.modelUid,
this.modelCate, this.modelCate,
this.modelVer, this.modelVer,
this.usedState, this.usedState,
this.modelState, this.modelState,
this.qualityProb, this.qualityProb,
this.deployState, this.deployState,
this.modelPath, this.modelPath,
super.getCreatedDate(), super.getCreatedDate(),
this.createdUid, this.createdUid,
super.getModifiedDate(), super.getModifiedDate(),
this.updatedUid); this.updatedUid);
} }
public void update(ModelMngDto.AddReq addReq) { public void update(ModelMngDto.AddReq addReq) {
@@ -119,7 +142,7 @@ public class ModelVerEntity extends CommonDateEntity {
return deleted; return deleted;
} }
public void deleted(){ public void deleted() {
this.deleted = true; this.deleted = true;
} }
} }

View File

@@ -3,6 +3,5 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
public interface InferenceResultRepository extends JpaRepository<MapSheetAnalEntity, Long>, InferenceResultRepositoryCustom { public interface InferenceResultRepository
extends JpaRepository<MapSheetAnalEntity, Long>, InferenceResultRepositoryCustom {}
}

View File

@@ -1,17 +1,18 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference; package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalSttcEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalSttcEntity;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
public interface InferenceResultRepositoryCustom { public interface InferenceResultRepositoryCustom {
Page<InferenceResultDto.AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq); Page<InferenceResultDto.AnalResList> getInferenceResultList(
InferenceResultDto.SearchReq searchReq);
Optional<InferenceResultDto.AnalResSummary> getInferenceResultSummary(Long id); Optional<InferenceResultDto.AnalResSummary> getInferenceResultSummary(Long id);
List<MapSheetAnalSttcEntity> getInferenceResultDashboard(Long id);
List<MapSheetAnalSttcEntity> getInferenceResultDashboard(Long id);
Page<InferenceResultDto.Geom> getInferenceGeomList(InferenceResultDto.SearchGeoReq searchGeoReq); Page<InferenceResultDto.Geom> getInferenceGeomList(InferenceResultDto.SearchGeoReq searchGeoReq);
} }

View File

@@ -2,13 +2,13 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalSttcEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalSttcEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity; import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity; import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity; import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity; import com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity; import com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Projections; import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.Expressions; import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.jpa.JPAExpressions; import com.querydsl.jpa.JPAExpressions;
@@ -21,7 +21,6 @@ import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Repository; import org.springframework.stereotype.Repository;
import com.querydsl.core.BooleanBuilder;
@Repository @Repository
@RequiredArgsConstructor @RequiredArgsConstructor
@@ -31,18 +30,20 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
private final QMapSheetAnalEntity mapSheetAnal = QMapSheetAnalEntity.mapSheetAnalEntity; private final QMapSheetAnalEntity mapSheetAnal = QMapSheetAnalEntity.mapSheetAnalEntity;
private final QModelMngEntity tmm = QModelMngEntity.modelMngEntity; private final QModelMngEntity tmm = QModelMngEntity.modelMngEntity;
private final QModelVerEntity tmv = QModelVerEntity.modelVerEntity; private final QModelVerEntity tmv = QModelVerEntity.modelVerEntity;
private final QMapSheetAnalSttcEntity mapSheetAnalSttc = QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity; private final QMapSheetAnalSttcEntity mapSheetAnalSttc =
private final QMapSheetAnalDataGeomEntity mapSheetAnalDataGeom = QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity; QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
private final QMapSheetAnalDataGeomEntity mapSheetAnalDataGeom =
QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
/** /**
* 분석결과 목록 조회 * 분석결과 목록 조회
*
* @param searchReq * @param searchReq
* @return * @return
*/ */
@Override @Override
public Page<InferenceResultDto.AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq) { public Page<InferenceResultDto.AnalResList> getInferenceResultList(
InferenceResultDto.SearchReq searchReq) {
Pageable pageable = searchReq.toPageable(); Pageable pageable = searchReq.toPageable();
// "0000" 전체조회 // "0000" 전체조회
@@ -56,101 +57,98 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
builder.and(mapSheetAnal.analTitle.like("%" + searchReq.getTitle() + "%")); builder.and(mapSheetAnal.analTitle.like("%" + searchReq.getTitle() + "%"));
} }
List<InferenceResultDto.AnalResList> content = queryFactory List<InferenceResultDto.AnalResList> content =
.select(Projections.constructor(InferenceResultDto.AnalResList.class, queryFactory
mapSheetAnal.id, .select(
mapSheetAnal.analTitle, Projections.constructor(
mapSheetAnal.analMapSheet, InferenceResultDto.AnalResList.class,
mapSheetAnal.detectingCnt, mapSheetAnal.id,
mapSheetAnal.analStrtDttm, mapSheetAnal.analTitle,
mapSheetAnal.analEndDttm, mapSheetAnal.analMapSheet,
mapSheetAnal.analSec, mapSheetAnal.detectingCnt,
mapSheetAnal.analPredSec, mapSheetAnal.analStrtDttm,
mapSheetAnal.analState, mapSheetAnal.analEndDttm,
Expressions.stringTemplate("fn_code_name({0}, {1})", "0002", mapSheetAnal.analState), mapSheetAnal.analSec,
mapSheetAnal.gukyuinUsed mapSheetAnal.analPredSec,
)) mapSheetAnal.analState,
.from(mapSheetAnal) Expressions.stringTemplate(
.where( "fn_code_name({0}, {1})", "0002", mapSheetAnal.analState),
builder mapSheetAnal.gukyuinUsed))
) .from(mapSheetAnal)
.offset(pageable.getOffset()) .where(builder)
.limit(pageable.getPageSize()) .offset(pageable.getOffset())
.orderBy(mapSheetAnal.createdDttm.desc()) .limit(pageable.getPageSize())
.fetch(); .orderBy(mapSheetAnal.createdDttm.desc())
.fetch();
long total = queryFactory long total =
.select(mapSheetAnal.id) queryFactory.select(mapSheetAnal.id).from(mapSheetAnal).where(builder).fetchCount();
.from(mapSheetAnal)
.where(
builder
)
.fetchCount();
return new PageImpl<>(content, pageable, total); return new PageImpl<>(content, pageable, total);
} }
/** /**
* 분석결과 요약정보 * 분석결과 요약정보
*
* @param id * @param id
* @return * @return
*/ */
@Override @Override
public Optional<InferenceResultDto.AnalResSummary> getInferenceResultSummary(Long id) { public Optional<InferenceResultDto.AnalResSummary> getInferenceResultSummary(Long id) {
// 1. 최신 버전 UID를 가져오는 서브쿼리 // 1. 최신 버전 UID를 가져오는 서브쿼리
JPQLQuery<Long> latestVerUidSub = JPAExpressions JPQLQuery<Long> latestVerUidSub =
.select(tmv.id.max()) JPAExpressions.select(tmv.id.max()).from(tmv).where(tmv.modelUid.eq(tmm.id));
.from(tmv)
.where(tmv.modelUid.eq(tmm.id));
Optional<InferenceResultDto.AnalResSummary> content = Optional.ofNullable(queryFactory Optional<InferenceResultDto.AnalResSummary> content =
.select(Projections.constructor(InferenceResultDto.AnalResSummary.class, Optional.ofNullable(
mapSheetAnal.id, queryFactory
tmm.modelNm.concat(" ").concat(tmv.modelVer).as("modelInfo"), .select(
mapSheetAnal.targetYyyy, Projections.constructor(
mapSheetAnal.compareYyyy, InferenceResultDto.AnalResSummary.class,
mapSheetAnal.analMapSheet, mapSheetAnal.id,
mapSheetAnal.analStrtDttm, tmm.modelNm.concat(" ").concat(tmv.modelVer).as("modelInfo"),
mapSheetAnal.analEndDttm, mapSheetAnal.targetYyyy,
mapSheetAnal.analSec, mapSheetAnal.compareYyyy,
mapSheetAnal.analPredSec, mapSheetAnal.analMapSheet,
mapSheetAnal.resultUrl, mapSheetAnal.analStrtDttm,
mapSheetAnal.detectingCnt, mapSheetAnal.analEndDttm,
mapSheetAnal.accuracy, mapSheetAnal.analSec,
mapSheetAnal.analState, mapSheetAnal.analPredSec,
Expressions.stringTemplate("fn_code_name({0}, {1})", "0002", mapSheetAnal.analState) mapSheetAnal.resultUrl,
)) mapSheetAnal.detectingCnt,
.from(mapSheetAnal) mapSheetAnal.accuracy,
.leftJoin(tmm).on(mapSheetAnal.modelUid.eq(tmm.id)) mapSheetAnal.analState,
.leftJoin(tmv).on( Expressions.stringTemplate(
tmv.modelUid.eq(tmm.id) "fn_code_name({0}, {1})", "0002", mapSheetAnal.analState)))
.and(tmv.id.eq(latestVerUidSub)) .from(mapSheetAnal)
) .leftJoin(tmm)
.where(mapSheetAnal.id.eq(id)) .on(mapSheetAnal.modelUid.eq(tmm.id))
.fetchOne() .leftJoin(tmv)
); .on(tmv.modelUid.eq(tmm.id).and(tmv.id.eq(latestVerUidSub)))
.where(mapSheetAnal.id.eq(id))
.fetchOne());
return content; return content;
} }
/** /**
* 분석결과 상세 대시보드 조회 * 분석결과 상세 대시보드 조회
*
* @param id * @param id
* @return * @return
*/ */
@Override @Override
public List<MapSheetAnalSttcEntity> getInferenceResultDashboard(Long id) { public List<MapSheetAnalSttcEntity> getInferenceResultDashboard(Long id) {
return queryFactory return queryFactory
.select(mapSheetAnalSttc) .select(mapSheetAnalSttc)
.from(mapSheetAnalSttc) .from(mapSheetAnalSttc)
.where(mapSheetAnalSttc.dataUid.eq(id)) .where(mapSheetAnalSttc.dataUid.eq(id))
.fetch(); .fetch();
} }
/** /**
* 분석결과 상세 목록 * 분석결과 상세 목록
*
* @param searchGeoReq * @param searchGeoReq
* @return * @return
*/ */
@@ -160,45 +158,47 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
BooleanBuilder builder = new BooleanBuilder(); BooleanBuilder builder = new BooleanBuilder();
// 기준년도 분류 // 기준년도 분류
if(searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")){ if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")) {
builder.and(mapSheetAnalDataGeom.classAfterCd.eq(searchGeoReq.getTargetClass())); builder.and(mapSheetAnalDataGeom.classAfterCd.eq(searchGeoReq.getTargetClass()));
} }
// 비교년도 분류 // 비교년도 분류
if(searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")){ if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")) {
builder.and(mapSheetAnalDataGeom.classBeforeCd.eq(searchGeoReq.getCompareClass())); builder.and(mapSheetAnalDataGeom.classBeforeCd.eq(searchGeoReq.getCompareClass()));
} }
// 분석도엽 // 분석도엽
if(searchGeoReq.getMapSheetNum() != null && !searchGeoReq.getMapSheetNum().isEmpty()){ if (searchGeoReq.getMapSheetNum() != null && !searchGeoReq.getMapSheetNum().isEmpty()) {
List<Long> mapSheetNum = searchGeoReq.getMapSheetNum(); List<Long> mapSheetNum = searchGeoReq.getMapSheetNum();
builder.and(mapSheetAnalDataGeom.mapSheetNum.in(mapSheetNum)); builder.and(mapSheetAnalDataGeom.mapSheetNum.in(mapSheetNum));
} }
List<InferenceResultDto.Geom> content = queryFactory List<InferenceResultDto.Geom> content =
.select(Projections.constructor(InferenceResultDto.Geom.class, queryFactory
mapSheetAnalDataGeom.compareYyyy, .select(
mapSheetAnalDataGeom.targetYyyy, Projections.constructor(
mapSheetAnalDataGeom.classBeforeCd, InferenceResultDto.Geom.class,
Expressions.stringTemplate("fn_code_name({0}, {1})", "0000", mapSheetAnalDataGeom.classBeforeCd), mapSheetAnalDataGeom.compareYyyy,
mapSheetAnalDataGeom.classBeforeProb, mapSheetAnalDataGeom.targetYyyy,
mapSheetAnalDataGeom.classAfterCd, mapSheetAnalDataGeom.classBeforeCd,
Expressions.stringTemplate("fn_code_name({0}, {1})", "0000", mapSheetAnalDataGeom.classAfterCd), Expressions.stringTemplate(
mapSheetAnalDataGeom.classAfterProb, "fn_code_name({0}, {1})", "0000", mapSheetAnalDataGeom.classBeforeCd),
mapSheetAnalDataGeom.mapSheetNum)) mapSheetAnalDataGeom.classBeforeProb,
.from(mapSheetAnalDataGeom) mapSheetAnalDataGeom.classAfterCd,
.where(builder) Expressions.stringTemplate(
.fetch() "fn_code_name({0}, {1})", "0000", mapSheetAnalDataGeom.classAfterCd),
; mapSheetAnalDataGeom.classAfterProb,
mapSheetAnalDataGeom.mapSheetNum))
.from(mapSheetAnalDataGeom)
.where(builder)
.fetch();
long total = queryFactory long total =
.select(mapSheetAnalDataGeom.id) queryFactory
.from(mapSheetAnalDataGeom) .select(mapSheetAnalDataGeom.id)
.where( .from(mapSheetAnalDataGeom)
builder .where(builder)
) .fetchCount();
.fetchCount();
return new PageImpl<>(content, pageable, total); return new PageImpl<>(content, pageable, total);
} }

View File

@@ -1,36 +1,27 @@
package com.kamco.cd.kamcoback.postgres.repository; package com.kamco.cd.kamcoback.postgres.repository;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository; import org.springframework.stereotype.Repository;
import java.util.List;
@Repository @Repository
public interface MapSheetLearnDataGeomRepository extends JpaRepository<MapSheetLearnDataGeomEntity, Long> { public interface MapSheetLearnDataGeomRepository
extends JpaRepository<MapSheetLearnDataGeomEntity, Long> {
/** /** 데이터 UID로 지오메트리 정보 조회 */
* 데이터 UID로 지오메트리 정보 조회
*/
List<MapSheetLearnDataGeomEntity> findByDataUid(Long dataUid); List<MapSheetLearnDataGeomEntity> findByDataUid(Long dataUid);
/** /** 도엽 번호로 지오메트리 정보 조회 */
* 도엽 번호로 지오메트리 정보 조회
*/
List<MapSheetLearnDataGeomEntity> findByMapSheetNum(Long mapSheetNum); List<MapSheetLearnDataGeomEntity> findByMapSheetNum(Long mapSheetNum);
/** /** 연도 범위로 지오메트리 정보 조회 */
* 연도 범위로 지오메트리 정보 조회 List<MapSheetLearnDataGeomEntity> findByBeforeYyyyAndAfterYyyy(
*/ Integer beforeYyyy, Integer afterYyyy);
List<MapSheetLearnDataGeomEntity> findByBeforeYyyyAndAfterYyyy(Integer beforeYyyy, Integer afterYyyy);
/** /** 지오메트리 타입별 조회 */
* 지오메트리 타입별 조회
*/
List<MapSheetLearnDataGeomEntity> findByGeoType(String geoType); List<MapSheetLearnDataGeomEntity> findByGeoType(String geoType);
/** /** 데이터 UID로 기존 지오메트리 데이터 삭제 (재생성 전에 사용) */
* 데이터 UID로 기존 지오메트리 데이터 삭제 (재생성 전에 사용)
*/
void deleteByDataUid(Long dataUid); void deleteByDataUid(Long dataUid);
} }

View File

@@ -1,47 +1,32 @@
package com.kamco.cd.kamcoback.postgres.repository; package com.kamco.cd.kamcoback.postgres.repository;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
@Repository @Repository
public interface MapSheetLearnDataRepository extends JpaRepository<MapSheetLearnDataEntity, Long> { public interface MapSheetLearnDataRepository extends JpaRepository<MapSheetLearnDataEntity, Long> {
/** /** 데이터 이름으로 조회 */
* 데이터 이름으로 조회
*/
Optional<MapSheetLearnDataEntity> findByDataName(String dataName); Optional<MapSheetLearnDataEntity> findByDataName(String dataName);
/** /** 데이터 경로로 조회 */
* 데이터 경로로 조회
*/
Optional<MapSheetLearnDataEntity> findByDataPath(String dataPath); Optional<MapSheetLearnDataEntity> findByDataPath(String dataPath);
/** /** 처리 상태별 조회 */
* 처리 상태별 조회
*/
List<MapSheetLearnDataEntity> findByDataState(String dataState); List<MapSheetLearnDataEntity> findByDataState(String dataState);
/** /** 데이터 타입별 조회 */
* 데이터 타입별 조회
*/
List<MapSheetLearnDataEntity> findByDataType(String dataType); List<MapSheetLearnDataEntity> findByDataType(String dataType);
/** /** 분석 상태별 조회 */
* 분석 상태별 조회
*/
List<MapSheetLearnDataEntity> findByAnalState(String analState); List<MapSheetLearnDataEntity> findByAnalState(String analState);
/** /** 분석 상태별 개수 조회 */
* 분석 상태별 개수 조회
*/
long countByAnalState(String analState); long countByAnalState(String analState);
/** /** 처리되지 않은 데이터 조회 (data_state가 'PENDING' 또는 null인 것들) */
* 처리되지 않은 데이터 조회 (data_state가 'PENDING' 또는 null인 것들)
*/
List<MapSheetLearnDataEntity> findByDataStateIsNullOrDataState(String dataState); List<MapSheetLearnDataEntity> findByDataStateIsNullOrDataState(String dataState);
} }

View File

@@ -3,4 +3,5 @@ package com.kamco.cd.kamcoback.postgres.repository.changedetection;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
public interface ChangeDetectionRepository extends JpaRepository<MapSheetAnalDataGeomEntity, Long>, ChangeDetectionRepositoryCustom {} public interface ChangeDetectionRepository
extends JpaRepository<MapSheetAnalDataGeomEntity, Long>, ChangeDetectionRepositoryCustom {}

View File

@@ -1,9 +1,6 @@
package com.kamco.cd.kamcoback.postgres.repository.changedetection; package com.kamco.cd.kamcoback.postgres.repository.changedetection;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
public interface ChangeDetectionRepositoryCustom { public interface ChangeDetectionRepositoryCustom {
String getPolygonToPoint(); String getPolygonToPoint();
} }

View File

@@ -1,15 +1,14 @@
package com.kamco.cd.kamcoback.postgres.repository.changedetection; package com.kamco.cd.kamcoback.postgres.repository.changedetection;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
import java.util.List;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity; import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.List;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
implements ChangeDetectionRepositoryCustom { implements ChangeDetectionRepositoryCustom {
private final JPAQueryFactory queryFactory; private final JPAQueryFactory queryFactory;
@@ -26,8 +25,8 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
public List<MapSheetAnalDataGeomEntity> findAll() { public List<MapSheetAnalDataGeomEntity> findAll() {
return queryFactory return queryFactory
.selectFrom(mapSheetAnalDataGeomEntity) .selectFrom(mapSheetAnalDataGeomEntity)
.orderBy(mapSheetAnalDataGeomEntity.id.desc()) .orderBy(mapSheetAnalDataGeomEntity.id.desc())
.fetch(); .fetch();
} }
} }

View File

@@ -88,13 +88,13 @@ public class CommonCodeRepositoryImpl extends QuerydslRepositorySupport
QCommonCodeEntity parent = QCommonCodeEntity.commonCodeEntity; QCommonCodeEntity parent = QCommonCodeEntity.commonCodeEntity;
QCommonCodeEntity child = new QCommonCodeEntity("child"); QCommonCodeEntity child = new QCommonCodeEntity("child");
String result = queryFactory String result =
.select(child.name) queryFactory
.from(child) .select(child.name)
.join(child.parent, parent) .from(child)
.where(parent.code.eq(parentCodeCd) .join(child.parent, parent)
.and(child.code.eq(childCodeCd))) .where(parent.code.eq(parentCodeCd).and(child.code.eq(childCodeCd)))
.fetchFirst(); // 단일 결과만 .fetchFirst(); // 단일 결과만
return Optional.ofNullable(result); return Optional.ofNullable(result);
} }

View File

@@ -18,8 +18,7 @@ public interface AuditLogRepositoryCustom {
Page<AuditLogDto.DailyDetail> findLogByDailyResult( Page<AuditLogDto.DailyDetail> findLogByDailyResult(
AuditLogDto.searchReq searchReq, LocalDate logDate); AuditLogDto.searchReq searchReq, LocalDate logDate);
Page<AuditLogDto.MenuDetail> findLogByMenuResult( Page<AuditLogDto.MenuDetail> findLogByMenuResult(AuditLogDto.searchReq searchReq, String menuId);
AuditLogDto.searchReq searchReq, String menuId);
Page<AuditLogDto.UserDetail> findLogByAccountResult( Page<AuditLogDto.UserDetail> findLogByAccountResult(
AuditLogDto.searchReq searchReq, Long accountId); AuditLogDto.searchReq searchReq, Long accountId);

View File

@@ -39,7 +39,7 @@ public class AuditLogRepositoryImpl extends QuerydslRepositorySupport
public Page<AuditLogDto.DailyAuditList> findLogByDaily( public Page<AuditLogDto.DailyAuditList> findLogByDaily(
AuditLogDto.searchReq searchReq, LocalDate startDate, LocalDate endDate) { AuditLogDto.searchReq searchReq, LocalDate startDate, LocalDate endDate) {
StringExpression groupDateTime = StringExpression groupDateTime =
Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", auditLogEntity.createdDate); Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", auditLogEntity.createdDate);
Pageable pageable = searchReq.toPageable(); Pageable pageable = searchReq.toPageable();
List<AuditLogDto.DailyAuditList> foundContent = List<AuditLogDto.DailyAuditList> foundContent =
@@ -52,9 +52,7 @@ public class AuditLogRepositoryImpl extends QuerydslRepositorySupport
printCount().as("printCount"), printCount().as("printCount"),
downloadCount().as("downloadCount"), downloadCount().as("downloadCount"),
auditLogEntity.count().as("totalCount"), auditLogEntity.count().as("totalCount"),
groupDateTime.as("baseDate") groupDateTime.as("baseDate")))
)
)
.from(auditLogEntity) .from(auditLogEntity)
.where(eventEndedAtBetween(startDate, endDate)) .where(eventEndedAtBetween(startDate, endDate))
.groupBy(groupDateTime) .groupBy(groupDateTime)
@@ -242,7 +240,9 @@ public class AuditLogRepositoryImpl extends QuerydslRepositorySupport
Projections.constructor( Projections.constructor(
AuditLogDto.MenuDetail.class, AuditLogDto.MenuDetail.class,
auditLogEntity.id.as("logId"), auditLogEntity.id.as("logId"),
Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", auditLogEntity.createdDate).as("logDateTime"), //?? Expressions.stringTemplate(
"to_char({0}, 'YYYY-MM-DD')", auditLogEntity.createdDate)
.as("logDateTime"), // ??
userEntity.userNm.as("userName"), userEntity.userNm.as("userName"),
userEntity.userId.as("loginId"), userEntity.userId.as("loginId"),
auditLogEntity.eventType.as("eventType"), auditLogEntity.eventType.as("eventType"),
@@ -307,7 +307,9 @@ public class AuditLogRepositoryImpl extends QuerydslRepositorySupport
Projections.constructor( Projections.constructor(
AuditLogDto.UserDetail.class, AuditLogDto.UserDetail.class,
auditLogEntity.id.as("logId"), auditLogEntity.id.as("logId"),
Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", auditLogEntity.createdDate).as("logDateTime"), Expressions.stringTemplate(
"to_char({0}, 'YYYY-MM-DD')", auditLogEntity.createdDate)
.as("logDateTime"),
menuEntity.menuNm.as("menuName"), menuEntity.menuNm.as("menuName"),
auditLogEntity.eventType.as("eventType"), auditLogEntity.eventType.as("eventType"),
Projections.constructor( Projections.constructor(
@@ -392,7 +394,7 @@ public class AuditLogRepositoryImpl extends QuerydslRepositorySupport
private BooleanExpression eventEndedAtEqDate(LocalDate logDate) { private BooleanExpression eventEndedAtEqDate(LocalDate logDate) {
StringExpression eventEndedDate = StringExpression eventEndedDate =
Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", auditLogEntity.createdDate); Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", auditLogEntity.createdDate);
LocalDateTime comparisonDate = logDate.atStartOfDay(); LocalDateTime comparisonDate = logDate.atStartOfDay();
return eventEndedDate.eq(comparisonDate.toString()); return eventEndedDate.eq(comparisonDate.toString());

View File

@@ -3,4 +3,5 @@ package com.kamco.cd.kamcoback.postgres.repository.model;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity; import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
public interface ModelMngRepository extends JpaRepository<ModelMngEntity, Long>, ModelMngRepositoryCustom {} public interface ModelMngRepository
extends JpaRepository<ModelMngEntity, Long>, ModelMngRepositoryCustom {}

View File

@@ -2,11 +2,10 @@ package com.kamco.cd.kamcoback.postgres.repository.model;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto; import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity; import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import org.springframework.data.domain.Page;
import java.time.LocalDate; import java.time.LocalDate;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import org.springframework.data.domain.Page;
public interface ModelMngRepositoryCustom { public interface ModelMngRepositoryCustom {
@@ -14,5 +13,6 @@ public interface ModelMngRepositoryCustom {
Optional<ModelMngDto.FinalModelDto> getFinalModelInfo(); Optional<ModelMngDto.FinalModelDto> getFinalModelInfo();
Page<ModelMngDto.ModelRegHistory> getRegHistoryList(ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal); Page<ModelMngDto.ModelRegHistory> getRegHistoryList(
ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal);
} }

View File

@@ -1,5 +1,9 @@
package com.kamco.cd.kamcoback.postgres.repository.model; package com.kamco.cd.kamcoback.postgres.repository.model;
import static com.kamco.cd.kamcoback.postgres.entity.QModelDeployHstEntity.modelDeployHstEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity.modelMngEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity.modelVerEntity;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto; import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.QuerydslOrderUtil; import com.kamco.cd.kamcoback.postgres.QuerydslOrderUtil;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity; import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
@@ -10,24 +14,19 @@ import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.StringExpression; import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory; import com.querydsl.jpa.impl.JPAQueryFactory;
import io.micrometer.common.util.StringUtils; import io.micrometer.common.util.StringUtils;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
import java.time.LocalDate; import java.time.LocalDate;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.Optional; import java.util.Optional;
import org.springframework.data.domain.Page;
import static com.kamco.cd.kamcoback.postgres.entity.QModelDeployHstEntity.modelDeployHstEntity; import org.springframework.data.domain.PageImpl;
import static com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity.modelMngEntity; import org.springframework.data.domain.Pageable;
import static com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity.modelVerEntity; import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
public class ModelMngRepositoryImpl extends QuerydslRepositorySupport public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
implements ModelMngRepositoryCustom { implements ModelMngRepositoryCustom {
private final JPAQueryFactory queryFactory; private final JPAQueryFactory queryFactory;
private final StringExpression NULL_STRING = Expressions.stringTemplate("cast(null as text)"); private final StringExpression NULL_STRING = Expressions.stringTemplate("cast(null as text)");
@@ -39,85 +38,81 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
@Override @Override
public List<ModelMngEntity> findModelMngAll() { public List<ModelMngEntity> findModelMngAll() {
return queryFactory return queryFactory.selectFrom(modelMngEntity).orderBy(modelMngEntity.id.desc()).fetch();
.selectFrom(modelMngEntity)
.orderBy(modelMngEntity.id.desc())
.fetch();
} }
@Override @Override
public Optional<ModelMngDto.FinalModelDto> getFinalModelInfo(){ public Optional<ModelMngDto.FinalModelDto> getFinalModelInfo() {
return queryFactory return queryFactory
.select( .select(
Projections.constructor( Projections.constructor(
ModelMngDto.FinalModelDto.class, ModelMngDto.FinalModelDto.class,
modelMngEntity.id.as("modelUid"), modelMngEntity.id.as("modelUid"),
modelMngEntity.modelNm, modelMngEntity.modelNm,
modelMngEntity.modelCate, modelMngEntity.modelCate,
modelVerEntity.id.as("modelVerUid"), modelVerEntity.id.as("modelVerUid"),
modelVerEntity.modelVer, modelVerEntity.modelVer,
modelVerEntity.usedState, modelVerEntity.usedState,
modelVerEntity.modelState, modelVerEntity.modelState,
modelVerEntity.qualityProb, modelVerEntity.qualityProb,
modelVerEntity.deployState, modelVerEntity.deployState,
modelVerEntity.modelPath modelVerEntity.modelPath))
)
)
.from(modelMngEntity) .from(modelMngEntity)
.innerJoin(modelVerEntity) .innerJoin(modelVerEntity)
.on(modelMngEntity.id.eq(modelVerEntity.modelUid)) .on(modelMngEntity.id.eq(modelVerEntity.modelUid))
.where(modelVerEntity.usedState.eq("USED")) //USED 인 것 중에 .where(modelVerEntity.usedState.eq("USED")) // USED 인 것 중에
.orderBy(modelVerEntity.modelVer.desc()) //Version 높은 것 기준 .orderBy(modelVerEntity.modelVer.desc()) // Version 높은 것 기준
.stream() .stream()
.findFirst(); .findFirst();
} }
@Override @Override
public Page<ModelMngDto.ModelRegHistory> getRegHistoryList(ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal) { public Page<ModelMngDto.ModelRegHistory> getRegHistoryList(
ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal) {
Pageable pageable = searchReq.toPageable(); Pageable pageable = searchReq.toPageable();
List<ModelMngDto.ModelRegHistory> foundContent = List<ModelMngDto.ModelRegHistory> foundContent =
queryFactory queryFactory
.select( .select(
Projections.constructor( Projections.constructor(
ModelMngDto.ModelRegHistory.class, ModelMngDto.ModelRegHistory.class,
modelMngEntity.modelNm, modelMngEntity.modelNm,
modelMngEntity.modelCate, modelMngEntity.modelCate,
modelVerEntity.modelVer, modelVerEntity.modelVer,
Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", modelVerEntity.createdDate).as("createdDttm"), Expressions.stringTemplate(
modelVerEntity.usedState, "to_char({0}, 'YYYY-MM-DD')", modelVerEntity.createdDate)
modelVerEntity.deployState, .as("createdDttm"),
Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", modelDeployHstEntity.deployDttm).as("deployDttm") modelVerEntity.usedState,
) modelVerEntity.deployState,
) Expressions.stringTemplate(
"to_char({0}, 'YYYY-MM-DD')", modelDeployHstEntity.deployDttm)
.as("deployDttm")))
.from(modelMngEntity) .from(modelMngEntity)
.innerJoin(modelVerEntity) .innerJoin(modelVerEntity)
.on(modelMngEntity.id.eq(modelVerEntity.modelUid)) .on(modelMngEntity.id.eq(modelVerEntity.modelUid))
.leftJoin(modelDeployHstEntity) .leftJoin(modelDeployHstEntity)
.on( .on(
modelVerEntity.id.eq(modelDeployHstEntity.modelVerUid) modelVerEntity
.and(modelDeployHstEntity.serverId.eq(1L)) //1건만 조회해야 하기에 1번 서버만 조회하기 .id
) .eq(modelDeployHstEntity.modelVerUid)
.where( .and(modelDeployHstEntity.serverId.eq(1L)) // 1건만 조회해야 하기에 1번 서버만 조회하기
eventEndedAtBetween(startDate, endDate), )
searchModelVerLike(searchVal) .where(eventEndedAtBetween(startDate, endDate), searchModelVerLike(searchVal))
)
.offset(pageable.getOffset()) .offset(pageable.getOffset())
.limit(pageable.getPageSize()) .limit(pageable.getPageSize())
.orderBy(QuerydslOrderUtil.getOrderSpecifiers(pageable, ModelVerEntity.class, "modelVerEntity")) .orderBy(
QuerydslOrderUtil.getOrderSpecifiers(
pageable, ModelVerEntity.class, "modelVerEntity"))
.fetch(); .fetch();
Long countQuery = Long countQuery =
queryFactory queryFactory
.select(modelVerEntity.id.count()) .select(modelVerEntity.id.count())
.from(modelMngEntity) .from(modelMngEntity)
.innerJoin(modelVerEntity) .innerJoin(modelVerEntity)
.on(modelMngEntity.id.eq(modelVerEntity.modelUid)) .on(modelMngEntity.id.eq(modelVerEntity.modelUid))
.where( .where(eventEndedAtBetween(startDate, endDate), searchModelVerLike(searchVal))
eventEndedAtBetween(startDate, endDate), .fetchOne();
searchModelVerLike(searchVal)
)
.fetchOne();
return new PageImpl<>(foundContent, pageable, countQuery); return new PageImpl<>(foundContent, pageable, countQuery);
} }
@@ -128,11 +123,13 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
} }
LocalDateTime startDateTime = startDate.atStartOfDay(); LocalDateTime startDateTime = startDate.atStartOfDay();
LocalDateTime endDateTime = endDate.plusDays(1).atStartOfDay(); LocalDateTime endDateTime = endDate.plusDays(1).atStartOfDay();
return modelMngEntity.createdDate.goe(ZonedDateTime.from(startDateTime)) return modelMngEntity
.and(modelMngEntity.modifiedDate.lt(ZonedDateTime.from(endDateTime))); .createdDate
.goe(ZonedDateTime.from(startDateTime))
.and(modelMngEntity.modifiedDate.lt(ZonedDateTime.from(endDateTime)));
} }
private BooleanExpression searchModelVerLike(String searchVal){ private BooleanExpression searchModelVerLike(String searchVal) {
if (StringUtils.isBlank(searchVal)) { if (StringUtils.isBlank(searchVal)) {
return null; return null;
} }

View File

@@ -1,7 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.model; package com.kamco.cd.kamcoback.postgres.repository.model;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import com.kamco.cd.kamcoback.postgres.entity.ModelVerEntity; import com.kamco.cd.kamcoback.postgres.entity.ModelVerEntity;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
public interface ModelVerRepository extends JpaRepository<ModelVerEntity, Long>, ModelVerRepositoryCustom {} public interface ModelVerRepository
extends JpaRepository<ModelVerEntity, Long>, ModelVerRepositoryCustom {}

View File

@@ -1,10 +1,6 @@
package com.kamco.cd.kamcoback.postgres.repository.model; package com.kamco.cd.kamcoback.postgres.repository.model;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import com.kamco.cd.kamcoback.postgres.entity.ModelVerEntity; import com.kamco.cd.kamcoback.postgres.entity.ModelVerEntity;
import java.util.List;
import java.util.Optional; import java.util.Optional;
public interface ModelVerRepositoryCustom { public interface ModelVerRepositoryCustom {

View File

@@ -1,18 +1,17 @@
package com.kamco.cd.kamcoback.postgres.repository.model; package com.kamco.cd.kamcoback.postgres.repository.model;
import static com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity.modelVerEntity;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity; import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import com.kamco.cd.kamcoback.postgres.entity.ModelVerEntity; import com.kamco.cd.kamcoback.postgres.entity.ModelVerEntity;
import com.querydsl.core.types.dsl.Expressions; import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.StringExpression; import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory; import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.Optional;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport; import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
import java.util.Optional;
import static com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity.modelVerEntity;
public class ModelVerRepositoryImpl extends QuerydslRepositorySupport public class ModelVerRepositoryImpl extends QuerydslRepositorySupport
implements ModelVerRepositoryCustom { implements ModelVerRepositoryCustom {
private final JPAQueryFactory queryFactory; private final JPAQueryFactory queryFactory;
private final StringExpression NULL_STRING = Expressions.stringTemplate("cast(null as text)"); private final StringExpression NULL_STRING = Expressions.stringTemplate("cast(null as text)");
@@ -24,10 +23,10 @@ public class ModelVerRepositoryImpl extends QuerydslRepositorySupport
@Override @Override
public Optional<ModelVerEntity> findModelVerById(Long id) { public Optional<ModelVerEntity> findModelVerById(Long id) {
return Optional.ofNullable(queryFactory return Optional.ofNullable(
.selectFrom(modelVerEntity) queryFactory
.where(modelVerEntity.id.eq(id)) //model_ver_uid .selectFrom(modelVerEntity)
.fetchOne() .where(modelVerEntity.id.eq(id)) // model_ver_uid
); .fetchOne());
} }
} }