Run ./gradlew :spotlessApply

2025-11-26 10:13:11 +09:00
parent 51a9f18af2
commit e96cff6ea5
54 changed files with 2095 additions and 2214 deletions
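
This is a formatting-only change: Spotless rewrites every Java source according to the project's configured rules, which is why the hunks below consist of reflowed method arguments, single-line Javadoc, and reordered imports with no behavioral changes. For reference, a minimal Spotless setup in Gradle Kotlin DSL might look like the sketch below; the google-java-format step and the plugin version are assumptions, since the project's actual build script is not part of this diff.

// build.gradle.kts -- hypothetical sketch, not the project's actual configuration
plugins {
    java
    id("com.diffplug.spotless") version "6.25.0" // assumed version
}

spotless {
    java {
        googleJavaFormat()       // 2-space indents and ~100-column wrapping, matching the hunks below
        removeUnusedImports()    // drops imports the code no longer references
        trimTrailingWhitespace()
        endWithNewline()
    }
}

With a configuration like this, ./gradlew :spotlessCheck reports unformatted files and ./gradlew :spotlessApply rewrites them in place, producing a commit like this one.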

View File

@@ -5,13 +5,12 @@ import com.kamco.cd.kamcoback.changedetection.service.ChangeDetectionService;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.transaction.Transactional;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
@Tag(name = "변화탐지", description = "변화탐지 API")
@RequiredArgsConstructor
@RestController

View File

@@ -2,9 +2,4 @@ package com.kamco.cd.kamcoback.changedetection.dto;
import org.locationtech.jts.geom.Geometry;
public record ChangeDetectionDto(
Long id,
Geometry polygon,
double centroidX,
double centroidY
) {}
public record ChangeDetectionDto(Long id, Geometry polygon, double centroidX, double centroidY) {}

View File

@@ -1,13 +1,11 @@
package com.kamco.cd.kamcoback.changedetection.service;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.postgres.core.ChangeDetectionCoreService;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
@RequiredArgsConstructor
public class ChangeDetectionService {

View File

@@ -291,7 +291,7 @@ public class GlobalExceptionHandler {
// TODO : stackTrace limit 20줄? 확인 필요
String stackTraceStr =
Arrays.stream(stackTrace)
// .limit(20)
// .limit(20)
.map(StackTraceElement::toString)
.collect(Collectors.joining("\n"))
.substring(0, Math.min(stackTrace.length, 255));

View File

@@ -1,60 +1,40 @@
package com.kamco.cd.kamcoback.geojson.config;
import jakarta.annotation.PostConstruct;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
import jakarta.annotation.PostConstruct;
/**
* GeoJSON 파일 모니터링 설정
*/
/** GeoJSON 파일 모니터링 설정 */
@Component
@ConfigurationProperties(prefix = "geojson.monitor")
@Getter
@Setter
public class GeoJsonMonitorConfig {
/**
* 모니터링할 폴더 경로
*/
/** 모니터링할 폴더 경로 */
private String watchDirectory = "~/geojson/upload";
/**
* 처리 완료 후 파일을 이동할 폴더 경로
*/
/** 처리 완료 후 파일을 이동할 폴더 경로 */
private String processedDirectory = "~/geojson/processed";
/**
* 처리 실패 파일을 이동할 폴더 경로
*/
/** 처리 실패 파일을 이동할 폴더 경로 */
private String errorDirectory = "~/geojson/error";
/**
* 파일 모니터링 스케줄 (cron 표현식)
* 기본값: 매 30초마다 실행
*/
/** 파일 모니터링 스케줄 (cron 표현식) 기본값: 매 30초마다 실행 */
private String cronExpression = "0/30 * * * * *";
/**
* 지원하는 압축파일 확장자
*/
/** 지원하는 압축파일 확장자 */
private String[] supportedExtensions = {"zip", "tar", "tar.gz", "tgz"};
/**
* 처리할 최대 파일 크기 (바이트)
*/
/** 처리할 최대 파일 크기 (바이트) */
private long maxFileSize = 100 * 1024 * 1024; // 100MB
/**
* 임시 압축해제 폴더
*/
/** 임시 압축해제 폴더 */
private String tempDirectory = "/tmp/geojson_extract";
/**
* 홈 디렉토리 경로 확장
*/
/** 홈 디렉토리 경로 확장 */
@PostConstruct
public void expandPaths() {
watchDirectory = expandPath(watchDirectory);

View File

@@ -4,20 +4,17 @@ import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.PageRequest;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* GeoJSON 데이터 조회 및 테스트용 API 컨트롤러
*/
/** GeoJSON 데이터 조회 및 테스트용 API 컨트롤러 */
@Slf4j
@RestController
@RequestMapping("/api/geojson/data")
@@ -27,9 +24,7 @@ public class GeoJsonDataController {
private final MapSheetLearnDataRepository mapSheetLearnDataRepository;
private final MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
/**
* 학습 데이터 목록 조회
*/
/** 학습 데이터 목록 조회 */
@GetMapping("/learn-data")
public ResponseEntity<Map<String, Object>> getLearnDataList(
@RequestParam(defaultValue = "0") int page,
@@ -62,15 +57,12 @@ public class GeoJsonDataController {
}
}
/**
* 특정 학습 데이터 상세 조회
*/
/** 특정 학습 데이터 상세 조회 */
@GetMapping("/learn-data/{id}")
public ResponseEntity<Map<String, Object>> getLearnDataDetail(@PathVariable Long id) {
try {
if (id == null) {
return ResponseEntity.badRequest()
.body(Map.of("error", "ID가 필요합니다."));
return ResponseEntity.badRequest().body(Map.of("error", "ID가 필요합니다."));
}
Optional<MapSheetLearnDataEntity> learnDataOpt = mapSheetLearnDataRepository.findById(id);
@@ -80,7 +72,8 @@ public class GeoJsonDataController {
}
MapSheetLearnDataEntity learnData = learnDataOpt.get();
List<MapSheetLearnDataGeomEntity> geometryList = mapSheetLearnDataGeomRepository.findByDataUid(id);
List<MapSheetLearnDataGeomEntity> geometryList =
mapSheetLearnDataGeomRepository.findByDataUid(id);
Map<String, Object> response = new HashMap<>();
response.put("learnData", learnData);
@@ -95,9 +88,7 @@ public class GeoJsonDataController {
}
}
/**
* Geometry 데이터 목록 조회
*/
/** Geometry 데이터 목록 조회 */
@GetMapping("/geometry")
public ResponseEntity<Map<String, Object>> getGeometryDataList(
@RequestParam(defaultValue = "0") int page,
@@ -130,19 +121,21 @@ public class GeoJsonDataController {
}
}
/**
* 시스템 통계 정보 조회
*/
/** 시스템 통계 정보 조회 */
@GetMapping("/statistics")
public ResponseEntity<Map<String, Object>> getStatistics() {
try {
long totalLearnData = mapSheetLearnDataRepository.count();
long totalGeometryData = mapSheetLearnDataGeomRepository.count();
List<MapSheetLearnDataEntity> processedData = mapSheetLearnDataRepository.findByDataState("PROCESSED");
List<MapSheetLearnDataEntity> pendingAnalysis = mapSheetLearnDataRepository.findByAnalState("PENDING");
List<MapSheetLearnDataEntity> completedAnalysis = mapSheetLearnDataRepository.findByAnalState("COMPLETED");
List<MapSheetLearnDataEntity> errorAnalysis = mapSheetLearnDataRepository.findByAnalState("ERROR");
List<MapSheetLearnDataEntity> processedData =
mapSheetLearnDataRepository.findByDataState("PROCESSED");
List<MapSheetLearnDataEntity> pendingAnalysis =
mapSheetLearnDataRepository.findByAnalState("PENDING");
List<MapSheetLearnDataEntity> completedAnalysis =
mapSheetLearnDataRepository.findByAnalState("COMPLETED");
List<MapSheetLearnDataEntity> errorAnalysis =
mapSheetLearnDataRepository.findByAnalState("ERROR");
Map<String, Object> statistics = new HashMap<>();
statistics.put("totalLearnData", totalLearnData);
@@ -160,10 +153,8 @@ public class GeoJsonDataController {
statistics.put("completionRate", 0.0);
}
return ResponseEntity.ok(Map.of(
"statistics", statistics,
"timestamp", java.time.Instant.now()
));
return ResponseEntity.ok(
Map.of("statistics", statistics, "timestamp", java.time.Instant.now()));
} catch (Exception e) {
log.error("통계 정보 조회 실패", e);
return ResponseEntity.internalServerError()
@@ -171,9 +162,7 @@ public class GeoJsonDataController {
}
}
/**
* 데이터 상태별 카운트 조회
*/
/** 데이터 상태별 카운트 조회 */
@GetMapping("/status-counts")
public ResponseEntity<Map<String, Object>> getStatusCounts() {
try {
@@ -181,19 +170,25 @@ public class GeoJsonDataController {
Map<String, Long> analStateCounts = new HashMap<>();
// 데이터 상태별 카운트
dataStateCounts.put("PROCESSED", mapSheetLearnDataRepository.findByDataState("PROCESSED").size() + 0L);
dataStateCounts.put("PENDING", mapSheetLearnDataRepository.findByDataStateIsNullOrDataState("PENDING").size() + 0L);
dataStateCounts.put(
"PROCESSED", mapSheetLearnDataRepository.findByDataState("PROCESSED").size() + 0L);
dataStateCounts.put(
"PENDING",
mapSheetLearnDataRepository.findByDataStateIsNullOrDataState("PENDING").size() + 0L);
// 분석 상태별 카운트
analStateCounts.put("PENDING", mapSheetLearnDataRepository.findByAnalState("PENDING").size() + 0L);
analStateCounts.put("COMPLETED", mapSheetLearnDataRepository.findByAnalState("COMPLETED").size() + 0L);
analStateCounts.put("ERROR", mapSheetLearnDataRepository.findByAnalState("ERROR").size() + 0L);
analStateCounts.put(
"PENDING", mapSheetLearnDataRepository.findByAnalState("PENDING").size() + 0L);
analStateCounts.put(
"COMPLETED", mapSheetLearnDataRepository.findByAnalState("COMPLETED").size() + 0L);
analStateCounts.put(
"ERROR", mapSheetLearnDataRepository.findByAnalState("ERROR").size() + 0L);
return ResponseEntity.ok(Map.of(
return ResponseEntity.ok(
Map.of(
"dataStateCounts", dataStateCounts,
"analStateCounts", analStateCounts,
"timestamp", java.time.Instant.now()
));
"timestamp", java.time.Instant.now()));
} catch (Exception e) {
log.error("상태별 카운트 조회 실패", e);
return ResponseEntity.internalServerError()

View File

@@ -2,17 +2,14 @@ package com.kamco.cd.kamcoback.geojson.controller;
import com.kamco.cd.kamcoback.geojson.service.GeoJsonFileMonitorService;
import com.kamco.cd.kamcoback.geojson.service.GeometryConversionService;
import java.util.List;
import java.util.Map;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
/**
* GeoJSON 파일 모니터링 및 처리 API 컨트롤러
*/
/** GeoJSON 파일 모니터링 및 처리 API 컨트롤러 */
@Slf4j
@RestController
@RequestMapping("/api/geojson")
@@ -22,17 +19,13 @@ public class GeoJsonMonitorController {
private final GeoJsonFileMonitorService monitorService;
private final GeometryConversionService geometryConversionService;
/**
* 모니터링 상태 조회
*/
/** 모니터링 상태 조회 */
@GetMapping("/monitor/status")
public Map<String, Object> getMonitorStatus() {
return monitorService.getMonitorStatus();
}
/**
* 시스템 통계 정보 조회
*/
/** 시스템 통계 정보 조회 */
@GetMapping("/monitor/stats")
public ResponseEntity<Map<String, Object>> getSystemStats() {
try {
@@ -41,114 +34,101 @@ public class GeoJsonMonitorController {
} catch (Exception e) {
log.error("시스템 통계 조회 실패", e);
return ResponseEntity.internalServerError()
.body(Map.of(
"error", "시스템 통계 조회 실패: " + e.getMessage(),
"status", "error"
));
.body(Map.of("error", "시스템 통계 조회 실패: " + e.getMessage(), "status", "error"));
}
}
/**
* 디렉토리 초기화 (수동 실행)
*/
/** 디렉토리 초기화 (수동 실행) */
@PostMapping("/monitor/init-directories")
public ResponseEntity<Map<String, Object>> initializeDirectories() {
try {
log.info("디렉토리 초기화 수동 실행 요청");
monitorService.initializeDirectoriesManually();
return ResponseEntity.ok(Map.of(
return ResponseEntity.ok(
Map.of(
"message", "디렉토리 초기화가 완료되었습니다.",
"status", "success"
));
"status", "success"));
} catch (Exception e) {
log.error("디렉토리 초기화 실패", e);
return ResponseEntity.internalServerError()
.body(Map.of(
"error", "디렉토리 초기화 실패: " + e.getMessage(),
"status", "error"
));
.body(Map.of("error", "디렉토리 초기화 실패: " + e.getMessage(), "status", "error"));
}
}
/**
* 수동으로 특정 파일 처리
*/
/** 수동으로 특정 파일 처리 */
@PostMapping("/process/file")
public ResponseEntity<Map<String, Object>> processFileManually(@RequestParam String filePath) {
try {
log.info("수동 파일 처리 요청: {}", filePath);
monitorService.processFileManually(filePath);
return ResponseEntity.ok(Map.of(
return ResponseEntity.ok(
Map.of(
"message", "파일 처리가 완료되었습니다.",
"filePath", filePath,
"status", "success"
));
"status", "success"));
} catch (Exception e) {
log.error("수동 파일 처리 실패: {}", filePath, e);
return ResponseEntity.internalServerError()
.body(Map.of(
"error", "파일 처리 실패: " + e.getMessage(),
"filePath", filePath,
"status", "error"
));
.body(
Map.of(
"error", "파일 처리 실패: " + e.getMessage(), "filePath", filePath, "status", "error"));
}
}
/**
* 미처리된 Geometry 데이터 수동 변환
*/
/** 미처리된 Geometry 데이터 수동 변환 */
@PostMapping("/process/geometry")
public ResponseEntity<Map<String, Object>> processUnprocessedGeometry() {
try {
log.info("미처리 Geometry 변환 수동 실행 요청");
List<Long> processedIds = geometryConversionService.processUnprocessedLearnData();
return ResponseEntity.ok(Map.of(
"message", "Geometry 변환이 완료되었습니다.",
"processedCount", processedIds.size(),
"processedIds", processedIds,
"status", "success"
));
return ResponseEntity.ok(
Map.of(
"message",
"Geometry 변환이 완료되었습니다.",
"processedCount",
processedIds.size(),
"processedIds",
processedIds,
"status",
"success"));
} catch (Exception e) {
log.error("Geometry 변환 실패", e);
return ResponseEntity.internalServerError()
.body(Map.of(
"error", "Geometry 변환 실패: " + e.getMessage(),
"status", "error"
));
.body(Map.of("error", "Geometry 변환 실패: " + e.getMessage(), "status", "error"));
}
}
/**
* 특정 학습 데이터의 Geometry 변환
*/
/** 특정 학습 데이터의 Geometry 변환 */
@PostMapping("/process/geometry/convert")
public ResponseEntity<Map<String, Object>> convertSpecificGeometry(@RequestBody List<Long> learnDataIds) {
public ResponseEntity<Map<String, Object>> convertSpecificGeometry(
@RequestBody List<Long> learnDataIds) {
try {
if (learnDataIds == null || learnDataIds.isEmpty()) {
return ResponseEntity.badRequest()
.body(Map.of("error", "변환할 학습 데이터 ID가 없습니다."));
return ResponseEntity.badRequest().body(Map.of("error", "변환할 학습 데이터 ID가 없습니다."));
}
log.info("특정 학습 데이터 Geometry 변환 요청: {}", learnDataIds);
List<Long> geometryIds = geometryConversionService.convertToGeometryData(learnDataIds);
return ResponseEntity.ok(Map.of(
"message", "Geometry 변환이 완료되었습니다.",
"inputCount", learnDataIds.size(),
"outputCount", geometryIds.size(),
"geometryIds", geometryIds,
"status", "success"
));
return ResponseEntity.ok(
Map.of(
"message",
"Geometry 변환이 완료되었습니다.",
"inputCount",
learnDataIds.size(),
"outputCount",
geometryIds.size(),
"geometryIds",
geometryIds,
"status",
"success"));
} catch (Exception e) {
log.error("특정 Geometry 변환 실패: {}", learnDataIds, e);
return ResponseEntity.internalServerError()
.body(Map.of(
"error", "Geometry 변환 실패: " + e.getMessage(),
"status", "error"
));
.body(Map.of("error", "Geometry 변환 실패: " + e.getMessage(), "status", "error"));
}
}
}

View File

@@ -1,6 +1,9 @@
package com.kamco.cd.kamcoback.geojson.service;
import com.kamco.cd.kamcoback.geojson.config.GeoJsonMonitorConfig;
import java.io.*;
import java.nio.file.*;
import java.util.*;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.compress.archivers.ArchiveEntry;
@@ -10,15 +13,7 @@ import org.apache.commons.compress.archivers.zip.ZipFile;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.springframework.stereotype.Service;
import java.io.*;
import java.nio.file.*;
import java.util.*;
import java.util.stream.Stream;
import java.util.zip.ZipInputStream;
/**
* 압축파일 처리 서비스
*/
/** 압축파일 처리 서비스 */
@Slf4j
@Service
@RequiredArgsConstructor
@@ -26,9 +21,7 @@ public class ArchiveExtractorService {
private final GeoJsonMonitorConfig config;
/**
* 압축파일에서 GeoJSON 파일들을 추출
*/
/** 압축파일에서 GeoJSON 파일들을 추출 */
public Map<String, String> extractGeoJsonFiles(Path archiveFile) throws IOException {
Map<String, String> geoJsonContents = new HashMap<>();
String fileName = archiveFile.getFileName().toString().toLowerCase();
@@ -38,7 +31,9 @@ public class ArchiveExtractorService {
try {
if (fileName.endsWith(".zip")) {
extractFromZip(archiveFile, geoJsonContents);
} else if (fileName.endsWith(".tar") || fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) {
} else if (fileName.endsWith(".tar")
|| fileName.endsWith(".tar.gz")
|| fileName.endsWith(".tgz")) {
extractFromTar(archiveFile, geoJsonContents);
} else {
throw new IllegalArgumentException("지원하지 않는 압축파일 형식: " + fileName);
@@ -52,10 +47,9 @@ public class ArchiveExtractorService {
return geoJsonContents;
}
/**
* ZIP 파일에서 GeoJSON 추출
*/
private void extractFromZip(Path zipFile, Map<String, String> geoJsonContents) throws IOException {
/** ZIP 파일에서 GeoJSON 추출 */
private void extractFromZip(Path zipFile, Map<String, String> geoJsonContents)
throws IOException {
try (ZipFile zip = new ZipFile(zipFile.toFile())) {
Enumeration<ZipArchiveEntry> entries = zip.getEntries();
@@ -73,10 +67,9 @@ public class ArchiveExtractorService {
}
}
/**
* TAR 파일에서 GeoJSON 추출
*/
private void extractFromTar(Path tarFile, Map<String, String> geoJsonContents) throws IOException {
/** TAR 파일에서 GeoJSON 추출 */
private void extractFromTar(Path tarFile, Map<String, String> geoJsonContents)
throws IOException {
String fileName = tarFile.getFileName().toString().toLowerCase();
InputStream fileInputStream = Files.newInputStream(tarFile);
@@ -106,9 +99,7 @@ public class ArchiveExtractorService {
}
}
/**
* InputStream에서 문자열 읽기
*/
/** InputStream에서 문자열 읽기 */
private String readInputStream(InputStream inputStream) throws IOException {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"))) {
StringBuilder content = new StringBuilder();
@@ -122,17 +113,13 @@ public class ArchiveExtractorService {
}
}
/**
* 파일이 GeoJSON 파일인지 확인
*/
/** 파일이 GeoJSON 파일인지 확인 */
private boolean isGeoJsonFile(String fileName) {
String lowerFileName = fileName.toLowerCase();
return lowerFileName.endsWith(".geojson") || lowerFileName.endsWith(".json");
}
/**
* 지원하는 압축파일인지 확인
*/
/** 지원하는 압축파일인지 확인 */
public boolean isSupportedArchive(Path file) {
String fileName = file.getFileName().toString().toLowerCase();
@@ -145,17 +132,18 @@ public class ArchiveExtractorService {
return false;
}
/**
* 파일 크기가 제한 범위 내인지 확인
*/
/** 파일 크기가 제한 범위 내인지 확인 */
public boolean isFileSizeValid(Path file) {
try {
long fileSize = Files.size(file);
boolean isValid = fileSize <= config.getMaxFileSize();
if (!isValid) {
log.warn("파일 크기가 제한을 초과했습니다: {} ({}MB > {}MB)",
file, fileSize / 1024 / 1024, config.getMaxFileSize() / 1024 / 1024);
log.warn(
"파일 크기가 제한을 초과했습니다: {} ({}MB > {}MB)",
file,
fileSize / 1024 / 1024,
config.getMaxFileSize() / 1024 / 1024);
}
return isValid;

View File

@@ -5,17 +5,13 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.time.ZonedDateTime;
import java.util.*;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.time.Instant;
import java.util.*;
/**
* GeoJSON 데이터 처리 서비스
*/
/** GeoJSON 데이터 처리 서비스 */
@Slf4j
@Service
@RequiredArgsConstructor
@@ -24,11 +20,10 @@ public class GeoJsonDataService {
private final MapSheetLearnDataRepository mapSheetLearnDataRepository;
private final ObjectMapper objectMapper;
/**
* GeoJSON 파일들을 데이터베이스에 저장
*/
/** GeoJSON 파일들을 데이터베이스에 저장 */
@Transactional
public List<Long> processGeoJsonFiles(Map<String, String> geoJsonContents, String archiveFileName) {
public List<Long> processGeoJsonFiles(
Map<String, String> geoJsonContents, String archiveFileName) {
List<Long> savedIds = new ArrayList<>();
log.info("GeoJSON 파일 처리 시작: {} ({}개 파일)", archiveFileName, geoJsonContents.size());
@@ -49,15 +44,16 @@ public class GeoJsonDataService {
}
}
log.info("GeoJSON 파일 처리 완료: {} (성공: {}개, 전체: {}개)",
archiveFileName, savedIds.size(), geoJsonContents.size());
log.info(
"GeoJSON 파일 처리 완료: {} (성공: {}개, 전체: {}개)",
archiveFileName,
savedIds.size(),
geoJsonContents.size());
return savedIds;
}
/**
* 개별 GeoJSON 파일을 MapSheetLearnDataEntity로 변환하여 저장
*/
/** 개별 GeoJSON 파일을 MapSheetLearnDataEntity로 변환하여 저장 */
private Long processGeoJsonFile(String fileName, String geoJsonContent, String archiveFileName) {
try {
// GeoJSON 파싱 및 검증
@@ -66,7 +62,8 @@ public class GeoJsonDataService {
// 파일이 이미 처리되었는지 확인
String dataPath = generateDataPath(archiveFileName, fileName);
Optional<MapSheetLearnDataEntity> existingData = mapSheetLearnDataRepository.findByDataPath(dataPath);
Optional<MapSheetLearnDataEntity> existingData =
mapSheetLearnDataRepository.findByDataPath(dataPath);
if (existingData.isPresent()) {
log.warn("이미 처리된 파일입니다: {}", dataPath);
@@ -74,7 +71,8 @@ public class GeoJsonDataService {
}
// 새 엔티티 생성 및 저장
MapSheetLearnDataEntity entity = createMapSheetLearnDataEntity(fileName, geoJsonContent, archiveFileName, geoJsonNode);
MapSheetLearnDataEntity entity =
createMapSheetLearnDataEntity(fileName, geoJsonContent, archiveFileName, geoJsonNode);
MapSheetLearnDataEntity savedEntity = mapSheetLearnDataRepository.save(entity);
return savedEntity.getId();
@@ -85,9 +83,7 @@ public class GeoJsonDataService {
}
}
/**
* GeoJSON 구조 검증
*/
/** GeoJSON 구조 검증 */
private void validateGeoJsonStructure(JsonNode geoJsonNode) {
if (!geoJsonNode.has("type")) {
throw new IllegalArgumentException("유효하지 않은 GeoJSON: 'type' 필드가 없습니다.");
@@ -99,9 +95,7 @@ public class GeoJsonDataService {
}
}
/**
* MapSheetLearnDataEntity 생성
*/
/** MapSheetLearnDataEntity 생성 */
private MapSheetLearnDataEntity createMapSheetLearnDataEntity(
String fileName, String geoJsonContent, String archiveFileName, JsonNode geoJsonNode) {
@@ -146,9 +140,7 @@ public class GeoJsonDataService {
return entity;
}
/**
* CRS 정보 설정
*/
/** CRS 정보 설정 */
private void setCrsInformation(MapSheetLearnDataEntity entity, JsonNode geoJsonNode) {
if (geoJsonNode.has("crs")) {
JsonNode crsNode = geoJsonNode.get("crs");
@@ -169,9 +161,7 @@ public class GeoJsonDataService {
}
}
/**
* 연도 정보 추출
*/
/** 연도 정보 추출 */
private void setYearInformation(MapSheetLearnDataEntity entity, String fileName) {
// 파일명에서 연도 추출 시도 (예: kamco_2021_2022_35813023.geojson)
String[] parts = fileName.split("_");
@@ -194,9 +184,7 @@ public class GeoJsonDataService {
}
}
/**
* 제목 추출
*/
/** 제목 추출 */
private String extractTitle(String fileName, JsonNode geoJsonNode) {
// GeoJSON 메타데이터에서 제목 추출 시도
if (geoJsonNode.has("properties")) {
@@ -218,16 +206,12 @@ public class GeoJsonDataService {
return fileName;
}
/**
* 데이터 경로 생성
*/
/** 데이터 경로 생성 */
private String generateDataPath(String archiveFileName, String fileName) {
return archiveFileName + "/" + fileName;
}
/**
* 처리 가능한 파일 개수 확인
*/
/** 처리 가능한 파일 개수 확인 */
public boolean isProcessable(Map<String, String> geoJsonContents) {
if (geoJsonContents == null || geoJsonContents.isEmpty()) {
return false;

View File

@@ -1,25 +1,20 @@
package com.kamco.cd.kamcoback.geojson.service;
import com.kamco.cd.kamcoback.geojson.config.GeoJsonMonitorConfig;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import jakarta.annotation.PostConstruct;
import java.io.IOException;
import java.nio.file.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
/**
* GeoJSON 파일 모니터링 서비스
* 지정된 폴더를 주기적으로 모니터링하여 압축파일을 자동으로 처리합니다.
*/
/** GeoJSON 파일 모니터링 서비스 지정된 폴더를 주기적으로 모니터링하여 압축파일을 자동으로 처리합니다. */
@Slf4j
@Service
@RequiredArgsConstructor
@@ -32,16 +27,17 @@ public class GeoJsonFileMonitorService {
private final MapSheetLearnDataRepository learnDataRepository;
private final MapSheetLearnDataGeomRepository geomRepository;
/**
* 애플리케이션 시작 시 필요한 디렉토리들을 미리 생성
*/
/** 애플리케이션 시작 시 필요한 디렉토리들을 미리 생성 */
@PostConstruct
public void initializeDirectories() {
try {
log.info("GeoJSON 모니터링 시스템 초기화 중...");
log.info("설정된 경로 - Watch: {}, Processed: {}, Error: {}, Temp: {}",
config.getWatchDirectory(), config.getProcessedDirectory(),
config.getErrorDirectory(), config.getTempDirectory());
log.info(
"설정된 경로 - Watch: {}, Processed: {}, Error: {}, Temp: {}",
config.getWatchDirectory(),
config.getProcessedDirectory(),
config.getErrorDirectory(),
config.getTempDirectory());
ensureDirectoriesExist();
log.info("GeoJSON 모니터링 시스템 초기화 완료");
@@ -51,11 +47,8 @@ public class GeoJsonFileMonitorService {
}
}
/**
* 스케줄러를 통한 파일 모니터링
* 설정된 cron 표현식에 따라 주기적으로 실행
*/
// @Scheduled(cron = "#{@geoJsonMonitorConfig.cronExpression}")
/** 스케줄러를 통한 파일 모니터링 설정된 cron 표현식에 따라 주기적으로 실행 */
// @Scheduled(cron = "#{@geoJsonMonitorConfig.cronExpression}")
public void monitorFiles() {
log.debug("파일 모니터링 시작");
@@ -78,9 +71,7 @@ public class GeoJsonFileMonitorService {
log.debug("파일 모니터링 완료");
}
/**
* 필요한 디렉토리들이 존재하는지 확인하고 생성
*/
/** 필요한 디렉토리들이 존재하는지 확인하고 생성 */
private void ensureDirectoriesExist() {
boolean hasError = false;
try {
@@ -123,9 +114,7 @@ public class GeoJsonFileMonitorService {
}
}
/**
* 디렉토리가 존재하지 않으면 생성
*/
/** 디렉토리가 존재하지 않으면 생성 */
private void createDirectoryIfNotExists(String directory) throws IOException {
if (directory == null || directory.trim().isEmpty()) {
throw new IllegalArgumentException("디렉토리 경로가 비어있습니다.");
@@ -143,8 +132,7 @@ public class GeoJsonFileMonitorService {
if (!System.getProperty("os.name").toLowerCase().contains("windows")) {
// rwxrwxr-x 권한 설정
java.nio.file.attribute.PosixFilePermissions.asFileAttribute(
java.nio.file.attribute.PosixFilePermissions.fromString("rwxrwxr-x")
);
java.nio.file.attribute.PosixFilePermissions.fromString("rwxrwxr-x"));
}
} catch (Exception permissionException) {
log.debug("권한 설정 실패 (무시됨): {}", permissionException.getMessage());
@@ -163,9 +151,7 @@ public class GeoJsonFileMonitorService {
}
}
/**
* 모니터링 폴더에서 압축파일들을 찾아서 처리
*/
/** 모니터링 폴더에서 압축파일들을 찾아서 처리 */
private void processArchiveFiles() {
Path watchDir = Paths.get(config.getWatchDirectory());
@@ -186,7 +172,8 @@ public class GeoJsonFileMonitorService {
}
try (Stream<Path> files = Files.list(watchDir)) {
files.filter(Files::isRegularFile)
files
.filter(Files::isRegularFile)
.filter(archiveExtractorService::isSupportedArchive)
.filter(archiveExtractorService::isFileSizeValid)
.forEach(this::processArchiveFile);
@@ -196,16 +183,15 @@ public class GeoJsonFileMonitorService {
}
}
/**
* 개별 압축파일 처리
*/
/** 개별 압축파일 처리 */
private void processArchiveFile(Path archiveFile) {
String fileName = archiveFile.getFileName().toString();
log.info("압축파일 처리 시작: {}", fileName);
try {
// 1. 압축파일에서 GeoJSON 파일들 추출
Map<String, String> geoJsonContents = archiveExtractorService.extractGeoJsonFiles(archiveFile);
Map<String, String> geoJsonContents =
archiveExtractorService.extractGeoJsonFiles(archiveFile);
if (geoJsonContents.isEmpty()) {
log.warn("압축파일에서 GeoJSON 파일을 찾을 수 없습니다: {}", fileName);
@@ -221,7 +207,8 @@ public class GeoJsonFileMonitorService {
}
// 3. GeoJSON 데이터를 데이터베이스에 저장
List<Long> savedLearnDataIds = geoJsonDataService.processGeoJsonFiles(geoJsonContents, fileName);
List<Long> savedLearnDataIds =
geoJsonDataService.processGeoJsonFiles(geoJsonContents, fileName);
if (savedLearnDataIds.isEmpty()) {
log.warn("저장된 학습 데이터가 없습니다: {}", fileName);
@@ -235,8 +222,11 @@ public class GeoJsonFileMonitorService {
// 5. 처리 완료된 파일을 처리된 폴더로 이동
moveFileToProcessed(archiveFile);
log.info("압축파일 처리 완료: {} (학습 데이터: {}개, Geometry: {}개)",
fileName, savedLearnDataIds.size(), geometryIds.size());
log.info(
"압축파일 처리 완료: {} (학습 데이터: {}개, Geometry: {}개)",
fileName,
savedLearnDataIds.size(),
geometryIds.size());
} catch (Exception e) {
log.error("압축파일 처리 실패: {}", fileName, e);
@@ -248,9 +238,7 @@ public class GeoJsonFileMonitorService {
}
}
/**
* 미처리된 Geometry 변환 작업 수행
*/
/** 미처리된 Geometry 변환 작업 수행 */
private void processUnprocessedGeometryData() {
try {
List<Long> processedIds = geometryConversionService.processUnprocessedLearnData();
@@ -262,9 +250,7 @@ public class GeoJsonFileMonitorService {
}
}
/**
* 처리 완료된 파일을 processed 폴더로 이동
*/
/** 처리 완료된 파일을 processed 폴더로 이동 */
private void moveFileToProcessed(Path sourceFile) throws IOException {
String fileName = sourceFile.getFileName().toString();
String timestampedFileName = addTimestamp(fileName);
@@ -274,9 +260,7 @@ public class GeoJsonFileMonitorService {
log.info("파일을 처리된 폴더로 이동: {} -> {}", fileName, timestampedFileName);
}
/**
* 오류가 발생한 파일을 error 폴더로 이동
*/
/** 오류가 발생한 파일을 error 폴더로 이동 */
private void moveFileToError(Path sourceFile, String errorReason) throws IOException {
String fileName = sourceFile.getFileName().toString();
String errorFileName = addTimestamp(fileName) + ".error";
@@ -287,16 +271,15 @@ public class GeoJsonFileMonitorService {
// 오류 정보를 별도 파일로 저장
String errorInfoFileName = errorFileName + ".info";
Path errorInfoPath = Paths.get(config.getErrorDirectory(), errorInfoFileName);
String errorInfo = String.format("파일: %s%n오류 시간: %s%n오류 원인: %s%n",
fileName, java.time.Instant.now(), errorReason);
String errorInfo =
String.format(
"파일: %s%n오류 시간: %s%n오류 원인: %s%n", fileName, java.time.Instant.now(), errorReason);
Files.write(errorInfoPath, errorInfo.getBytes());
log.warn("파일을 오류 폴더로 이동: {} (원인: {})", fileName, errorReason);
}
/**
* 파일명에 타임스탬프 추가
*/
/** 파일명에 타임스탬프 추가 */
private String addTimestamp(String fileName) {
int lastDotIndex = fileName.lastIndexOf('.');
String name = (lastDotIndex > 0) ? fileName.substring(0, lastDotIndex) : fileName;
@@ -305,9 +288,7 @@ public class GeoJsonFileMonitorService {
return String.format("%s_%d%s", name, System.currentTimeMillis(), extension);
}
/**
* 수동으로 특정 파일 처리 (테스트/관리 목적)
*/
/** 수동으로 특정 파일 처리 (테스트/관리 목적) */
public void processFileManually(String filePath) {
Path archiveFile = Paths.get(filePath);
@@ -325,9 +306,7 @@ public class GeoJsonFileMonitorService {
processArchiveFile(archiveFile);
}
/**
* 디렉토리 초기화를 수동으로 실행 (API에서 호출 가능)
*/
/** 디렉토리 초기화를 수동으로 실행 (API에서 호출 가능) */
public void initializeDirectoriesManually() {
log.info("디렉토리 수동 초기화 시작");
try {
@@ -339,9 +318,7 @@ public class GeoJsonFileMonitorService {
}
}
/**
* 모니터링 상태 정보 반환
*/
/** 모니터링 상태 정보 반환 */
public Map<String, Object> getMonitorStatus() {
return Map.of(
"watchDirectory", config.getWatchDirectory(),
@@ -350,13 +327,10 @@ public class GeoJsonFileMonitorService {
"cronExpression", config.getCronExpression(),
"supportedExtensions", config.getSupportedExtensions(),
"maxFileSize", config.getMaxFileSize(),
"maxFileSizeMB", config.getMaxFileSize() / 1024 / 1024
);
"maxFileSizeMB", config.getMaxFileSize() / 1024 / 1024);
}
/**
* 시스템 통계 정보 조회
*/
/** 시스템 통계 정보 조회 */
public Map<String, Object> getSystemStats() {
Map<String, Object> stats = new HashMap<>();
@@ -366,23 +340,25 @@ public class GeoJsonFileMonitorService {
long totalGeomData = geomRepository.count();
long pendingAnalysis = learnDataRepository.countByAnalState("PENDING");
stats.put("database", Map.of(
stats.put(
"database",
Map.of(
"totalLearnData", totalLearnData,
"totalGeomData", totalGeomData,
"pendingAnalysis", pendingAnalysis
));
"pendingAnalysis", pendingAnalysis));
// 파일 시스템 통계
stats.put("fileSystem", getFileSystemStats());
// 모니터링 설정
stats.put("monitoring", Map.of(
stats.put(
"monitoring",
Map.of(
"isActive", true,
"cronExpression", "0/30 * * * * *",
"watchDirectory", config.getWatchDirectory(),
"processedDirectory", config.getProcessedDirectory(),
"errorDirectory", config.getErrorDirectory()
));
"errorDirectory", config.getErrorDirectory()));
} catch (Exception e) {
log.error("통계 정보 조회 실패", e);
@@ -392,9 +368,7 @@ public class GeoJsonFileMonitorService {
return stats;
}
/**
* 파일 시스템 통계 조회
*/
/** 파일 시스템 통계 조회 */
private Map<String, Object> getFileSystemStats() {
Map<String, Object> fileStats = new HashMap<>();
@@ -416,9 +390,7 @@ public class GeoJsonFileMonitorService {
return fileStats;
}
/**
* 디렉토리 내 파일 개수 계산
*/
/** 디렉토리 내 파일 개수 계산 */
private long countFilesInDirectory(Path directory) {
if (!Files.exists(directory) || !Files.isDirectory(directory)) {
return 0;

View File

@@ -7,18 +7,14 @@ import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.time.ZonedDateTime;
import java.util.*;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.locationtech.jts.geom.*;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.time.Instant;
import java.util.*;
/**
* Geometry 데이터 변환 서비스
*/
/** Geometry 데이터 변환 서비스 */
@Slf4j
@Service
@RequiredArgsConstructor
@@ -29,9 +25,7 @@ public class GeometryConversionService {
private final ObjectMapper objectMapper;
private final GeometryFactory geometryFactory = new GeometryFactory();
/**
* MapSheetLearnData의 JSON 데이터를 기반으로 Geometry 테이블에 저장
*/
/** MapSheetLearnData의 JSON 데이터를 기반으로 Geometry 테이블에 저장 */
@Transactional
public List<Long> convertToGeometryData(List<Long> learnDataIds) {
List<Long> processedIds = new ArrayList<>();
@@ -41,7 +35,8 @@ public class GeometryConversionService {
for (Long dataId : learnDataIds) {
try {
if (dataId != null) {
Optional<MapSheetLearnDataEntity> learnDataOpt = mapSheetLearnDataRepository.findById(dataId);
Optional<MapSheetLearnDataEntity> learnDataOpt =
mapSheetLearnDataRepository.findById(dataId);
if (learnDataOpt.isPresent()) {
List<Long> geometryIds = processLearnDataToGeometry(learnDataOpt.get());
processedIds.addAll(geometryIds);
@@ -56,13 +51,12 @@ public class GeometryConversionService {
}
}
log.info("Geometry 변환 완료: {} 개 처리, {} 개의 geometry 생성", learnDataIds.size(), processedIds.size());
log.info(
"Geometry 변환 완료: {} 개 처리, {} 개의 geometry 생성", learnDataIds.size(), processedIds.size());
return processedIds;
}
/**
* 개별 학습 데이터를 Geometry 데이터로 변환
*/
/** 개별 학습 데이터를 Geometry 데이터로 변환 */
private List<Long> processLearnDataToGeometry(MapSheetLearnDataEntity learnData) {
List<Long> geometryIds = new ArrayList<>();
@@ -116,10 +110,9 @@ public class GeometryConversionService {
return geometryIds;
}
/**
* FeatureCollection 처리
*/
private List<Long> processFeatureCollection(JsonNode featureCollectionNode, MapSheetLearnDataEntity learnData) {
/** FeatureCollection 처리 */
private List<Long> processFeatureCollection(
JsonNode featureCollectionNode, MapSheetLearnDataEntity learnData) {
List<Long> geometryIds = new ArrayList<>();
if (!featureCollectionNode.has("features")) {
@@ -144,9 +137,7 @@ public class GeometryConversionService {
return geometryIds;
}
/**
* Feature 처리
*/
/** Feature 처리 */
private Long processFeature(JsonNode featureNode, MapSheetLearnDataEntity learnData) {
try {
if (!featureNode.has("geometry")) {
@@ -155,7 +146,8 @@ public class GeometryConversionService {
}
JsonNode geometryNode = featureNode.get("geometry");
JsonNode propertiesNode = featureNode.has("properties") ? featureNode.get("properties") : null;
JsonNode propertiesNode =
featureNode.has("properties") ? featureNode.get("properties") : null;
return createGeometryEntity(geometryNode, propertiesNode, learnData);
@@ -165,17 +157,14 @@ public class GeometryConversionService {
}
}
/**
* 직접 Geometry 처리
*/
/** 직접 Geometry 처리 */
private Long processDirectGeometry(JsonNode geometryNode, MapSheetLearnDataEntity learnData) {
return createGeometryEntity(geometryNode, null, learnData);
}
/**
* GeometryEntity 생성 및 저장
*/
private Long createGeometryEntity(JsonNode geometryNode, JsonNode propertiesNode, MapSheetLearnDataEntity learnData) {
/** GeometryEntity 생성 및 저장 */
private Long createGeometryEntity(
JsonNode geometryNode, JsonNode propertiesNode, MapSheetLearnDataEntity learnData) {
try {
MapSheetLearnDataGeomEntity geometryEntity = new MapSheetLearnDataGeomEntity();
@@ -211,7 +200,8 @@ public class GeometryConversionService {
geometryEntity.setUpdatedDttm(now);
// 저장
MapSheetLearnDataGeomEntity savedEntity = mapSheetLearnDataGeomRepository.save(geometryEntity);
MapSheetLearnDataGeomEntity savedEntity =
mapSheetLearnDataGeomRepository.save(geometryEntity);
return savedEntity.getId();
} catch (Exception e) {
@@ -220,9 +210,7 @@ public class GeometryConversionService {
}
}
/**
* GeoJSON 노드에서 JTS Geometry 객체 생성
*/
/** GeoJSON 노드에서 JTS Geometry 객체 생성 */
private Geometry parseGeometryFromGeoJson(JsonNode geometryNode) {
try {
if (!geometryNode.has("type") || !geometryNode.has("coordinates")) {
@@ -349,10 +337,11 @@ public class GeometryConversionService {
return geometryFactory.createMultiPolygon(polygons.toArray(new Polygon[0]));
}
/**
* Properties에서 추가 정보 추출
*/
private void extractPropertiesData(MapSheetLearnDataGeomEntity geometryEntity, JsonNode propertiesNode, MapSheetLearnDataEntity learnData) {
/** Properties에서 추가 정보 추출 */
private void extractPropertiesData(
MapSheetLearnDataGeomEntity geometryEntity,
JsonNode propertiesNode,
MapSheetLearnDataEntity learnData) {
// CD 정확도 정보
if (propertiesNode.has("cd_prob")) {
try {
@@ -410,13 +399,12 @@ public class GeometryConversionService {
}
}
/**
* 미처리된 학습 데이터들을 찾아서 자동으로 geometry 변환 수행
*/
/** 미처리된 학습 데이터들을 찾아서 자동으로 geometry 변환 수행 */
@Transactional
public List<Long> processUnprocessedLearnData() {
// 분석 상태가 PENDING인 학습 데이터 조회
List<MapSheetLearnDataEntity> unprocessedData = mapSheetLearnDataRepository.findByAnalState("PENDING");
List<MapSheetLearnDataEntity> unprocessedData =
mapSheetLearnDataRepository.findByAnalState("PENDING");
if (unprocessedData.isEmpty()) {
log.debug("처리할 미완료 학습 데이터가 없습니다.");

View File

@@ -4,7 +4,6 @@ import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Detail;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchReq;
import com.kamco.cd.kamcoback.inference.service.InferenceResultService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
@@ -29,10 +28,7 @@ public class InferenceResultApiController {
private final InferenceResultService inferenceResultService;
@Operation(
summary = "추론관리 분석결과 목록 조회",
description =
"분석상태, 제목으로 분석결과를 조회 합니다.")
@Operation(summary = "추론관리 분석결과 목록 조회", description = "분석상태, 제목으로 분석결과를 조회 합니다.")
@ApiResponses(
value = {
@ApiResponse(
@@ -47,8 +43,7 @@ public class InferenceResultApiController {
})
@GetMapping("/list")
public ApiResponseDto<Page<InferenceResultDto.AnalResList>> getInferenceResultList(
@Parameter(description = "분석상태", example = "0000")
@RequestParam(required = false)
@Parameter(description = "분석상태", example = "0000") @RequestParam(required = false)
String statCode,
@Parameter(description = "제목", example = "2023_2024년도") @RequestParam(required = false)
String title,
@@ -58,17 +53,15 @@ public class InferenceResultApiController {
int size,
@Parameter(description = "정렬 조건 (형식: 필드명,방향)", example = "name,asc")
@RequestParam(required = false)
String sort
) {
InferenceResultDto.SearchReq searchReq = new InferenceResultDto.SearchReq(statCode, title, page, size, sort);
Page<InferenceResultDto.AnalResList> analResList = inferenceResultService.getInferenceResultList(searchReq);
String sort) {
InferenceResultDto.SearchReq searchReq =
new InferenceResultDto.SearchReq(statCode, title, page, size, sort);
Page<InferenceResultDto.AnalResList> analResList =
inferenceResultService.getInferenceResultList(searchReq);
return ApiResponseDto.ok(analResList);
}
@Operation(
summary = "추론관리 분석결과 요약정보",
description =
"분석결과 요약정보를 조회합니다.")
@Operation(summary = "추론관리 분석결과 요약정보", description = "분석결과 요약정보를 조회합니다.")
@ApiResponses(
value = {
@ApiResponse(
@@ -83,16 +76,11 @@ public class InferenceResultApiController {
})
@GetMapping("/summary")
public ApiResponseDto<InferenceResultDto.AnalResSummary> getInferenceResultSummary(
@Parameter(description = "목록 id", example = "1")
@RequestParam Long id) {
@Parameter(description = "목록 id", example = "1") @RequestParam Long id) {
return ApiResponseDto.ok(inferenceResultService.getInferenceResultSummary(id));
}
@Operation(
summary = "추론관리 분석결과 상세",
description =
"분석결과 상제 정보 Summary, DashBoard")
@Operation(summary = "추론관리 분석결과 상세", description = "분석결과 상제 정보 Summary, DashBoard")
@ApiResponses(
value = {
@ApiResponse(
@@ -107,19 +95,16 @@ public class InferenceResultApiController {
})
@GetMapping("/detail")
public ApiResponseDto<InferenceResultDto.Detail> getInferenceDetail(
@Parameter(description = "목록 id", example = "1")
@RequestParam Long id) {
@Parameter(description = "목록 id", example = "1") @RequestParam Long id) {
// summary
InferenceResultDto.AnalResSummary summary = inferenceResultService.getInferenceResultSummary(id);
//dashBoard
InferenceResultDto.AnalResSummary summary =
inferenceResultService.getInferenceResultSummary(id);
// dashBoard
List<InferenceResultDto.Dashboard> dashboardList = this.getInferenceResultDashboard(id);
return ApiResponseDto.ok(new Detail(summary, dashboardList));
}
@Operation(
summary = "추론관리 분석결과 상세 목록",
description =
"추론관리 분석결과 상세 목록 geojson 데이터 조회")
@Operation(summary = "추론관리 분석결과 상세 목록", description = "추론관리 분석결과 상세 목록 geojson 데이터 조회")
@ApiResponses(
value = {
@ApiResponse(
@@ -134,27 +119,34 @@ public class InferenceResultApiController {
})
@GetMapping("/geom")
public ApiResponseDto<Page<InferenceResultDto.Geom>> getInferenceResultGeomList(
@Parameter(description = "기준년도 분류", example = "0001") @RequestParam(required = false) String targetClass,
@Parameter(description = "비교년도 분류", example = "0002") @RequestParam(required = false) String compareClass,
@Parameter(description = "5000:1 도협번호 37801011,37801012") @RequestParam(required = false) List<Long> mapSheetNum,
@Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0") int page,
@Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20") int size,
@Parameter(description = "정렬 조건 (형식: 필드명,방향)", example = "name,asc") @RequestParam(required = false) String sort
) {
InferenceResultDto.SearchGeoReq searchGeoReq = new InferenceResultDto.SearchGeoReq(targetClass, compareClass, mapSheetNum, page, size, sort);
Page<InferenceResultDto.Geom> geomList = inferenceResultService.getInferenceResultGeomList(searchGeoReq);
@Parameter(description = "기준년도 분류", example = "0001") @RequestParam(required = false)
String targetClass,
@Parameter(description = "비교년도 분류", example = "0002") @RequestParam(required = false)
String compareClass,
@Parameter(description = "5000:1 도협번호 37801011,37801012") @RequestParam(required = false)
List<Long> mapSheetNum,
@Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0")
int page,
@Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20")
int size,
@Parameter(description = "정렬 조건 (형식: 필드명,방향)", example = "name,asc")
@RequestParam(required = false)
String sort) {
InferenceResultDto.SearchGeoReq searchGeoReq =
new InferenceResultDto.SearchGeoReq(
targetClass, compareClass, mapSheetNum, page, size, sort);
Page<InferenceResultDto.Geom> geomList =
inferenceResultService.getInferenceResultGeomList(searchGeoReq);
return ApiResponseDto.ok(geomList);
}
/**
* 분석결과 상세 대시보드 조회
*
* @param id
* @return
*/
private List<Dashboard> getInferenceResultDashboard(Long id) {
return inferenceResultService.getInferenceResultBasic(id);
}
}

View File

@@ -8,7 +8,6 @@ import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
@@ -23,10 +22,8 @@ public class InferenceResultDto {
private String dataName;
private Long mapSheepNum;
private Long detectingCnt;
@JsonFormatDttm
private ZonedDateTime analStrtDttm;
@JsonFormatDttm
private ZonedDateTime analEndDttm;
@JsonFormatDttm private ZonedDateTime analStrtDttm;
@JsonFormatDttm private ZonedDateTime analEndDttm;
private Long analSec;
private String analState;
@@ -38,8 +35,7 @@ public class InferenceResultDto {
ZonedDateTime analStrtDttm,
ZonedDateTime analEndDttm,
Long analSec,
String analState
) {
String analState) {
this.id = id;
this.dataName = dataName;
this.mapSheepNum = mapSheepNum;
@@ -58,10 +54,8 @@ public class InferenceResultDto {
private String analTitle;
private String analMapSheet;
private Long detectingCnt;
@JsonFormatDttm
private ZonedDateTime analStrtDttm;
@JsonFormatDttm
private ZonedDateTime analEndDttm;
@JsonFormatDttm private ZonedDateTime analStrtDttm;
@JsonFormatDttm private ZonedDateTime analEndDttm;
private Long analSec;
private Long analPredSec;
private String analState;
@@ -79,8 +73,7 @@ public class InferenceResultDto {
Long analPredSec,
String analState,
String analStateNm,
String gukyuinUsed
) {
String gukyuinUsed) {
this.id = id;
this.analTitle = analTitle;
this.analMapSheet = analMapSheet;
@@ -103,10 +96,8 @@ public class InferenceResultDto {
private Integer targetYyyy;
private Integer compareYyyy;
private String analMapSheet;
@JsonFormatDttm
private ZonedDateTime analStrtDttm;
@JsonFormatDttm
private ZonedDateTime analEndDttm;
@JsonFormatDttm private ZonedDateTime analStrtDttm;
@JsonFormatDttm private ZonedDateTime analEndDttm;
private Long analSec;
private Long analPredSec;
private String resultUrl;
@@ -129,8 +120,7 @@ public class InferenceResultDto {
Long detectingCnt,
Double accuracy,
String analState,
String analStateNm
) {
String analStateNm) {
this.id = id;
this.modelInfo = modelInfo;
this.targetYyyy = targetYyyy;
@@ -157,11 +147,9 @@ public class InferenceResultDto {
String classAfterName;
Long classBeforeCnt;
Long classAfterCnt;
@JsonFormatDttm
ZonedDateTime createdDttm;
@JsonFormatDttm ZonedDateTime createdDttm;
Long createdUid;
@JsonFormatDttm
ZonedDateTime updatedDttm;
@JsonFormatDttm ZonedDateTime updatedDttm;
Long updatedUid;
Long refMapSheetNum;
Long dataUid;
@@ -179,8 +167,7 @@ public class InferenceResultDto {
ZonedDateTime updatedDttm,
Long updatedUid,
Long refMapSheetNum,
Long dataUid
) {
Long dataUid) {
this.compareYyyy = compareYyyy;
this.targetYyyy = targetYyyy;
this.mapSheetNum = mapSheetNum;
@@ -202,10 +189,7 @@ public class InferenceResultDto {
AnalResSummary summary;
List<Dashboard> dashboard;
public Detail(
AnalResSummary summary,
List<Dashboard> dashboard
) {
public Detail(AnalResSummary summary, List<Dashboard> dashboard) {
this.summary = summary;
this.dashboard = dashboard;
}
@@ -232,8 +216,7 @@ public class InferenceResultDto {
String classAfterCd,
String classAfterName,
Double classAfterProb,
Long mapSheetNum
) {
Long mapSheetNum) {
this.compareYyyy = compareYyyy;
this.targetYyyy = targetYyyy;
this.classBeforeCd = classBeforeCd;
@@ -246,9 +229,6 @@ public class InferenceResultDto {
}
}
@Schema(name = "InferenceResultSearchReq", description = "분석결과 목록 요청 정보")
@Getter
@Setter
@@ -288,7 +268,7 @@ public class InferenceResultDto {
private String targetClass;
// 비교년도
private String compareClass;
//분석도엽
// 분석도엽
private List<Long> mapSheetNum;
// 페이징 파라미터

View File

@@ -1,7 +1,6 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Basic;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import java.util.List;
@@ -19,15 +18,18 @@ public class InferenceResultService {
/**
* 추론관리 > 분석결과 목록 조회
*
* @param searchReq
* @return
*/
public Page<InferenceResultDto.AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq) {
public Page<InferenceResultDto.AnalResList> getInferenceResultList(
InferenceResultDto.SearchReq searchReq) {
return inferenceResultCoreService.getInferenceResultList(searchReq);
}
/**
* 분석결과 요약정보
*
* @param id
* @return
*/
@@ -37,6 +39,7 @@ public class InferenceResultService {
/**
* 분석결과 대시보드 조회
*
* @param id
* @return
*/
@@ -46,12 +49,12 @@ public class InferenceResultService {
/**
* 분석결과 상세 목록
*
* @param searchGeoReq
* @return
*/
public Page<InferenceResultDto.Geom> getInferenceResultGeomList(InferenceResultDto.SearchGeoReq searchGeoReq) {
public Page<InferenceResultDto.Geom> getInferenceResultGeomList(
InferenceResultDto.SearchGeoReq searchGeoReq) {
return inferenceResultCoreService.getInferenceResultGeomList(searchGeoReq);
}
}

View File

@@ -3,7 +3,6 @@ package com.kamco.cd.kamcoback.log;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.log.service.AuditLogService;
import com.kamco.cd.kamcoback.postgres.core.AuditLogCoreService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.time.LocalDate;
@@ -29,8 +28,7 @@ public class AuditLogApiController {
@RequestParam(required = false) LocalDate endDate,
@RequestParam int page,
@RequestParam(defaultValue = "20") int size) {
AuditLogDto.searchReq searchReq =
new AuditLogDto.searchReq(page, size, "created_dttm,desc");
AuditLogDto.searchReq searchReq = new AuditLogDto.searchReq(page, size, "created_dttm,desc");
Page<AuditLogDto.DailyAuditList> result =
auditLogService.getLogByDaily(searchReq, startDate, endDate);
@@ -44,10 +42,8 @@ public class AuditLogApiController {
@RequestParam LocalDate logDate,
@RequestParam int page,
@RequestParam(defaultValue = "20") int size) {
AuditLogDto.searchReq searchReq =
new AuditLogDto.searchReq(page, size, "created_dttm,desc");
Page<AuditLogDto.DailyDetail> result =
auditLogService.getLogByDailyResult(searchReq, logDate);
AuditLogDto.searchReq searchReq = new AuditLogDto.searchReq(page, size, "created_dttm,desc");
Page<AuditLogDto.DailyDetail> result = auditLogService.getLogByDailyResult(searchReq, logDate);
return ApiResponseDto.ok(result);
}
@@ -58,8 +54,7 @@ public class AuditLogApiController {
@RequestParam(required = false) String searchValue,
@RequestParam int page,
@RequestParam(defaultValue = "20") int size) {
AuditLogDto.searchReq searchReq =
new AuditLogDto.searchReq(page, size, "created_dttm,desc");
AuditLogDto.searchReq searchReq = new AuditLogDto.searchReq(page, size, "created_dttm,desc");
Page<AuditLogDto.MenuAuditList> result = auditLogService.getLogByMenu(searchReq, searchValue);
return ApiResponseDto.ok(result);
@@ -71,10 +66,8 @@ public class AuditLogApiController {
@RequestParam String menuId,
@RequestParam int page,
@RequestParam(defaultValue = "20") int size) {
AuditLogDto.searchReq searchReq =
new AuditLogDto.searchReq(page, size, "created_dttm,desc");
Page<AuditLogDto.MenuDetail> result =
auditLogService.getLogByMenuResult(searchReq, menuId);
AuditLogDto.searchReq searchReq = new AuditLogDto.searchReq(page, size, "created_dttm,desc");
Page<AuditLogDto.MenuDetail> result = auditLogService.getLogByMenuResult(searchReq, menuId);
return ApiResponseDto.ok(result);
}
@@ -85,8 +78,7 @@ public class AuditLogApiController {
@RequestParam(required = false) String searchValue,
@RequestParam int page,
@RequestParam(defaultValue = "20") int size) {
AuditLogDto.searchReq searchReq =
new AuditLogDto.searchReq(page, size, "created_dttm,desc");
AuditLogDto.searchReq searchReq = new AuditLogDto.searchReq(page, size, "created_dttm,desc");
Page<AuditLogDto.UserAuditList> result =
auditLogService.getLogByAccount(searchReq, searchValue);
@@ -99,10 +91,8 @@ public class AuditLogApiController {
@RequestParam Long userUid,
@RequestParam int page,
@RequestParam(defaultValue = "20") int size) {
AuditLogDto.searchReq searchReq =
new AuditLogDto.searchReq(page, size, "created_dttm,desc");
Page<AuditLogDto.UserDetail> result =
auditLogService.getLogByAccountResult(searchReq, userUid);
AuditLogDto.searchReq searchReq = new AuditLogDto.searchReq(page, size, "created_dttm,desc");
Page<AuditLogDto.UserDetail> result = auditLogService.getLogByAccountResult(searchReq, userUid);
return ApiResponseDto.ok(result);
}

View File

@@ -4,7 +4,6 @@ import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.log.dto.ErrorLogDto;
import com.kamco.cd.kamcoback.log.dto.EventType;
import com.kamco.cd.kamcoback.log.service.ErrorLogService;
import com.kamco.cd.kamcoback.postgres.core.ErrorLogCoreService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.time.LocalDate;

View File

@@ -3,8 +3,6 @@ package com.kamco.cd.kamcoback.log.dto;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
import io.swagger.v3.oas.annotations.media.Schema;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import lombok.AllArgsConstructor;
import lombok.Getter;
@@ -72,7 +70,13 @@ public class AuditLogDto {
public static class DailyAuditList extends AuditCommon {
private final String baseDate;
public DailyAuditList(int readCount, int cudCount, int printCount, int downloadCount, Long totalCount, String baseDate) {
public DailyAuditList(
int readCount,
int cudCount,
int printCount,
int downloadCount,
Long totalCount,
String baseDate) {
super(readCount, cudCount, printCount, downloadCount, totalCount);
this.baseDate = baseDate;
}
@@ -84,7 +88,14 @@ public class AuditLogDto {
private final String menuId;
private final String menuName;
public MenuAuditList(String menuId, String menuName, int readCount, int cudCount, int printCount, int downloadCount, Long totalCount) {
public MenuAuditList(
String menuId,
String menuName,
int readCount,
int cudCount,
int printCount,
int downloadCount,
Long totalCount) {
super(readCount, cudCount, printCount, downloadCount, totalCount);
this.menuId = menuId;
this.menuName = menuName;
@@ -98,7 +109,15 @@ public class AuditLogDto {
private final String loginId;
private final String username;
public UserAuditList(Long accountId, String loginId, String username, int readCount, int cudCount, int printCount, int downloadCount, Long totalCount) {
public UserAuditList(
Long accountId,
String loginId,
String username,
int readCount,
int cudCount,
int printCount,
int downloadCount,
Long totalCount) {
super(readCount, cudCount, printCount, downloadCount, totalCount);
this.accountId = accountId;
this.loginId = loginId;
@@ -122,13 +141,20 @@ public class AuditLogDto {
private final String loginId;
private final String menuName;
public DailyDetail(Long logId, String userName, String loginId, String menuName, EventType eventType, LogDetail detail){
public DailyDetail(
Long logId,
String userName,
String loginId,
String menuName,
EventType eventType,
LogDetail detail) {
super(logId, eventType, detail);
this.userName = userName;
this.loginId = loginId;
this.menuName = menuName;
}
}
@Schema(name = "MenuDetail", description = "메뉴별 로그 상세")
@Getter
public static class MenuDetail extends AuditDetail {
@@ -136,20 +162,28 @@ public class AuditLogDto {
private final String userName;
private final String loginId;
public MenuDetail(Long logId, String logDateTime, String userName, String loginId, EventType eventType, LogDetail detail){
public MenuDetail(
Long logId,
String logDateTime,
String userName,
String loginId,
EventType eventType,
LogDetail detail) {
super(logId, eventType, detail);
this.logDateTime = logDateTime;
this.userName = userName;
this.loginId = loginId;
}
}
@Schema(name = "UserDetail", description = "사용자별 로그 상세")
@Getter
public static class UserDetail extends AuditDetail {
private final String logDateTime;
private final String menuNm;
public UserDetail(Long logId, String logDateTime, String menuNm, EventType eventType, LogDetail detail){
public UserDetail(
Long logId, String logDateTime, String menuNm, EventType eventType, LogDetail detail) {
super(logId, eventType, detail);
this.logDateTime = logDateTime;
this.menuNm = menuNm;

View File

@@ -2,13 +2,12 @@ package com.kamco.cd.kamcoback.log.service;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.postgres.core.AuditLogCoreService;
import java.time.LocalDate;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.time.LocalDate;
@Service
@RequiredArgsConstructor
@Transactional(readOnly = true)
@@ -44,6 +43,4 @@ public class AuditLogService {
AuditLogDto.searchReq searchRange, Long accountId) {
return auditLogCoreService.getLogByAccountResult(searchRange, accountId);
}
}

View File

@@ -2,7 +2,6 @@ package com.kamco.cd.kamcoback.model;
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.model.dto.ModelVerDto;
import com.kamco.cd.kamcoback.model.service.ModelMngService;
@@ -13,13 +12,12 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.transaction.Transactional;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.*;
import java.time.LocalDate;
import java.util.List;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.*;
@Tag(name = "모델 관리", description = "모델 관리 API")
@RequiredArgsConstructor
@@ -50,6 +48,7 @@ public class ModelMngApiController {
/**
* 최종 등록 모델 정보
*
* @return ModelMngDto.FinalModelDto
*/
@Operation(summary = "최종 등록 모델 조회", description = "최종 등록 모델 조회")
@@ -60,6 +59,7 @@ public class ModelMngApiController {
/**
* 모델 등록 => 모델, 버전 동시 등록 (UI 상 따로 등록하는 곳 없음)
*
* @param addReq 모델 입력 값
* @return ModelVerDto.Basic
*/
@@ -71,7 +71,8 @@ public class ModelMngApiController {
@Operation(summary = "모델 수정", description = "모델 수정")
@PutMapping("/{id}")
public ApiResponseDto<Long> update(@PathVariable Long id, @RequestBody ModelMngDto.AddReq addReq) {
public ApiResponseDto<Long> update(
@PathVariable Long id, @RequestBody ModelMngDto.AddReq addReq) {
return ApiResponseDto.ok(modelMngService.update(id, addReq));
}
@@ -89,11 +90,11 @@ public class ModelMngApiController {
@RequestParam int page,
@RequestParam(defaultValue = "20") int size,
@RequestParam(required = false) String searchVal,
@RequestParam(required = false) String searchColumn
) {
@RequestParam(required = false) String searchColumn) {
ModelMngDto.searchReq searchReq =
new ModelMngDto.searchReq(page, size, Optional.ofNullable(searchColumn).orElse("createdDate") + ",desc");
//searchColumn:: Entity 컬럼명칭으로 -> 기본값 = 등록일 createdDate, (선택) 배포일 deployDttm
new ModelMngDto.searchReq(
page, size, Optional.ofNullable(searchColumn).orElse("createdDate") + ",desc");
// searchColumn:: Entity 컬럼명칭으로 -> 기본값 = 등록일 createdDate, (선택) 배포일 deployDttm
Page<ModelMngDto.ModelRegHistory> result =
modelMngService.getRegHistoryList(searchReq, startDate, endDate, searchVal);

View File

@@ -3,6 +3,7 @@ package com.kamco.cd.kamcoback.model.dto;
import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotEmpty;
import java.time.ZonedDateTime;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
@@ -11,8 +12,6 @@ import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import java.time.ZonedDateTime;
public class ModelMngDto {
@Schema(name = "ModelMng Basic", description = "모델관리 엔티티 기본 정보")
@@ -24,12 +23,10 @@ public class ModelMngDto {
private final String modelCate;
private final String modelPath;
@JsonFormatDttm
private final ZonedDateTime createdDttm;
@JsonFormatDttm private final ZonedDateTime createdDttm;
private final Long createdUid;
@JsonFormatDttm
private final ZonedDateTime updatedDttm;
@JsonFormatDttm private final ZonedDateTime updatedDttm;
private final Long updatedUid;
private final String modelCntnt;
@@ -42,8 +39,7 @@ public class ModelMngDto {
Long createdUid,
ZonedDateTime updatedDttm,
Long updatedUid,
String modelCntnt
) {
String modelCntnt) {
this.id = id;
this.modelNm = modelNm;
this.modelCate = modelCate;
@@ -70,8 +66,17 @@ public class ModelMngDto {
private final String deployState;
private final String modelPath;
public FinalModelDto(Long modelUid, String modelNm, String modelCate, Long modelVerUid, String modelVer,
String usedState, String modelState, Double qualityProb, String deployState, String modelPath) {
public FinalModelDto(
Long modelUid,
String modelNm,
String modelCate,
Long modelVerUid,
String modelVer,
String usedState,
String modelState,
Double qualityProb,
String deployState,
String modelPath) {
this.modelUid = modelUid;
this.modelNm = modelNm;
this.modelCate = modelCate;

View File

@@ -2,13 +2,8 @@ package com.kamco.cd.kamcoback.model.dto;
import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotEmpty;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import java.time.ZonedDateTime;
import lombok.Getter;
public class ModelVerDto {
@@ -28,12 +23,10 @@ public class ModelVerDto {
private final String deployState;
private final String modelPath;
@JsonFormatDttm
private final ZonedDateTime createdDttm;
@JsonFormatDttm private final ZonedDateTime createdDttm;
private final Long createdUid;
@JsonFormatDttm
private final ZonedDateTime updatedDttm;
@JsonFormatDttm private final ZonedDateTime updatedDttm;
private final Long updatedUid;
public Basic(
@@ -49,8 +42,7 @@ public class ModelVerDto {
ZonedDateTime createdDttm,
Long createdUid,
ZonedDateTime updatedDttm,
Long updatedUid
) {
Long updatedUid) {
this.id = id;
this.modelUid = modelUid;
this.modelCate = modelCate;
@@ -66,5 +58,4 @@ public class ModelVerDto {
this.updatedUid = updatedUid;
}
}
}

View File

@@ -1,18 +1,16 @@
package com.kamco.cd.kamcoback.model.service;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.model.dto.ModelVerDto;
import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
import java.time.LocalDate;
import java.util.List;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.time.LocalDate;
import java.util.List;
import java.util.Optional;
@Service
@RequiredArgsConstructor
@Transactional(readOnly = true)
@@ -20,7 +18,7 @@ public class ModelMngService {
private final ModelMngCoreService modelMngCoreService;
public List<ModelMngDto.Basic> findModelMngAll(){
public List<ModelMngDto.Basic> findModelMngAll() {
return modelMngCoreService.findModelMngAll();
}
@@ -40,7 +38,8 @@ public class ModelMngService {
return modelMngCoreService.delete(id);
}
public Page<ModelMngDto.ModelRegHistory> getRegHistoryList(ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal) {
public Page<ModelMngDto.ModelRegHistory> getRegHistoryList(
ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal) {
return modelMngCoreService.getRegHistoryList(searchReq, startDate, endDate, searchVal);
}
}

View File

@@ -8,16 +8,18 @@ import org.springframework.data.domain.Pageable;
public class QuerydslOrderUtil {
/**
* Pageable의 Sort 정보를 QueryDSL OrderSpecifier 배열로 변환
*
* @param pageable Spring Pageable
* @param entityClass 엔티티 클래스 (예: User.class)
* @param alias Q 엔티티 alias (예: "user")
*/
public static <T> OrderSpecifier<?>[] getOrderSpecifiers(Pageable pageable, Class<T> entityClass, String alias) {
public static <T> OrderSpecifier<?>[] getOrderSpecifiers(
Pageable pageable, Class<T> entityClass, String alias) {
PathBuilder<T> entityPath = new PathBuilder<>(entityClass, alias);
return pageable.getSort()
.stream()
.map(sort -> {
return pageable.getSort().stream()
.map(
sort -> {
Order order = sort.isAscending() ? Order.ASC : Order.DESC;
// PathBuilder.get()는 컬럼명(String)을 동적 Path로 반환
return new OrderSpecifier<>(order, entityPath.get(sort.getProperty(), String.class));
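A minimal usage sketch for the helper above, mirroring the call ModelMngRepositoryImpl makes further down in this diff; the PageRequest values are illustrative and not part of the commit. Sort properties are resolved by name through PathBuilder, so they must be entity field names such as createdDate.

import com.kamco.cd.kamcoback.postgres.QuerydslOrderUtil;
import com.kamco.cd.kamcoback.postgres.entity.ModelVerEntity;
import com.querydsl.core.types.OrderSpecifier;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;

public class OrderSpecifierSketch {
  public static void main(String[] args) {
    // Page 0, 20 rows, ordered by the entity field "createdDate" descending.
    Pageable pageable = PageRequest.of(0, 20, Sort.by(Sort.Direction.DESC, "createdDate"));
    OrderSpecifier<?>[] orders =
        QuerydslOrderUtil.getOrderSpecifiers(pageable, ModelVerEntity.class, "modelVerEntity");
    // The array can be passed straight to .orderBy(...) on a JPAQuery.
    System.out.println(orders.length); // one specifier per Sort.Order
  }
}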

View File

@@ -1,17 +1,15 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.changedetection.ChangeDetectionRepository;
import java.util.List;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.Point;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
@Service
@RequiredArgsConstructor
public class ChangeDetectionCoreService {
@@ -21,17 +19,14 @@ public class ChangeDetectionCoreService {
public List<ChangeDetectionDto> getPolygonToPoint() {
List<MapSheetAnalDataGeomEntity> list = changeDetectionRepository.findAll();
return list.stream().map(p -> {
return list.stream()
.map(
p -> {
Geometry polygon = p.getGeom();
// 중심 좌표 계산
Point centroid = polygon.getCentroid();
return new ChangeDetectionDto(
p.getId(),
polygon,
centroid.getX(),
centroid.getY()
);
return new ChangeDetectionDto(p.getId(), polygon, centroid.getX(), centroid.getY());
})
.collect(Collectors.toList());
}
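The mapping above needs nothing beyond plain JTS: getCentroid() on each stored polygon yields the point whose X/Y feed the DTO. A self-contained sketch of the same idea with a hand-built unit square; the GeometryFactory and coordinates are illustrative, not project data.

import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.Polygon;

public class CentroidSketch {
  public static void main(String[] args) {
    GeometryFactory gf = new GeometryFactory();
    // A unit square; a closed ring repeats the first coordinate at the end.
    Polygon square =
        gf.createPolygon(
            new Coordinate[] {
              new Coordinate(0, 0),
              new Coordinate(1, 0),
              new Coordinate(1, 1),
              new Coordinate(0, 1),
              new Coordinate(0, 0)
            });
    Point centroid = square.getCentroid();
    System.out.println(centroid.getX() + ", " + centroid.getY()); // 0.5, 0.5
  }
}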

View File

@@ -75,6 +75,7 @@ public class CommonCodeCoreService
/**
* 공통코드 이름 조회
*
* @param parentCodeCd
* @param childCodeCd
* @return

View File

@@ -6,7 +6,6 @@ import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalSttcEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import jakarta.persistence.EntityNotFoundException;
import java.util.List;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@@ -19,41 +18,49 @@ public class InferenceResultCoreService {
/**
* 추론관리 > 분석결과 목록 조회
*
* @param searchReq
* @return
*/
public Page<InferenceResultDto.AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq) {
public Page<InferenceResultDto.AnalResList> getInferenceResultList(
InferenceResultDto.SearchReq searchReq) {
return inferenceResultRepository.getInferenceResultList(searchReq);
}
/**
* 분석결과 요약정보
*
* @param id
* @return
*/
public InferenceResultDto.AnalResSummary getInferenceResultSummary(Long id) {
InferenceResultDto.AnalResSummary summary = inferenceResultRepository.getInferenceResultSummary(id).orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
InferenceResultDto.AnalResSummary summary =
inferenceResultRepository
.getInferenceResultSummary(id)
.orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
return summary;
}
/**
* 분석결과 대시보드 조회
*
* @param id
* @return
*/
public List<Dashboard> getInferenceResultDashboard(Long id) {
return inferenceResultRepository.getInferenceResultDashboard(id)
.stream()
return inferenceResultRepository.getInferenceResultDashboard(id).stream()
.map(MapSheetAnalSttcEntity::toDto)
.toList();
}
/**
* 분석결과 상세 목록
*
* @param searchGeoReq
* @return
*/
public Page<InferenceResultDto.Geom> getInferenceResultGeomList(InferenceResultDto.SearchGeoReq searchGeoReq) {
public Page<InferenceResultDto.Geom> getInferenceResultGeomList(
InferenceResultDto.SearchGeoReq searchGeoReq) {
return inferenceResultRepository.getInferenceGeomList(searchGeoReq);
}
}

View File

@@ -7,13 +7,12 @@ import com.kamco.cd.kamcoback.postgres.entity.ModelVerEntity;
import com.kamco.cd.kamcoback.postgres.repository.model.ModelMngRepository;
import com.kamco.cd.kamcoback.postgres.repository.model.ModelVerRepository;
import jakarta.persistence.EntityNotFoundException;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
import java.time.LocalDate;
import java.util.List;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
@@ -26,37 +25,57 @@ public class ModelMngCoreService {
return modelMngRepository.findModelMngAll().stream().map(ModelMngEntity::toDto).toList();
}
public Optional<ModelMngDto.FinalModelDto> getFinalModelInfo(){
public Optional<ModelMngDto.FinalModelDto> getFinalModelInfo() {
return modelMngRepository.getFinalModelInfo();
}
public ModelVerDto.Basic save(ModelMngDto.AddReq addReq) {
ModelMngEntity modelMngEntity = new ModelMngEntity(addReq.getModelNm(), addReq.getModelCate(), addReq.getModelPath(),
1L, 1L, addReq.getModelCntnt()); //TODO: 로그인 기능 붙이면 Uid 넣어야 함
ModelMngEntity modelMngEntity =
new ModelMngEntity(
addReq.getModelNm(),
addReq.getModelCate(),
addReq.getModelPath(),
1L,
1L,
addReq.getModelCntnt()); // TODO: 로그인 기능 붙이면 Uid 넣어야 함
ModelMngEntity saved = modelMngRepository.save(modelMngEntity);
ModelVerEntity modelVerEntity = new ModelVerEntity(saved.getId(), addReq.getModelCate(), addReq.getModelVer(), "NONE", "NONE",
0.0, "NONE", addReq.getModelPath(), 1L, 1L);
ModelVerEntity modelVerEntity =
new ModelVerEntity(
saved.getId(),
addReq.getModelCate(),
addReq.getModelVer(),
"NONE",
"NONE",
0.0,
"NONE",
addReq.getModelPath(),
1L,
1L);
return modelVerRepository.save(modelVerEntity).toDto();
}
public Long update(Long id, ModelMngDto.AddReq addReq) {
//조회
ModelVerEntity existData = modelVerRepository.findModelVerById(id)
.orElseThrow(EntityNotFoundException::new); //데이터 없는 경우 exception
// 조회
ModelVerEntity existData =
modelVerRepository
.findModelVerById(id)
.orElseThrow(EntityNotFoundException::new); // 데이터 없는 경우 exception
existData.update(addReq);
//TODO: 추후 수정 단계에서 도커파일 업로드하면 버전 업데이트 하는 로직 필요
// TODO: 추후 수정 단계에서 도커파일 업로드하면 버전 업데이트 하는 로직 필요
return existData.getId();
}
public Long delete(Long id) {
//조회
ModelVerEntity verEntity = modelVerRepository.findModelVerById(id)
// 조회
ModelVerEntity verEntity =
modelVerRepository
.findModelVerById(id)
.orElseThrow(() -> new EntityNotFoundException("버전 id 에 대한 정보를 찾을 수 없습니다. id : " + id));
//usedState가 USED 이거나 이미 삭제된 상태이면 삭제 불가
// usedState가 USED 이거나 이미 삭제된 상태이면 삭제 불가
if (verEntity.getUsedState().equals("USED") || verEntity.isDeleted().equals(true)) {
throw new IllegalStateException("해당 모델이 사용중이라 삭제 불가"); //TODO: 추후 규칙 정의되면 수정 필요
throw new IllegalStateException("해당 모델이 사용중이라 삭제 불가"); // TODO: 추후 규칙 정의되면 수정 필요
}
// id 코드 deleted = true 업데이트
@@ -64,7 +83,8 @@ public class ModelMngCoreService {
return verEntity.getId();
}
public Page<ModelMngDto.ModelRegHistory> getRegHistoryList(ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal) {
public Page<ModelMngDto.ModelRegHistory> getRegHistoryList(
ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal) {
return modelMngRepository.getRegHistoryList(searchReq, startDate, endDate, searchVal);
}
}
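A hedged sketch of how a caller might surface the two failure modes of delete() above; the wrapper class and the exception handling are illustrative only, not part of the commit.

import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
import jakarta.persistence.EntityNotFoundException;

// Illustrative caller only.
public class ModelVersionDeleter {

  private final ModelMngCoreService coreService;

  public ModelVersionDeleter(ModelMngCoreService coreService) {
    this.coreService = coreService;
  }

  /** Returns the deleted version id, or null when no tb_model_ver row exists for the id. */
  public Long deleteIfPresent(Long id) {
    try {
      // Soft delete: the core service flips deleted = true and returns the version id.
      return coreService.delete(id);
    } catch (EntityNotFoundException e) {
      return null;
    }
    // An IllegalStateException still propagates when usedState is "USED"
    // or the row is already soft-deleted.
  }
}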

View File

@@ -8,8 +8,6 @@ import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.Instant;
import java.time.LocalTime;
import java.time.ZonedDateTime;
import java.util.Map;
import lombok.Getter;
@@ -26,7 +24,10 @@ public class MapSheetAnalDataEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_anal_data_id_gen")
@SequenceGenerator(name = "tb_map_sheet_anal_data_id_gen", sequenceName = "tb_map_sheet_learn_data_data_uid", allocationSize = 1)
@SequenceGenerator(
name = "tb_map_sheet_anal_data_id_gen",
sequenceName = "tb_map_sheet_learn_data_data_uid",
allocationSize = 1)
@Column(name = "data_uid", nullable = false)
private Long id;
@@ -103,5 +104,4 @@ public class MapSheetAnalDataEntity {
@Column(name = "detecting_cnt")
private Long detectingCnt;
}

View File

@@ -2,12 +2,11 @@ package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.*;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import lombok.Getter;
import lombok.Setter;
import org.locationtech.jts.geom.Geometry;
import java.time.ZonedDateTime;
@Getter
@Setter
@Entity
@@ -15,8 +14,13 @@ import java.time.ZonedDateTime;
public class MapSheetAnalDataGeomEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_anal_data_geom_id_gen")
@SequenceGenerator(name = "tb_map_sheet_anal_data_geom_id_gen", sequenceName = "tb_map_sheet_learn_data_geom_geom_uid", allocationSize = 1)
@GeneratedValue(
strategy = GenerationType.SEQUENCE,
generator = "tb_map_sheet_anal_data_geom_id_gen")
@SequenceGenerator(
name = "tb_map_sheet_anal_data_geom_id_gen",
sequenceName = "tb_map_sheet_learn_data_geom_geom_uid",
allocationSize = 1)
@Column(name = "geo_uid", nullable = false)
private Long id;

View File

@@ -2,16 +2,12 @@ package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.Instant;
import java.time.ZonedDateTime;
import lombok.Getter;
import lombok.Setter;
@@ -25,7 +21,10 @@ public class MapSheetAnalEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_anal_id_gen")
@SequenceGenerator(name = "tb_map_sheet_anal_id_gen", sequenceName = "tb_map_sheet_anal_anal_uid", allocationSize = 1)
@SequenceGenerator(
name = "tb_map_sheet_anal_id_gen",
sequenceName = "tb_map_sheet_anal_anal_uid",
allocationSize = 1)
@Column(name = "anal_uid", nullable = false)
private Long id;
@@ -92,5 +91,4 @@ public class MapSheetAnalEntity {
@Column(name = "detecting_cnt")
private Long detectingCnt;
}

View File

@@ -1,13 +1,11 @@
package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import jakarta.persistence.Column;
import jakarta.persistence.EmbeddedId;
import jakarta.persistence.Entity;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import lombok.Getter;
import lombok.Setter;
@@ -19,8 +17,7 @@ import org.hibernate.annotations.ColumnDefault;
@Table(name = "tb_map_sheet_anal_sttc")
public class MapSheetAnalSttcEntity {
@EmbeddedId
private MapSheetAnalSttcEntityId id;
@EmbeddedId private MapSheetAnalSttcEntityId id;
@Column(name = "class_before_cnt")
private Long classBeforeCnt;
@@ -64,7 +61,6 @@ public class MapSheetAnalSttcEntity {
this.updatedDttm,
this.updatedUid,
this.refMapSheetNum,
this.dataUid
);
this.dataUid);
}
}

View File

@@ -16,6 +16,7 @@ import org.hibernate.Hibernate;
public class MapSheetAnalSttcEntityId implements Serializable {
private static final long serialVersionUID = -8630519290255405042L;
@NotNull
@Column(name = "compare_yyyy", nullable = false)
private Integer compareYyyy;
@@ -47,16 +48,15 @@ public class MapSheetAnalSttcEntityId implements Serializable {
return false;
}
MapSheetAnalSttcEntityId entity = (MapSheetAnalSttcEntityId) o;
return Objects.equals(this.targetYyyy, entity.targetYyyy) &&
Objects.equals(this.classBeforeName, entity.classBeforeName) &&
Objects.equals(this.classAfterName, entity.classAfterName) &&
Objects.equals(this.compareYyyy, entity.compareYyyy) &&
Objects.equals(this.mapSheetNum, entity.mapSheetNum);
return Objects.equals(this.targetYyyy, entity.targetYyyy)
&& Objects.equals(this.classBeforeName, entity.classBeforeName)
&& Objects.equals(this.classAfterName, entity.classAfterName)
&& Objects.equals(this.compareYyyy, entity.compareYyyy)
&& Objects.equals(this.mapSheetNum, entity.mapSheetNum);
}
@Override
public int hashCode() {
return Objects.hash(targetYyyy, classBeforeName, classAfterName, compareYyyy, mapSheetNum);
}
}

View File

@@ -5,8 +5,6 @@ import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import java.util.Map;
import lombok.Getter;
@@ -104,5 +102,4 @@ public class MapSheetLearnDataEntity {
@Column(name = "updated_uid")
private Long updatedUid;
}

View File

@@ -2,18 +2,12 @@ package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.OnDelete;
import org.hibernate.annotations.OnDeleteAction;
import org.locationtech.jts.geom.Geometry;
@Getter
@@ -75,5 +69,4 @@ public class MapSheetLearnDataGeomEntity {
@Column(name = "updated_uid")
private Long updatedUid;
}

View File

@@ -2,11 +2,10 @@ package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.postgres.CommonDateEntity;
import jakarta.persistence.*;
import java.time.ZonedDateTime;
import lombok.Getter;
import lombok.Setter;
import java.time.ZonedDateTime;
@Getter
@Setter
@Entity
@@ -14,7 +13,10 @@ import java.time.ZonedDateTime;
public class ModelDeployHstEntity extends CommonDateEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_model_deploy_hst_id_gen")
@SequenceGenerator(name = "tb_model_deploy_hst_id_gen", sequenceName = "tb_model_deploy_hst_deploy_uid", allocationSize = 1)
@SequenceGenerator(
name = "tb_model_deploy_hst_id_gen",
sequenceName = "tb_model_deploy_hst_deploy_uid",
allocationSize = 1)
@Column(name = "deploy_uid", nullable = false)
private Long id;

View File

@@ -8,8 +8,6 @@ import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import java.time.ZonedDateTime;
@Getter
@Setter
@Entity
@@ -18,7 +16,10 @@ public class ModelMngEntity extends CommonDateEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_model_mng_id_gen")
@SequenceGenerator(name = "tb_model_mng_id_gen", sequenceName = "tb_model_mng_model_uid", allocationSize = 1)
@SequenceGenerator(
name = "tb_model_mng_id_gen",
sequenceName = "tb_model_mng_model_uid",
allocationSize = 1)
@Column(name = "model_uid", nullable = false)
private Long id;
@@ -46,8 +47,13 @@ public class ModelMngEntity extends CommonDateEntity {
@Column(name = "model_cntnt", columnDefinition = "TEXT")
private String modelCntnt;
public ModelMngEntity(String modelNm, String modelCate, String modelPath,
Long createdUid, Long updatedUid, String modelCntnt) {
public ModelMngEntity(
String modelNm,
String modelCate,
String modelPath,
Long createdUid,
Long updatedUid,
String modelCntnt) {
this.modelNm = modelNm;
this.modelCate = modelCate;
this.modelPath = modelPath;

View File

@@ -19,7 +19,10 @@ public class ModelVerEntity extends CommonDateEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_model_ver_id_gen")
@SequenceGenerator(name = "tb_model_ver_id_gen", sequenceName = "tb_model_ver_model_ver_uid", allocationSize = 1)
@SequenceGenerator(
name = "tb_model_ver_id_gen",
sequenceName = "tb_model_ver_model_ver_uid",
allocationSize = 1)
@Column(name = "model_ver_uid", nullable = false)
private Long id;
@@ -62,8 +65,19 @@ public class ModelVerEntity extends CommonDateEntity {
private Boolean deleted = false;
public ModelVerEntity(Long id, Long modelUid, String modelCate, String modelVer, String usedState, String modelState,
Double qualityProb, String deployState, String modelPath, Long createdUid, Long updatedUid, Boolean deleted) {
public ModelVerEntity(
Long id,
Long modelUid,
String modelCate,
String modelVer,
String usedState,
String modelState,
Double qualityProb,
String deployState,
String modelPath,
Long createdUid,
Long updatedUid,
Boolean deleted) {
this.id = id;
this.modelUid = modelUid;
this.modelCate = modelCate;
@@ -78,8 +92,17 @@ public class ModelVerEntity extends CommonDateEntity {
this.deleted = deleted;
}
public ModelVerEntity(Long modelUid, String modelCate, String modelVer, String usedState, String modelState,
Double qualityProb, String deployState, String modelPath, Long createdUid, Long updatedUid) {
public ModelVerEntity(
Long modelUid,
String modelCate,
String modelVer,
String usedState,
String modelState,
Double qualityProb,
String deployState,
String modelPath,
Long createdUid,
Long updatedUid) {
this.modelUid = modelUid;
this.modelCate = modelCate;
this.modelVer = modelVer;
@@ -119,7 +142,7 @@ public class ModelVerEntity extends CommonDateEntity {
return deleted;
}
public void deleted(){
public void deleted() {
this.deleted = true;
}
}

View File

@@ -3,6 +3,5 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface InferenceResultRepository extends JpaRepository<MapSheetAnalEntity, Long>, InferenceResultRepositoryCustom {
}
public interface InferenceResultRepository
extends JpaRepository<MapSheetAnalEntity, Long>, InferenceResultRepositoryCustom {}
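This one-liner composes Spring Data's CRUD methods with the hand-written QueryDSL fragment that InferenceResultRepositoryImpl provides further down. A rough sketch of the pattern with throwaway Foo* names, assuming Spring Data's default "Impl" implementation postfix; the project applies the same RepositoryName + Impl naming to all of its custom repositories.

import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import org.springframework.data.jpa.repository.JpaRepository;

// All Foo* names are throwaway; only the wiring pattern matters.
@Entity
class FooEntity {
  @Id Long id;
}

interface FooRepositoryCustom {
  long countByCustomRule();
}

// Located by the "Impl" postfix and merged into FooRepository as a fragment.
class FooRepositoryImpl implements FooRepositoryCustom {
  @Override
  public long countByCustomRule() {
    return 0L; // a real fragment would query through JPAQueryFactory or the EntityManager
  }
}

// Callers inject FooRepository and see both the CRUD methods and countByCustomRule().
interface FooRepository extends JpaRepository<FooEntity, Long>, FooRepositoryCustom {}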

View File

@@ -1,17 +1,18 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalSttcEntity;
import java.util.List;
import java.util.Optional;
import org.springframework.data.domain.Page;
public interface InferenceResultRepositoryCustom {
Page<InferenceResultDto.AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq);
Page<InferenceResultDto.AnalResList> getInferenceResultList(
InferenceResultDto.SearchReq searchReq);
Optional<InferenceResultDto.AnalResSummary> getInferenceResultSummary(Long id);
List<MapSheetAnalSttcEntity> getInferenceResultDashboard(Long id);
Page<InferenceResultDto.Geom> getInferenceGeomList(InferenceResultDto.SearchGeoReq searchGeoReq);
}

View File

@@ -2,13 +2,13 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalSttcEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.jpa.JPAExpressions;
@@ -21,7 +21,6 @@ import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Repository;
import com.querydsl.core.BooleanBuilder;
@Repository
@RequiredArgsConstructor
@@ -31,18 +30,20 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
private final QMapSheetAnalEntity mapSheetAnal = QMapSheetAnalEntity.mapSheetAnalEntity;
private final QModelMngEntity tmm = QModelMngEntity.modelMngEntity;
private final QModelVerEntity tmv = QModelVerEntity.modelVerEntity;
private final QMapSheetAnalSttcEntity mapSheetAnalSttc = QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
private final QMapSheetAnalDataGeomEntity mapSheetAnalDataGeom = QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
private final QMapSheetAnalSttcEntity mapSheetAnalSttc =
QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
private final QMapSheetAnalDataGeomEntity mapSheetAnalDataGeom =
QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
/**
* 분석결과 목록 조회
*
* @param searchReq
* @return
*/
@Override
public Page<InferenceResultDto.AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq) {
public Page<InferenceResultDto.AnalResList> getInferenceResultList(
InferenceResultDto.SearchReq searchReq) {
Pageable pageable = searchReq.toPageable();
// "0000" 전체조회
@@ -56,8 +57,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
builder.and(mapSheetAnal.analTitle.like("%" + searchReq.getTitle() + "%"));
}
List<InferenceResultDto.AnalResList> content = queryFactory
.select(Projections.constructor(InferenceResultDto.AnalResList.class,
List<InferenceResultDto.AnalResList> content =
queryFactory
.select(
Projections.constructor(
InferenceResultDto.AnalResList.class,
mapSheetAnal.id,
mapSheetAnal.analTitle,
mapSheetAnal.analMapSheet,
@@ -67,47 +71,41 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
mapSheetAnal.analSec,
mapSheetAnal.analPredSec,
mapSheetAnal.analState,
Expressions.stringTemplate("fn_code_name({0}, {1})", "0002", mapSheetAnal.analState),
mapSheetAnal.gukyuinUsed
))
Expressions.stringTemplate(
"fn_code_name({0}, {1})", "0002", mapSheetAnal.analState),
mapSheetAnal.gukyuinUsed))
.from(mapSheetAnal)
.where(
builder
)
.where(builder)
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(mapSheetAnal.createdDttm.desc())
.fetch();
long total = queryFactory
.select(mapSheetAnal.id)
.from(mapSheetAnal)
.where(
builder
)
.fetchCount();
long total =
queryFactory.select(mapSheetAnal.id).from(mapSheetAnal).where(builder).fetchCount();
return new PageImpl<>(content, pageable, total);
}
/**
* 분석결과 요약정보
*
* @param id
* @return
*/
@Override
public Optional<InferenceResultDto.AnalResSummary> getInferenceResultSummary(Long id) {
// 1. 최신 버전 UID를 가져오는 서브쿼리
JPQLQuery<Long> latestVerUidSub = JPAExpressions
.select(tmv.id.max())
.from(tmv)
.where(tmv.modelUid.eq(tmm.id));
// 1. 최신 버전 UID를 가져오는 서브쿼리
JPQLQuery<Long> latestVerUidSub =
JPAExpressions.select(tmv.id.max()).from(tmv).where(tmv.modelUid.eq(tmm.id));
Optional<InferenceResultDto.AnalResSummary> content = Optional.ofNullable(queryFactory
.select(Projections.constructor(InferenceResultDto.AnalResSummary.class,
Optional<InferenceResultDto.AnalResSummary> content =
Optional.ofNullable(
queryFactory
.select(
Projections.constructor(
InferenceResultDto.AnalResSummary.class,
mapSheetAnal.id,
tmm.modelNm.concat(" ").concat(tmv.modelVer).as("modelInfo"),
mapSheetAnal.targetYyyy,
@@ -121,22 +119,21 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
mapSheetAnal.detectingCnt,
mapSheetAnal.accuracy,
mapSheetAnal.analState,
Expressions.stringTemplate("fn_code_name({0}, {1})", "0002", mapSheetAnal.analState)
))
Expressions.stringTemplate(
"fn_code_name({0}, {1})", "0002", mapSheetAnal.analState)))
.from(mapSheetAnal)
.leftJoin(tmm).on(mapSheetAnal.modelUid.eq(tmm.id))
.leftJoin(tmv).on(
tmv.modelUid.eq(tmm.id)
.and(tmv.id.eq(latestVerUidSub))
)
.leftJoin(tmm)
.on(mapSheetAnal.modelUid.eq(tmm.id))
.leftJoin(tmv)
.on(tmv.modelUid.eq(tmm.id).and(tmv.id.eq(latestVerUidSub)))
.where(mapSheetAnal.id.eq(id))
.fetchOne()
);
.fetchOne());
return content;
}
/**
* 분석결과 상세 대시보드 조회
*
* @param id
* @return
*/
@@ -151,6 +148,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
/**
* 분석결과 상세 목록
*
* @param searchGeoReq
* @return
*/
@@ -160,44 +158,46 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
BooleanBuilder builder = new BooleanBuilder();
// 기준년도 분류
if(searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")){
if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")) {
builder.and(mapSheetAnalDataGeom.classAfterCd.eq(searchGeoReq.getTargetClass()));
}
// 비교년도 분류
if(searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")){
if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")) {
builder.and(mapSheetAnalDataGeom.classBeforeCd.eq(searchGeoReq.getCompareClass()));
}
// 분석도엽
if(searchGeoReq.getMapSheetNum() != null && !searchGeoReq.getMapSheetNum().isEmpty()){
if (searchGeoReq.getMapSheetNum() != null && !searchGeoReq.getMapSheetNum().isEmpty()) {
List<Long> mapSheetNum = searchGeoReq.getMapSheetNum();
builder.and(mapSheetAnalDataGeom.mapSheetNum.in(mapSheetNum));
}
List<InferenceResultDto.Geom> content = queryFactory
.select(Projections.constructor(InferenceResultDto.Geom.class,
List<InferenceResultDto.Geom> content =
queryFactory
.select(
Projections.constructor(
InferenceResultDto.Geom.class,
mapSheetAnalDataGeom.compareYyyy,
mapSheetAnalDataGeom.targetYyyy,
mapSheetAnalDataGeom.classBeforeCd,
Expressions.stringTemplate("fn_code_name({0}, {1})", "0000", mapSheetAnalDataGeom.classBeforeCd),
Expressions.stringTemplate(
"fn_code_name({0}, {1})", "0000", mapSheetAnalDataGeom.classBeforeCd),
mapSheetAnalDataGeom.classBeforeProb,
mapSheetAnalDataGeom.classAfterCd,
Expressions.stringTemplate("fn_code_name({0}, {1})", "0000", mapSheetAnalDataGeom.classAfterCd),
Expressions.stringTemplate(
"fn_code_name({0}, {1})", "0000", mapSheetAnalDataGeom.classAfterCd),
mapSheetAnalDataGeom.classAfterProb,
mapSheetAnalDataGeom.mapSheetNum))
.from(mapSheetAnalDataGeom)
.where(builder)
.fetch()
;
.fetch();
long total = queryFactory
long total =
queryFactory
.select(mapSheetAnalDataGeom.id)
.from(mapSheetAnalDataGeom)
.where(
builder
)
.where(builder)
.fetchCount();
return new PageImpl<>(content, pageable, total);
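The queries above pair constructor-based DTO projection with Expressions.stringTemplate calls that defer to the PostgreSQL function fn_code_name. A self-contained sketch of how those two building blocks are assembled; Row, id and analState are illustrative stand-ins for the project's Q-paths and DTOs.

import com.querydsl.core.types.ConstructorExpression;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberPath;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.core.types.dsl.StringPath;

public class ProjectionSketch {
  // Target row type; parameter order must match the expression order below, exactly as
  // InferenceResultDto.AnalResList must match the select(...) above.
  public record Row(Long id, String state, String stateName) {}

  public static void main(String[] args) {
    NumberPath<Long> id = Expressions.numberPath(Long.class, "id");
    StringPath analState = Expressions.stringPath("analState");
    // Same trick as above: code-name resolution is delegated to the DB function fn_code_name.
    StringExpression stateName =
        Expressions.stringTemplate("fn_code_name({0}, {1})", "0002", analState);
    ConstructorExpression<Row> row =
        Projections.constructor(Row.class, id, analState, stateName);
    // Handed to query.select(row), QueryDSL builds one Row per result tuple.
    System.out.println(row.getArgs().size()); // 3
  }
}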

View File

@@ -1,36 +1,27 @@
package com.kamco.cd.kamcoback.postgres.repository;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface MapSheetLearnDataGeomRepository extends JpaRepository<MapSheetLearnDataGeomEntity, Long> {
public interface MapSheetLearnDataGeomRepository
extends JpaRepository<MapSheetLearnDataGeomEntity, Long> {
/**
* 데이터 UID로 지오메트리 정보 조회
*/
/** 데이터 UID로 지오메트리 정보 조회 */
List<MapSheetLearnDataGeomEntity> findByDataUid(Long dataUid);
/**
* 도엽 번호로 지오메트리 정보 조회
*/
/** 도엽 번호로 지오메트리 정보 조회 */
List<MapSheetLearnDataGeomEntity> findByMapSheetNum(Long mapSheetNum);
/**
* 연도 범위로 지오메트리 정보 조회
*/
List<MapSheetLearnDataGeomEntity> findByBeforeYyyyAndAfterYyyy(Integer beforeYyyy, Integer afterYyyy);
/** 연도 범위로 지오메트리 정보 조회 */
List<MapSheetLearnDataGeomEntity> findByBeforeYyyyAndAfterYyyy(
Integer beforeYyyy, Integer afterYyyy);
/**
* 지오메트리 타입별 조회
*/
/** 지오메트리 타입별 조회 */
List<MapSheetLearnDataGeomEntity> findByGeoType(String geoType);
/**
* 데이터 UID로 기존 지오메트리 데이터 삭제 (재생성 전에 사용)
*/
/** 데이터 UID로 기존 지오메트리 데이터 삭제 (재생성 전에 사용) */
void deleteByDataUid(Long dataUid);
}
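This repository relies entirely on Spring Data derived queries, so each method name encodes its WHERE clause. A small illustrative caller; the service name, wiring, and argument values are assumptions, not project code.

import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;

// Illustrative caller only.
@Service
@RequiredArgsConstructor
public class LearnDataGeomLookup {

  private final MapSheetLearnDataGeomRepository repository;

  public List<MapSheetLearnDataGeomEntity> geometriesForYears(int beforeYyyy, int afterYyyy) {
    // Derived query: where before_yyyy = ? and after_yyyy = ?
    return repository.findByBeforeYyyyAndAfterYyyy(beforeYyyy, afterYyyy);
  }
}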

View File

@@ -1,47 +1,32 @@
package com.kamco.cd.kamcoback.postgres.repository;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.List;
import java.util.Optional;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
@Repository
public interface MapSheetLearnDataRepository extends JpaRepository<MapSheetLearnDataEntity, Long> {
/**
* 데이터 이름으로 조회
*/
/** 데이터 이름으로 조회 */
Optional<MapSheetLearnDataEntity> findByDataName(String dataName);
/**
* 데이터 경로로 조회
*/
/** 데이터 경로로 조회 */
Optional<MapSheetLearnDataEntity> findByDataPath(String dataPath);
/**
* 처리 상태별 조회
*/
/** 처리 상태별 조회 */
List<MapSheetLearnDataEntity> findByDataState(String dataState);
/**
* 데이터 타입별 조회
*/
/** 데이터 타입별 조회 */
List<MapSheetLearnDataEntity> findByDataType(String dataType);
/**
* 분석 상태별 조회
*/
/** 분석 상태별 조회 */
List<MapSheetLearnDataEntity> findByAnalState(String analState);
/**
* 분석 상태별 개수 조회
*/
/** 분석 상태별 개수 조회 */
long countByAnalState(String analState);
/**
* 처리되지 않은 데이터 조회 (data_state가 'PENDING' 또는 null인 것들)
*/
/** 처리되지 않은 데이터 조회 (data_state가 'PENDING' 또는 null인 것들) */
List<MapSheetLearnDataEntity> findByDataStateIsNullOrDataState(String dataState);
}

View File

@@ -3,4 +3,5 @@ package com.kamco.cd.kamcoback.postgres.repository.changedetection;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface ChangeDetectionRepository extends JpaRepository<MapSheetAnalDataGeomEntity, Long>, ChangeDetectionRepositoryCustom {}
public interface ChangeDetectionRepository
extends JpaRepository<MapSheetAnalDataGeomEntity, Long>, ChangeDetectionRepositoryCustom {}

View File

@@ -1,9 +1,6 @@
package com.kamco.cd.kamcoback.postgres.repository.changedetection;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
public interface ChangeDetectionRepositoryCustom {
String getPolygonToPoint();
}

View File

@@ -1,12 +1,11 @@
package com.kamco.cd.kamcoback.postgres.repository.changedetection;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
import java.util.List;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
implements ChangeDetectionRepositoryCustom {

View File

@@ -88,12 +88,12 @@ public class CommonCodeRepositoryImpl extends QuerydslRepositorySupport
QCommonCodeEntity parent = QCommonCodeEntity.commonCodeEntity;
QCommonCodeEntity child = new QCommonCodeEntity("child");
String result = queryFactory
String result =
queryFactory
.select(child.name)
.from(child)
.join(child.parent, parent)
.where(parent.code.eq(parentCodeCd)
.and(child.code.eq(childCodeCd)))
.where(parent.code.eq(parentCodeCd).and(child.code.eq(childCodeCd)))
.fetchFirst(); // 단일 결과만
return Optional.ofNullable(result);

View File

@@ -18,8 +18,7 @@ public interface AuditLogRepositoryCustom {
Page<AuditLogDto.DailyDetail> findLogByDailyResult(
AuditLogDto.searchReq searchReq, LocalDate logDate);
Page<AuditLogDto.MenuDetail> findLogByMenuResult(
AuditLogDto.searchReq searchReq, String menuId);
Page<AuditLogDto.MenuDetail> findLogByMenuResult(AuditLogDto.searchReq searchReq, String menuId);
Page<AuditLogDto.UserDetail> findLogByAccountResult(
AuditLogDto.searchReq searchReq, Long accountId);

View File

@@ -52,9 +52,7 @@ public class AuditLogRepositoryImpl extends QuerydslRepositorySupport
printCount().as("printCount"),
downloadCount().as("downloadCount"),
auditLogEntity.count().as("totalCount"),
groupDateTime.as("baseDate")
)
)
groupDateTime.as("baseDate")))
.from(auditLogEntity)
.where(eventEndedAtBetween(startDate, endDate))
.groupBy(groupDateTime)
@@ -242,7 +240,9 @@ public class AuditLogRepositoryImpl extends QuerydslRepositorySupport
Projections.constructor(
AuditLogDto.MenuDetail.class,
auditLogEntity.id.as("logId"),
Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", auditLogEntity.createdDate).as("logDateTime"), //??
Expressions.stringTemplate(
"to_char({0}, 'YYYY-MM-DD')", auditLogEntity.createdDate)
.as("logDateTime"), // ??
userEntity.userNm.as("userName"),
userEntity.userId.as("loginId"),
auditLogEntity.eventType.as("eventType"),
@@ -307,7 +307,9 @@ public class AuditLogRepositoryImpl extends QuerydslRepositorySupport
Projections.constructor(
AuditLogDto.UserDetail.class,
auditLogEntity.id.as("logId"),
Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", auditLogEntity.createdDate).as("logDateTime"),
Expressions.stringTemplate(
"to_char({0}, 'YYYY-MM-DD')", auditLogEntity.createdDate)
.as("logDateTime"),
menuEntity.menuNm.as("menuName"),
auditLogEntity.eventType.as("eventType"),
Projections.constructor(

View File

@@ -3,4 +3,5 @@ package com.kamco.cd.kamcoback.postgres.repository.model;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface ModelMngRepository extends JpaRepository<ModelMngEntity, Long>, ModelMngRepositoryCustom {}
public interface ModelMngRepository
extends JpaRepository<ModelMngEntity, Long>, ModelMngRepositoryCustom {}

View File

@@ -2,11 +2,10 @@ package com.kamco.cd.kamcoback.postgres.repository.model;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import org.springframework.data.domain.Page;
import java.time.LocalDate;
import java.util.List;
import java.util.Optional;
import org.springframework.data.domain.Page;
public interface ModelMngRepositoryCustom {
@@ -14,5 +13,6 @@ public interface ModelMngRepositoryCustom {
Optional<ModelMngDto.FinalModelDto> getFinalModelInfo();
Page<ModelMngDto.ModelRegHistory> getRegHistoryList(ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal);
Page<ModelMngDto.ModelRegHistory> getRegHistoryList(
ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal);
}

View File

@@ -1,5 +1,9 @@
package com.kamco.cd.kamcoback.postgres.repository.model;
import static com.kamco.cd.kamcoback.postgres.entity.QModelDeployHstEntity.modelDeployHstEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity.modelMngEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity.modelVerEntity;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.QuerydslOrderUtil;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
@@ -10,21 +14,16 @@ import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import io.micrometer.common.util.StringUtils;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import static com.kamco.cd.kamcoback.postgres.entity.QModelDeployHstEntity.modelDeployHstEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity.modelMngEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity.modelVerEntity;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
implements ModelMngRepositoryCustom {
@@ -39,14 +38,11 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
@Override
public List<ModelMngEntity> findModelMngAll() {
return queryFactory
.selectFrom(modelMngEntity)
.orderBy(modelMngEntity.id.desc())
.fetch();
return queryFactory.selectFrom(modelMngEntity).orderBy(modelMngEntity.id.desc()).fetch();
}
@Override
public Optional<ModelMngDto.FinalModelDto> getFinalModelInfo(){
public Optional<ModelMngDto.FinalModelDto> getFinalModelInfo() {
return queryFactory
.select(
Projections.constructor(
@@ -60,20 +56,19 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
modelVerEntity.modelState,
modelVerEntity.qualityProb,
modelVerEntity.deployState,
modelVerEntity.modelPath
)
)
modelVerEntity.modelPath))
.from(modelMngEntity)
.innerJoin(modelVerEntity)
.on(modelMngEntity.id.eq(modelVerEntity.modelUid))
.where(modelVerEntity.usedState.eq("USED")) //USED 인 것 중에
.orderBy(modelVerEntity.modelVer.desc()) //Version 높은 것 기준
.where(modelVerEntity.usedState.eq("USED")) // USED 인 것 중에
.orderBy(modelVerEntity.modelVer.desc()) // Version 높은 것 기준
.stream()
.findFirst();
}
@Override
public Page<ModelMngDto.ModelRegHistory> getRegHistoryList(ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal) {
public Page<ModelMngDto.ModelRegHistory> getRegHistoryList(
ModelMngDto.searchReq searchReq, LocalDate startDate, LocalDate endDate, String searchVal) {
Pageable pageable = searchReq.toPageable();
List<ModelMngDto.ModelRegHistory> foundContent =
@@ -84,27 +79,30 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
modelMngEntity.modelNm,
modelMngEntity.modelCate,
modelVerEntity.modelVer,
Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", modelVerEntity.createdDate).as("createdDttm"),
Expressions.stringTemplate(
"to_char({0}, 'YYYY-MM-DD')", modelVerEntity.createdDate)
.as("createdDttm"),
modelVerEntity.usedState,
modelVerEntity.deployState,
Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", modelDeployHstEntity.deployDttm).as("deployDttm")
)
)
Expressions.stringTemplate(
"to_char({0}, 'YYYY-MM-DD')", modelDeployHstEntity.deployDttm)
.as("deployDttm")))
.from(modelMngEntity)
.innerJoin(modelVerEntity)
.on(modelMngEntity.id.eq(modelVerEntity.modelUid))
.leftJoin(modelDeployHstEntity)
.on(
modelVerEntity.id.eq(modelDeployHstEntity.modelVerUid)
.and(modelDeployHstEntity.serverId.eq(1L)) //1건만 조회해야 하기에 1번 서버만 조회하기
)
.where(
eventEndedAtBetween(startDate, endDate),
searchModelVerLike(searchVal)
modelVerEntity
.id
.eq(modelDeployHstEntity.modelVerUid)
.and(modelDeployHstEntity.serverId.eq(1L)) // 1건만 조회해야 하기에 1번 서버만 조회하기
)
.where(eventEndedAtBetween(startDate, endDate), searchModelVerLike(searchVal))
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(QuerydslOrderUtil.getOrderSpecifiers(pageable, ModelVerEntity.class, "modelVerEntity"))
.orderBy(
QuerydslOrderUtil.getOrderSpecifiers(
pageable, ModelVerEntity.class, "modelVerEntity"))
.fetch();
Long countQuery =
@@ -113,10 +111,7 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
.from(modelMngEntity)
.innerJoin(modelVerEntity)
.on(modelMngEntity.id.eq(modelVerEntity.modelUid))
.where(
eventEndedAtBetween(startDate, endDate),
searchModelVerLike(searchVal)
)
.where(eventEndedAtBetween(startDate, endDate), searchModelVerLike(searchVal))
.fetchOne();
return new PageImpl<>(foundContent, pageable, countQuery);
@@ -128,11 +123,13 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
}
LocalDateTime startDateTime = startDate.atStartOfDay();
LocalDateTime endDateTime = endDate.plusDays(1).atStartOfDay();
return modelMngEntity.createdDate.goe(ZonedDateTime.from(startDateTime))
return modelMngEntity
.createdDate
.goe(ZonedDateTime.from(startDateTime))
.and(modelMngEntity.modifiedDate.lt(ZonedDateTime.from(endDateTime)));
}
private BooleanExpression searchModelVerLike(String searchVal){
private BooleanExpression searchModelVerLike(String searchVal) {
if (StringUtils.isBlank(searchVal)) {
return null;
}
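One behavioral note on eventEndedAtBetween above: ZonedDateTime.from(LocalDateTime) throws DateTimeException at runtime, because a LocalDateTime carries no zone or offset to convert from. A hedged sketch of a zone-aware range, assuming the JVM default zone is the intended one; the dates are illustrative.

import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZonedDateTime;

public class DateRangeSketch {
  public static void main(String[] args) {
    LocalDate startDate = LocalDate.of(2025, 1, 1);
    LocalDate endDate = LocalDate.of(2025, 1, 31);
    ZoneId zone = ZoneId.systemDefault(); // assumption: the JVM zone matches the stored data
    ZonedDateTime start = startDate.atStartOfDay(zone);         // inclusive lower bound (goe)
    ZonedDateTime end = endDate.plusDays(1).atStartOfDay(zone); // exclusive upper bound (lt)
    System.out.println(start + " .. " + end);
    // ZonedDateTime.from(startDate.atStartOfDay()) would throw DateTimeException instead.
  }
}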

View File

@@ -1,7 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.model;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import com.kamco.cd.kamcoback.postgres.entity.ModelVerEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface ModelVerRepository extends JpaRepository<ModelVerEntity, Long>, ModelVerRepositoryCustom {}
public interface ModelVerRepository
extends JpaRepository<ModelVerEntity, Long>, ModelVerRepositoryCustom {}

View File

@@ -1,10 +1,6 @@
package com.kamco.cd.kamcoback.postgres.repository.model;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import com.kamco.cd.kamcoback.postgres.entity.ModelVerEntity;
import java.util.List;
import java.util.Optional;
public interface ModelVerRepositoryCustom {

View File

@@ -1,15 +1,14 @@
package com.kamco.cd.kamcoback.postgres.repository.model;
import static com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity.modelVerEntity;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import com.kamco.cd.kamcoback.postgres.entity.ModelVerEntity;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
import java.util.Optional;
import static com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity.modelVerEntity;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
public class ModelVerRepositoryImpl extends QuerydslRepositorySupport
implements ModelVerRepositoryCustom {
@@ -24,10 +23,10 @@ public class ModelVerRepositoryImpl extends QuerydslRepositorySupport
@Override
public Optional<ModelVerEntity> findModelVerById(Long id) {
return Optional.ofNullable(queryFactory
return Optional.ofNullable(
queryFactory
.selectFrom(modelVerEntity)
.where(modelVerEntity.id.eq(id)) //model_ver_uid
.fetchOne()
);
.where(modelVerEntity.id.eq(id)) // model_ver_uid
.fetchOne());
}
}