Merge pull request 'feat/infer_dev_260107' (#228) from feat/infer_dev_260107 into develop
Reviewed-on: https://kamco.gitea.gs.dabeeo.com/dabeeo/kamco-dabeeo-backoffice/pulls/228
This commit is contained in:
@@ -90,7 +90,8 @@ public class SecurityConfig {
|
||||
"/api/user/**",
|
||||
"/api/my/menus",
|
||||
"/api/common-code/**",
|
||||
"/api/training-data/label/**")
|
||||
"/api/training-data/label/**",
|
||||
"/api/training-data/review/**")
|
||||
.authenticated()
|
||||
.anyRequest()
|
||||
.access(menuAuthorizationManager)
|
||||
|
||||
@@ -173,6 +173,10 @@ public class ApiResponseDto<T> {
|
||||
+ "To reset your password again, please submit a new request through \"Forgot"
|
||||
+ " Password.\""),
|
||||
PAYLOAD_TOO_LARGE("업로드 용량 제한을 초과했습니다."),
|
||||
NOT_FOUND_TARGET_YEAR("기준년도 도엽을 찾을 수 없습니다."),
|
||||
NOT_FOUND_COMPARE_YEAR("비교년도 도엽을 찾을 수 없습니다."),
|
||||
FAIL_SAVE_MAP_SHEET("도엽 저장 중 오류가 발생했습니다."),
|
||||
FAIL_CREATE_MAP_SHEET_FILE("도엽 설정파일 생성 중 오류가 발생했습니다."),
|
||||
;
|
||||
// @formatter:on
|
||||
private final String message;
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.kamco.cd.kamcoback.inference;
|
||||
|
||||
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
|
||||
@@ -285,4 +286,25 @@ public class InferenceResultApiController {
|
||||
|
||||
return ApiResponseDto.ok(inferenceResultService.getInferenceStatus(uuid));
|
||||
}
|
||||
|
||||
@Operation(summary = "추론결과 기본정보", description = "추론결과 기본정보")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(
|
||||
responseCode = "200",
|
||||
description = "검색 성공",
|
||||
content =
|
||||
@Content(
|
||||
mediaType = "application/json",
|
||||
schema = @Schema(implementation = InferenceDetailDto.AnalResSummary.class))),
|
||||
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
|
||||
})
|
||||
@GetMapping("/infer-result-info")
|
||||
public ApiResponseDto<InferenceDetailDto.AnalResultInfo> getInferenceResultInfo(
|
||||
@Parameter(description = "회차 uuid", example = "932fbd72-2e8e-4a49-b189-09046787f9d1")
|
||||
@RequestParam
|
||||
String uuid) {
|
||||
return ApiResponseDto.ok(inferenceResultService.getInferenceResultInfo(uuid));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.kamco.cd.kamcoback.common.enums.DetectionClassification;
|
||||
import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import java.time.Duration;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
@@ -409,4 +410,51 @@ public class InferenceDetailDto {
|
||||
private UUID m2ModelUuid;
|
||||
private UUID m3ModelUuid;
|
||||
}
|
||||
|
||||
@Schema(name = "AnalResultInfo", description = "추론결과 기본정보")
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@NoArgsConstructor
|
||||
public static class AnalResultInfo {
|
||||
|
||||
private String analTitle;
|
||||
private String modelVer1;
|
||||
private String modelVer2;
|
||||
private String modelVer3;
|
||||
private Integer compareYyyy;
|
||||
private Integer targetYyyy;
|
||||
private String detectOption;
|
||||
private String mapSheetScope;
|
||||
@JsonFormatDttm private ZonedDateTime inferStartDttm;
|
||||
@JsonFormatDttm private ZonedDateTime inferEndDttm;
|
||||
|
||||
private Duration elapsedDuration;
|
||||
|
||||
public AnalResultInfo(
|
||||
String analTitle,
|
||||
String modelVer1,
|
||||
String modelVer2,
|
||||
String modelVer3,
|
||||
Integer compareYyyy,
|
||||
Integer targetYyyy,
|
||||
String detectOption,
|
||||
String mapSheetScope,
|
||||
ZonedDateTime inferStartDttm,
|
||||
ZonedDateTime inferEndDttm) {
|
||||
this.analTitle = analTitle;
|
||||
this.modelVer1 = modelVer1;
|
||||
this.modelVer2 = modelVer2;
|
||||
this.modelVer3 = modelVer3;
|
||||
this.compareYyyy = compareYyyy;
|
||||
this.targetYyyy = targetYyyy;
|
||||
this.detectOption = detectOption;
|
||||
this.mapSheetScope = mapSheetScope;
|
||||
this.inferStartDttm = inferStartDttm;
|
||||
this.inferEndDttm = inferEndDttm;
|
||||
this.elapsedDuration =
|
||||
(inferStartDttm != null && inferEndDttm != null)
|
||||
? Duration.between(inferStartDttm, inferEndDttm)
|
||||
: null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -181,12 +181,9 @@ public class InferenceResultDto {
|
||||
message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.")
|
||||
private String detectOption;
|
||||
|
||||
@Schema(
|
||||
description = "5k 도협 번호 목록",
|
||||
example =
|
||||
"[{\"mapSheetNum\":33605099,\"mapSheetName\":\"비양도\"},{\"mapSheetNum\":33605100,\"mapSheetName\":\"비양도\"},{\"mapSheetNum\":33606059,\"mapSheetName\":\"한림\"}]")
|
||||
@Schema(description = "5k 도협 번호 목록", example = "[33605,33606, 33610, 34802, 35603, 35611]")
|
||||
@NotNull
|
||||
private List<MapSheetNumDto> mapSheetNum;
|
||||
private List<String> mapSheetNum;
|
||||
}
|
||||
|
||||
@Getter
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
package com.kamco.cd.kamcoback.inference.service;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
|
||||
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient;
|
||||
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient.ExternalCallResult;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResultInfo;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Detail;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
|
||||
@@ -14,7 +16,6 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.DetectOption;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetNumDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
|
||||
@@ -26,10 +27,10 @@ import com.kamco.cd.kamcoback.model.dto.ModelMngDto.Basic;
|
||||
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
|
||||
import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService;
|
||||
import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
|
||||
import jakarta.persistence.EntityNotFoundException;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
@@ -57,6 +58,7 @@ public class InferenceResultService {
|
||||
private final MapSheetMngCoreService mapSheetMngCoreService;
|
||||
private final ModelMngCoreService modelMngCoreService;
|
||||
private final ExternalHttpClient externalHttpClient;
|
||||
private final ObjectMapper objectMapper;
|
||||
|
||||
@Value("${inference.url}")
|
||||
private String inferenceUrl;
|
||||
@@ -91,34 +93,107 @@ public class InferenceResultService {
|
||||
@Transactional
|
||||
public UUID saveInferenceInfo(InferenceResultDto.RegReq req) {
|
||||
|
||||
// 분석대상 도엽이 전체일때
|
||||
if (MapSheetScope.ALL.getId().equals(req.getMapSheetScope())) {
|
||||
// 변화탐지 실행 가능 기준 년도 조회
|
||||
List<MngListDto> targetList = mapSheetMngCoreService.getHstMapSheetList(req);
|
||||
|
||||
// 기준년도 조회
|
||||
List<MngListDto> targetList = mapSheetMngCoreService.getHstMapSheetList(req.getTargetYyyy());
|
||||
req.setMapSheetNum(createdMngDto(req, targetList));
|
||||
|
||||
} else {
|
||||
// 부분
|
||||
|
||||
List<String> mapTargetIds = new ArrayList<>();
|
||||
req.getMapSheetNum().forEach(dto -> mapTargetIds.add(dto.getMapSheetNum()));
|
||||
|
||||
// 기준년도 조회
|
||||
List<MngListDto> targetList =
|
||||
mapSheetMngCoreService.getHstMapSheetList(req.getTargetYyyy(), mapTargetIds);
|
||||
req.setMapSheetNum(createdMngDto(req, targetList));
|
||||
if (targetList.isEmpty()) {
|
||||
throw new CustomApiException("NOT_FOUND_TARGET_YEAR", HttpStatus.NOT_FOUND);
|
||||
}
|
||||
|
||||
if (req.getMapSheetNum().isEmpty()) {
|
||||
throw new EntityNotFoundException("분석 대상 정보가 부족합니다.");
|
||||
List<String> mapTargetIds = new ArrayList<>();
|
||||
for (MngListDto target : targetList) {
|
||||
mapTargetIds.add(target.getMapSheetNum());
|
||||
}
|
||||
|
||||
// 추론 테이블 저장
|
||||
UUID uuid = inferenceResultCoreService.saveInferenceInfo(req);
|
||||
// 변화탐지 실행 가능 비교년도 조회
|
||||
List<MngListCompareDto> compareList =
|
||||
mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds);
|
||||
|
||||
// 추론 실행 API 호출
|
||||
startInference(req, uuid);
|
||||
if (compareList.isEmpty()) {
|
||||
throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND);
|
||||
}
|
||||
|
||||
List<Map<String, Object>> totalNumList = new ArrayList<>();
|
||||
|
||||
if (DetectOption.EXCL.getId().equals(req.getDetectOption())) {
|
||||
// "추론제외" 일때 전년도 이전 값이 있어도 전년도 도엽이 없으면 비교 안함
|
||||
for (MngListCompareDto dto : compareList) {
|
||||
if (Objects.equals(dto.getBeforeYear(), req.getCompareYyyy())) {
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
map.put("beforeYear", dto.getBeforeYear());
|
||||
map.put("mapSheetNum", dto.getMapSheetNum());
|
||||
totalNumList.add(map);
|
||||
}
|
||||
}
|
||||
} else if (DetectOption.PREV.getId().equals(req.getDetectOption())) {
|
||||
// "이전 년도 도엽 사용" 이면 전년도 이전 도엽도 사용
|
||||
for (MngListCompareDto dto : compareList) {
|
||||
if (dto.getBeforeYear() != 0) {
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
map.put("beforeYear", dto.getBeforeYear());
|
||||
map.put("mapSheetNum", dto.getMapSheetNum());
|
||||
totalNumList.add(map);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (totalNumList.isEmpty()) {
|
||||
throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND);
|
||||
}
|
||||
|
||||
for (MngListDto target : targetList) {
|
||||
for (Map<String, Object> map : totalNumList) {
|
||||
if (target.getMapSheetNum().equals(map.get("mapSheetNum").toString())) {
|
||||
target.setBeforeYear(map.get("beforeYear").toString());
|
||||
target.setIsSuccess(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 목록 및 추론 대상 도엽정보 저장
|
||||
UUID uuid = inferenceResultCoreService.saveInferenceInfo(req, targetList);
|
||||
|
||||
// 추론에 필요한 geojson 파일 생성
|
||||
List<String> mapSheetNumList =
|
||||
targetList.stream()
|
||||
.filter(t -> Boolean.TRUE.equals(t.getIsSuccess()))
|
||||
.map(MngListDto::getMapSheetNum)
|
||||
.toList();
|
||||
|
||||
// 비교년도 geojson 파일 생성하여 경로 받기
|
||||
String modelComparePath =
|
||||
getSceneInference(
|
||||
String.valueOf(req.getCompareYyyy()), mapSheetNumList, req.getMapSheetScope());
|
||||
|
||||
// 기준년도 geojson 파일 생성하여 경로 받기
|
||||
String modelTargetPath =
|
||||
getSceneInference(
|
||||
String.valueOf(req.getTargetYyyy()), mapSheetNumList, req.getMapSheetScope());
|
||||
|
||||
// ai 서버에 전달할 파라미터 생성
|
||||
pred_requests_areas predRequestsAreas = new pred_requests_areas();
|
||||
predRequestsAreas.setInput1_year(req.getCompareYyyy());
|
||||
predRequestsAreas.setInput2_year(req.getTargetYyyy());
|
||||
predRequestsAreas.setInput1_scene_path(modelComparePath);
|
||||
predRequestsAreas.setInput2_scene_path(modelTargetPath);
|
||||
|
||||
InferenceSendDto m1 = this.getModelInfo(req.getModel1Uuid());
|
||||
m1.setPred_requests_areas(predRequestsAreas);
|
||||
|
||||
// ai 추론 실행 api 호출
|
||||
Long batchId = ensureAccepted(m1);
|
||||
|
||||
// ai 추론 실행후 응답값 update
|
||||
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
|
||||
saveInferenceAiDto.setUuid(uuid);
|
||||
saveInferenceAiDto.setBatchId(batchId);
|
||||
saveInferenceAiDto.setStatus(Status.IN_PROGRESS.getId());
|
||||
saveInferenceAiDto.setType("M1");
|
||||
saveInferenceAiDto.setInferStartDttm(ZonedDateTime.now());
|
||||
saveInferenceAiDto.setModelComparePath(modelComparePath);
|
||||
saveInferenceAiDto.setModelTargetPath(modelTargetPath);
|
||||
saveInferenceAiDto.setModelStartDttm(ZonedDateTime.now());
|
||||
inferenceResultCoreService.update(saveInferenceAiDto);
|
||||
|
||||
return uuid;
|
||||
}
|
||||
@@ -185,104 +260,67 @@ public class InferenceResultService {
|
||||
return mapSheetNum;
|
||||
}
|
||||
|
||||
/**
|
||||
* 추론 실행 API 호출
|
||||
*
|
||||
* @param req
|
||||
*/
|
||||
private void startInference(InferenceResultDto.RegReq req, UUID uuid) {
|
||||
|
||||
List<MapSheetNumDto> mapSheetNum = req.getMapSheetNum();
|
||||
List<String> mapSheetNumList = new ArrayList<>();
|
||||
|
||||
for (MapSheetNumDto mapSheetDto : mapSheetNum) {
|
||||
mapSheetNumList.add(mapSheetDto.getMapSheetNum());
|
||||
}
|
||||
|
||||
String modelComparePath =
|
||||
getSceneInference(String.valueOf(req.getCompareYyyy()), mapSheetNumList);
|
||||
String modelTargetPath =
|
||||
getSceneInference(String.valueOf(req.getTargetYyyy()), mapSheetNumList);
|
||||
|
||||
pred_requests_areas predRequestsAreas = new pred_requests_areas();
|
||||
predRequestsAreas.setInput1_year(req.getCompareYyyy());
|
||||
predRequestsAreas.setInput2_year(req.getTargetYyyy());
|
||||
predRequestsAreas.setInput1_scene_path(modelComparePath);
|
||||
predRequestsAreas.setInput2_scene_path(modelTargetPath);
|
||||
|
||||
InferenceSendDto m1 = this.getModelInfo(req.getModel1Uuid());
|
||||
InferenceSendDto m2 = this.getModelInfo(req.getModel2Uuid());
|
||||
InferenceSendDto m3 = this.getModelInfo(req.getModel3Uuid());
|
||||
|
||||
m1.setPred_requests_areas(predRequestsAreas);
|
||||
m2.setPred_requests_areas(predRequestsAreas);
|
||||
m3.setPred_requests_areas(predRequestsAreas);
|
||||
|
||||
Long batchId = this.ensureAccepted(m1);
|
||||
|
||||
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
|
||||
saveInferenceAiDto.setUuid(uuid);
|
||||
saveInferenceAiDto.setBatchId(batchId);
|
||||
saveInferenceAiDto.setStatus(Status.IN_PROGRESS.getId());
|
||||
saveInferenceAiDto.setType("M1");
|
||||
saveInferenceAiDto.setInferStartDttm(ZonedDateTime.now());
|
||||
saveInferenceAiDto.setModelComparePath(modelComparePath);
|
||||
saveInferenceAiDto.setModelTargetPath(modelTargetPath);
|
||||
saveInferenceAiDto.setModelStartDttm(ZonedDateTime.now());
|
||||
inferenceResultCoreService.update(saveInferenceAiDto);
|
||||
}
|
||||
|
||||
/**
|
||||
* 추론 AI API 호출
|
||||
*
|
||||
* @param dto
|
||||
*/
|
||||
private Long ensureAccepted(InferenceSendDto dto) {
|
||||
log.info("dto null? {}", dto == null);
|
||||
ObjectMapper om = new ObjectMapper();
|
||||
try {
|
||||
log.info("dto json={}", om.writeValueAsString(dto));
|
||||
} catch (Exception e) {
|
||||
log.error(e.getMessage());
|
||||
|
||||
if (dto == null) {
|
||||
log.warn("not InferenceSendDto dto");
|
||||
throw new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST);
|
||||
}
|
||||
|
||||
HttpHeaders headers = new HttpHeaders();
|
||||
headers.setContentType(MediaType.APPLICATION_JSON);
|
||||
headers.setAccept(List.of(MediaType.APPLICATION_JSON));
|
||||
// 1) 요청 로그 (debug 권장)
|
||||
try {
|
||||
log.debug("Inference request dto={}", objectMapper.writeValueAsString(dto));
|
||||
} catch (JsonProcessingException e) {
|
||||
log.warn("Failed to serialize inference dto", e);
|
||||
}
|
||||
|
||||
// TODO 추후 삭제
|
||||
// 2) local 환경 임시 처리 (NPE 방어)
|
||||
if ("local".equals(profile)) {
|
||||
if (dto.getPred_requests_areas() == null) {
|
||||
throw new IllegalStateException("pred_requests_areas is null");
|
||||
}
|
||||
dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson");
|
||||
dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson");
|
||||
}
|
||||
|
||||
// 3) HTTP 호출
|
||||
HttpHeaders headers = new HttpHeaders();
|
||||
headers.setContentType(MediaType.APPLICATION_JSON);
|
||||
headers.setAccept(List.of(MediaType.APPLICATION_JSON));
|
||||
|
||||
ExternalCallResult<String> result =
|
||||
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
|
||||
|
||||
int status = result.statusCode();
|
||||
String body = result.body();
|
||||
|
||||
if (status < 200 || status >= 300) {
|
||||
if (result.statusCode() < 200 || result.statusCode() >= 300) {
|
||||
log.error("Inference API failed. status={}, body={}", result.statusCode(), result.body());
|
||||
throw new CustomApiException("BAD_GATEWAY", HttpStatus.BAD_GATEWAY);
|
||||
}
|
||||
|
||||
Long batchId = 0L;
|
||||
|
||||
// 4) 응답 파싱
|
||||
try {
|
||||
List<Map<String, Object>> list =
|
||||
om.readValue(body, new TypeReference<List<Map<String, Object>>>() {});
|
||||
objectMapper.readValue(result.body(), new TypeReference<>() {});
|
||||
|
||||
Integer batchIdInt = (Integer) list.get(0).get("batch_id");
|
||||
batchId = batchIdInt.longValue();
|
||||
if (list.isEmpty()) {
|
||||
throw new IllegalStateException("Inference response is empty");
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error(e.getMessage());
|
||||
}
|
||||
Object batchIdObj = list.get(0).get("batch_id");
|
||||
if (batchIdObj == null) {
|
||||
throw new IllegalStateException("batch_id not found in response");
|
||||
}
|
||||
|
||||
return batchId;
|
||||
return Long.valueOf(batchIdObj.toString());
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("Failed to parse inference response. body={}", result.body(), e);
|
||||
throw new CustomApiException("INVALID_INFERENCE_RESPONSE", HttpStatus.BAD_GATEWAY);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -335,11 +373,13 @@ public class InferenceResultService {
|
||||
/**
|
||||
* geojson 파일 생성
|
||||
*
|
||||
* @param yyyy
|
||||
* @param mapSheetNums
|
||||
* @param yyyy 영상관리 파일별 년도
|
||||
* @param mapSheetNums 5k 도엽 번호 리스트
|
||||
* @param mapSheetScope EXCL : 추론제외, PREV 이전 년도 도엽 사용
|
||||
* @return
|
||||
*/
|
||||
private String getSceneInference(String yyyy, List<String> mapSheetNums) {
|
||||
return mapSheetMngCoreService.getSceneInference(yyyy, mapSheetNums);
|
||||
private String getSceneInference(String yyyy, List<String> mapSheetNums, String mapSheetScope) {
|
||||
return mapSheetMngCoreService.getSceneInference(yyyy, mapSheetNums, mapSheetScope);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -441,4 +481,8 @@ public class InferenceResultService {
|
||||
|
||||
return dto;
|
||||
}
|
||||
|
||||
public AnalResultInfo getInferenceResultInfo(String uuid) {
|
||||
return inferenceResultCoreService.getInferenceResultInfo(uuid);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.WorkHistoryDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.UpdateClosedRequest;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerListResponse;
|
||||
import com.kamco.cd.kamcoback.label.service.LabelAllocateService;
|
||||
@@ -259,32 +260,32 @@ public class LabelAllocateApiController {
|
||||
name = "라벨링 종료",
|
||||
value =
|
||||
"""
|
||||
{"closedType": "LABELING", "closedYn": "Y"}
|
||||
"""),
|
||||
{"closedType": "LABELING", "closedYn": "Y"}
|
||||
"""),
|
||||
@io.swagger.v3.oas.annotations.media.ExampleObject(
|
||||
name = "검수 종료",
|
||||
value =
|
||||
"""
|
||||
{"closedType": "INSPECTION", "closedYn": "Y"}
|
||||
"""),
|
||||
{"closedType": "INSPECTION", "closedYn": "Y"}
|
||||
"""),
|
||||
@io.swagger.v3.oas.annotations.media.ExampleObject(
|
||||
name = "라벨링 재개",
|
||||
value =
|
||||
"""
|
||||
{"closedType": "LABELING", "closedYn": "N"}
|
||||
"""),
|
||||
{"closedType": "LABELING", "closedYn": "N"}
|
||||
"""),
|
||||
@io.swagger.v3.oas.annotations.media.ExampleObject(
|
||||
name = "검수 재개",
|
||||
value =
|
||||
"""
|
||||
{"closedType": "INSPECTION", "closedYn": "N"}
|
||||
"""),
|
||||
{"closedType": "INSPECTION", "closedYn": "N"}
|
||||
"""),
|
||||
@io.swagger.v3.oas.annotations.media.ExampleObject(
|
||||
name = "특정 프로젝트 라벨링 종료",
|
||||
name = "특정 프로젝트 라벨링 전체 종료",
|
||||
value =
|
||||
"""
|
||||
{"uuid": "f97dc186-e6d3-4645-9737-3173dde8dc64", "closedType": "LABELING", "closedYn": "Y"}
|
||||
""")
|
||||
{"uuid": "f97dc186-e6d3-4645-9737-3173dde8dc64", "closedType": "INSPECTION", "closedYn": "Y"}
|
||||
""")
|
||||
}))
|
||||
@RequestBody
|
||||
@Valid
|
||||
@@ -302,4 +303,21 @@ public class LabelAllocateApiController {
|
||||
return ApiResponseDto.okObject(
|
||||
new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, statusMessage));
|
||||
}
|
||||
|
||||
@Operation(summary = "라벨링작업 관리 > 상세 > 작업이력", description = "라벨링작업 관리 > 상세 > 작업이력")
|
||||
@GetMapping("/work-history-list")
|
||||
public ApiResponseDto<Page<WorkHistoryDto>> findWorkHistoryList(
|
||||
@RequestParam(defaultValue = "0", required = true) int page,
|
||||
@RequestParam(defaultValue = "20", required = true) int size,
|
||||
@Parameter(description = "사번", required = true, example = "123456") @RequestParam
|
||||
String userId,
|
||||
@Schema(
|
||||
allowableValues = {"LABELER", "REVIEWER"},
|
||||
defaultValue = "LABELER")
|
||||
@Parameter(description = "라벨러/검수자(LABELER/REVIEWER)", required = true)
|
||||
@RequestParam
|
||||
String type) {
|
||||
LabelAllocateDto.searchReq searchReq = new LabelAllocateDto.searchReq(page, size, "");
|
||||
return ApiResponseDto.ok(labelAllocateService.findWorkHistoryList(searchReq, userId, type));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -314,4 +314,48 @@ public class LabelAllocateDto {
|
||||
private Long totalCnt;
|
||||
private List<MoveUserList> moveUserList;
|
||||
}
|
||||
|
||||
@Schema(name = "WorkHistoryDto", description = "WorkHistoryDto")
|
||||
@Getter
|
||||
@Setter
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public static class WorkHistoryDto {
|
||||
|
||||
@Schema(description = "행 번호")
|
||||
private Integer rowNum;
|
||||
|
||||
@Schema(description = "변화탐지년도", example = "2021-2022")
|
||||
private String changeDetectionYear;
|
||||
|
||||
@Schema(description = "국유IN 회차")
|
||||
private Long stage;
|
||||
|
||||
@Schema(description = "반영일")
|
||||
private ZonedDateTime gukyuinApplyDttm;
|
||||
|
||||
@Schema(description = "할당건수")
|
||||
private Long assignedCnt;
|
||||
|
||||
@Schema(description = "완료건수")
|
||||
private Long completeCnt;
|
||||
|
||||
@Schema(description = "Skip건수")
|
||||
private Long skipCnt;
|
||||
|
||||
@Schema(description = "잔여건수")
|
||||
private Long remainCnt;
|
||||
|
||||
@Schema(description = "상태 (진행중/완료)")
|
||||
private String status;
|
||||
|
||||
@Schema(description = "진행률 (%)")
|
||||
private Double percent;
|
||||
|
||||
@Schema(description = "작업기간 시작일")
|
||||
private ZonedDateTime createdDttm;
|
||||
|
||||
@Schema(description = "작업기간 종료일")
|
||||
private ZonedDateTime projectCloseDttm;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,6 +58,10 @@ public class LabelWorkDto {
|
||||
private String labelingClosedYn;
|
||||
private String inspectionClosedYn;
|
||||
|
||||
private Long inspectorCompleteTotCnt;
|
||||
private Long inspectorRemainCnt;
|
||||
private ZonedDateTime projectCloseDttm;
|
||||
|
||||
@JsonProperty("detectYear")
|
||||
public String getDetectYear() {
|
||||
if (compareYyyy == null || targetYyyy == null) {
|
||||
|
||||
@@ -10,6 +10,8 @@ import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveInfo;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.TargetUser;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.WorkHistoryDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.searchReq;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerListResponse;
|
||||
@@ -250,4 +252,12 @@ public class LabelAllocateService {
|
||||
|
||||
labelAllocateCoreService.updateClosedYnByUuid(targetUuid, closedType, closedYn);
|
||||
}
|
||||
|
||||
public Page<WorkHistoryDto> findWorkHistoryList(searchReq searchReq, String userId, String type) {
|
||||
if (type.equals("LABELER")) {
|
||||
return labelAllocateCoreService.workLabelHistoryList(searchReq, userId);
|
||||
} else {
|
||||
return labelAllocateCoreService.workReviewerHistoryList(searchReq, userId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -427,6 +427,8 @@ public class MapSheetMngDto {
|
||||
private int mngYyyy;
|
||||
private String mapSheetNum;
|
||||
private String mapSheetName;
|
||||
private String beforeYear;
|
||||
private Boolean isSuccess;
|
||||
}
|
||||
|
||||
@Schema(name = "MngListDto", description = "영상파일내역 검색 목록")
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
package com.kamco.cd.kamcoback.postgres.core;
|
||||
|
||||
import com.kamco.cd.kamcoback.common.enums.CommonUseStatus;
|
||||
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
|
||||
import com.kamco.cd.kamcoback.common.utils.UserUtil;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResultInfo;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.InferenceBatchSheet;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
|
||||
@@ -10,9 +11,9 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetNumDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
|
||||
@@ -28,14 +29,17 @@ import jakarta.validation.constraints.NotNull;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.log4j.Log4j2;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
@Service
|
||||
@Log4j2
|
||||
@RequiredArgsConstructor
|
||||
public class InferenceResultCoreService {
|
||||
|
||||
@@ -63,17 +67,26 @@ public class InferenceResultCoreService {
|
||||
*
|
||||
* @param req
|
||||
*/
|
||||
public UUID saveInferenceInfo(InferenceResultDto.RegReq req) {
|
||||
public UUID saveInferenceInfo(InferenceResultDto.RegReq req, List<MngListDto> targetList) {
|
||||
String firstMapSheetName = null;
|
||||
String mapSheetName = "";
|
||||
int detectingCnt = 0;
|
||||
|
||||
String mapSheetName =
|
||||
req.getMapSheetNum().get(0).getMapSheetName()
|
||||
+ " 외 "
|
||||
+ (req.getMapSheetNum().size() - 1)
|
||||
+ "건";
|
||||
for (MngListDto dto : targetList) {
|
||||
if (Boolean.TRUE.equals(dto.getIsSuccess())) {
|
||||
if (detectingCnt == 0) {
|
||||
firstMapSheetName = dto.getMapSheetName();
|
||||
}
|
||||
detectingCnt++;
|
||||
}
|
||||
}
|
||||
|
||||
if (req.getMapSheetNum().size() == 1) {
|
||||
mapSheetName =
|
||||
req.getMapSheetNum().get(0).getMapSheetName() + " " + req.getMapSheetNum().size() + "건";
|
||||
if (detectingCnt == 0) {
|
||||
mapSheetName = "";
|
||||
} else if (detectingCnt == 1) {
|
||||
mapSheetName = firstMapSheetName + " 1건";
|
||||
} else {
|
||||
mapSheetName = firstMapSheetName + " 외 " + (detectingCnt - 1) + "건";
|
||||
}
|
||||
|
||||
MapSheetLearnEntity mapSheetLearnEntity = new MapSheetLearnEntity();
|
||||
@@ -87,7 +100,9 @@ public class InferenceResultCoreService {
|
||||
mapSheetLearnEntity.setDetectOption(req.getDetectOption());
|
||||
mapSheetLearnEntity.setCreatedUid(userUtil.getId());
|
||||
mapSheetLearnEntity.setMapSheetCnt(mapSheetName);
|
||||
mapSheetLearnEntity.setDetectingCnt((long) req.getMapSheetNum().size());
|
||||
mapSheetLearnEntity.setDetectingCnt((long) detectingCnt);
|
||||
mapSheetLearnEntity.setStage(
|
||||
mapSheetLearnRepository.getLearnStage(req.getCompareYyyy(), req.getTargetYyyy()));
|
||||
|
||||
// learn 테이블 저장
|
||||
MapSheetLearnEntity savedLearn = mapSheetLearnRepository.save(mapSheetLearnEntity);
|
||||
@@ -95,14 +110,16 @@ public class InferenceResultCoreService {
|
||||
final int CHUNK = 1000;
|
||||
List<MapSheetLearn5kEntity> buffer = new ArrayList<>(CHUNK);
|
||||
|
||||
// learn 도엽 저장
|
||||
for (MapSheetNumDto mapSheetNum : req.getMapSheetNum()) {
|
||||
MapSheetLearn5kEntity e = new MapSheetLearn5kEntity();
|
||||
e.setLearn(savedLearn);
|
||||
e.setMapSheetNum(Long.parseLong(mapSheetNum.getMapSheetNum()));
|
||||
e.setCreatedUid(userUtil.getId());
|
||||
buffer.add(e);
|
||||
// learn 도엽별 저장
|
||||
for (MngListDto mngDto : targetList) {
|
||||
MapSheetLearn5kEntity entity = new MapSheetLearn5kEntity();
|
||||
entity.setLearn(savedLearn);
|
||||
entity.setMapSheetNum(Long.parseLong(mngDto.getMapSheetNum()));
|
||||
entity.setBeforeYear(Integer.valueOf(mngDto.getBeforeYear()));
|
||||
entity.setIsSuccess(mngDto.getIsSuccess() != null && mngDto.getIsSuccess());
|
||||
entity.setCreatedUid(userUtil.getId());
|
||||
|
||||
buffer.add(entity);
|
||||
if (buffer.size() == CHUNK) {
|
||||
flushChunk(buffer);
|
||||
buffer.clear();
|
||||
@@ -117,33 +134,24 @@ public class InferenceResultCoreService {
|
||||
return savedLearn.getUuid();
|
||||
}
|
||||
|
||||
/**
|
||||
* 도엽별 저장
|
||||
*
|
||||
* @param buffer
|
||||
*/
|
||||
private void flushChunk(List<MapSheetLearn5kEntity> buffer) {
|
||||
|
||||
// 청크 번호 추출 in 조건 만들기
|
||||
List<String> chunkNums =
|
||||
buffer.stream().map(e -> String.valueOf(e.getMapSheetNum())).distinct().toList();
|
||||
|
||||
// 추론 제외
|
||||
List<MapInkx5kEntity> usedEntities =
|
||||
mapInkx5kRepository.findByMapSheetNumInAndUseInference(chunkNums, CommonUseStatus.USE);
|
||||
|
||||
// TODO 추론 제외 했으면 파일 있는지도 확인 해야함
|
||||
// 조회 결과에서 번호만 Set으로
|
||||
Set<String> usedSet =
|
||||
usedEntities.stream()
|
||||
.map(MapInkx5kEntity::getMapidcdNo)
|
||||
.collect(java.util.stream.Collectors.toSet());
|
||||
|
||||
// 필터 후 저장
|
||||
List<MapSheetLearn5kEntity> toSave =
|
||||
buffer.stream().filter(e -> usedSet.contains(String.valueOf(e.getMapSheetNum()))).toList();
|
||||
|
||||
if (!toSave.isEmpty()) {
|
||||
mapSheetLearn5kRepository.saveAll(toSave);
|
||||
mapSheetLearn5kRepository.flush();
|
||||
try {
|
||||
if (!buffer.isEmpty()) {
|
||||
mapSheetLearn5kRepository.saveAll(buffer);
|
||||
mapSheetLearn5kRepository.flush();
|
||||
}
|
||||
} catch (DataAccessException e) {
|
||||
log.error("FAIL_SAVE_MAP_SHEET(도엽 저장 중 오류가 발생했습니다.): bufferSize={}", buffer.size(), e);
|
||||
throw new CustomApiException("FAIL_SAVE_MAP_SHEET", HttpStatus.INTERNAL_SERVER_ERROR);
|
||||
} finally {
|
||||
entityManager.clear();
|
||||
}
|
||||
|
||||
entityManager.clear();
|
||||
}
|
||||
|
||||
/****/
|
||||
@@ -354,4 +362,17 @@ public class InferenceResultCoreService {
|
||||
public UUID getProcessing() {
|
||||
return mapSheetLearnRepository.getProcessing();
|
||||
}
|
||||
|
||||
/**
|
||||
* @param compareYear 비교년도
|
||||
* @param targetYear 기준년도
|
||||
* @return
|
||||
*/
|
||||
public Integer getLearnStage(Integer compareYear, Integer targetYear) {
|
||||
return mapSheetLearnRepository.getLearnStage(compareYear, targetYear);
|
||||
}
|
||||
|
||||
public AnalResultInfo getInferenceResultInfo(String uuid) {
|
||||
return mapSheetLearnRepository.getInferenceResultInfo(uuid);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveInfo;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.WorkHistoryDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.searchReq;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
|
||||
@@ -145,4 +146,13 @@ public class LabelAllocateCoreService {
|
||||
public void updateClosedYnByUuid(String uuid, String closedType, String closedYn) {
|
||||
labelAllocateRepository.updateClosedYnByUuid(uuid, closedType, closedYn);
|
||||
}
|
||||
|
||||
public Page<WorkHistoryDto> workLabelHistoryList(
|
||||
LabelAllocateDto.searchReq searchReq, String userId) {
|
||||
return labelAllocateRepository.workLabelHistoryList(searchReq, userId);
|
||||
}
|
||||
|
||||
public Page<WorkHistoryDto> workReviewerHistoryList(searchReq searchReq, String userId) {
|
||||
return labelAllocateRepository.workReviewerHistoryList(searchReq, userId);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
package com.kamco.cd.kamcoback.postgres.core;
|
||||
|
||||
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
|
||||
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter;
|
||||
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListCompareDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto;
|
||||
@@ -14,6 +17,7 @@ import jakarta.persistence.EntityNotFoundException;
|
||||
import jakarta.validation.Valid;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
@@ -23,6 +27,7 @@ import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
@@ -206,49 +211,82 @@ public class MapSheetMngCoreService {
|
||||
}
|
||||
}
|
||||
|
||||
public String getSceneInference(String yyyy, List<String> scenes) {
|
||||
String outputPath = "";
|
||||
/**
|
||||
* 추론 실행에 필요한 geojson 파일 생성
|
||||
*
|
||||
* @param yyyy 영상관리 파일별 년도
|
||||
* @param scenes 5k 도엽 번호 리스트
|
||||
* @param mapSheetScope EXCL : 추론제외, PREV 이전 년도 도엽 사용
|
||||
* @return
|
||||
*/
|
||||
public String getSceneInference(String yyyy, List<String> scenes, String mapSheetScope) {
|
||||
|
||||
try {
|
||||
List<ImageFeature> sceneInference = mapSheetMngRepository.getSceneInference(yyyy, scenes);
|
||||
boolean isAll = MapSheetScope.ALL.getId().equals(mapSheetScope);
|
||||
|
||||
if (sceneInference == null || sceneInference.isEmpty()) {
|
||||
log.warn("No scene data found for year: {}", yyyy);
|
||||
return outputPath;
|
||||
}
|
||||
// 1) 경로/파일명 결정
|
||||
String targetDir =
|
||||
"local".equals(activeEnv) ? System.getProperty("user.home") + "/geojson" : inferenceDir;
|
||||
|
||||
if (activeEnv.equals("local")) {
|
||||
inferenceDir = System.getProperty("user.home") + "/geojson";
|
||||
}
|
||||
String filename = String.format("%s_%s.geojson", yyyy, activeEnv);
|
||||
outputPath = Paths.get(inferenceDir, filename).toString();
|
||||
String filename =
|
||||
isAll
|
||||
? String.format("%s_%s_ALL.geojson", yyyy, activeEnv)
|
||||
: String.format("%s_%s.geojson", yyyy, activeEnv);
|
||||
|
||||
// 디렉토리가 없으면 생성
|
||||
Files.createDirectories(Paths.get(inferenceDir));
|
||||
Path outputPath = Paths.get(targetDir, filename);
|
||||
|
||||
// GeoJSON 파일 생성 (EPSG:5186 - Korea 2000 / Central Belt 2010)
|
||||
GeoJsonFileWriter writer = new GeoJsonFileWriter();
|
||||
writer.exportToFile(sceneInference, "scene_inference_" + yyyy, 5186, outputPath);
|
||||
|
||||
log.info("GeoJSON file created successfully: {}", outputPath);
|
||||
log.info("Total features exported: {}", sceneInference.size());
|
||||
|
||||
} catch (IOException e) {
|
||||
log.error("Failed to create GeoJSON file for year: {}", yyyy, e);
|
||||
throw new RuntimeException("GeoJSON 파일 생성 실패: " + e.getMessage(), e);
|
||||
// 2) ALL일 때만 재사용
|
||||
if (isAll && Files.exists(outputPath)) {
|
||||
return outputPath.toString();
|
||||
}
|
||||
|
||||
return outputPath;
|
||||
// 3) 데이터 조회 (파일 없을 때만)
|
||||
List<ImageFeature> sceneInference = mapSheetMngRepository.getSceneInference(yyyy, scenes);
|
||||
if (sceneInference == null || sceneInference.isEmpty()) {
|
||||
log.warn(
|
||||
"NOT_FOUND_TARGET_YEAR: yyyy={}, isAll={}, scenesSize={}",
|
||||
yyyy,
|
||||
isAll,
|
||||
scenes == null ? 0 : scenes.size());
|
||||
throw new CustomApiException("NOT_FOUND_TARGET_YEAR", HttpStatus.NOT_FOUND);
|
||||
}
|
||||
|
||||
// 4) 파일 생성
|
||||
try {
|
||||
Files.createDirectories(outputPath.getParent());
|
||||
|
||||
new GeoJsonFileWriter()
|
||||
.exportToFile(sceneInference, "scene_inference_" + yyyy, 5186, outputPath.toString());
|
||||
|
||||
return outputPath.toString();
|
||||
|
||||
} catch (IOException e) {
|
||||
log.error(
|
||||
"FAIL_CREATE_MAP_SHEET_FILE: yyyy={}, isAll={}, path={}", yyyy, isAll, outputPath, e);
|
||||
throw new CustomApiException("FAIL_CREATE_MAP_SHEET_FILE", HttpStatus.INTERNAL_SERVER_ERROR);
|
||||
}
|
||||
}
|
||||
|
||||
public List<MngListDto> getHstMapSheetList(int mngYyyy) {
|
||||
return mapSheetMngRepository.findByHstMapSheetTargetList(mngYyyy);
|
||||
/**
|
||||
* 변화탐지 실행 가능 기준 년도 조회
|
||||
*
|
||||
* @param req
|
||||
* @return
|
||||
*/
|
||||
public List<MngListDto> getHstMapSheetList(InferenceResultDto.RegReq req) {
|
||||
return mapSheetMngRepository.findByHstMapSheetTargetList(req);
|
||||
}
|
||||
|
||||
public List<MngListDto> getHstMapSheetList(int mngYyyy, List<String> mapIds) {
|
||||
return mapSheetMngRepository.findByHstMapSheetTargetList(mngYyyy, mapIds);
|
||||
}
|
||||
|
||||
/**
|
||||
* 변화탐지 실행 가능 비교년도 조회
|
||||
*
|
||||
* @param mngYyyy
|
||||
* @param mapId
|
||||
* @return
|
||||
*/
|
||||
public List<MngListCompareDto> getByHstMapSheetCompareList(int mngYyyy, List<String> mapId) {
|
||||
return mapSheetMngRepository.findByHstMapSheetCompareList(mngYyyy, mapId);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.kamco.cd.kamcoback.postgres.core;
|
||||
|
||||
import com.kamco.cd.kamcoback.postgres.repository.trainingdata.TrainingDataLabelRepository;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.DefaultPaging;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.DetailRes;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.GeoFeatureRequest.Properties;
|
||||
@@ -79,4 +80,13 @@ public class TrainingDataLabelCoreService {
|
||||
public DefaultPaging getDefaultPagingNumber(String userId, Integer size, String assignmentUid) {
|
||||
return trainingDataLabelRepository.getDefaultPagingNumber(userId, size, assignmentUid);
|
||||
}
|
||||
|
||||
public void saveNewPolygon(TrainingDataLabelDto.NewPolygonRequest request) {
|
||||
trainingDataLabelRepository.saveNewPolygon(request);
|
||||
}
|
||||
|
||||
public TrainingDataLabelDto.CogImageResponse getCogImageUrl(
|
||||
String mapSheetNum, Integer beforeYear, Integer afterYear) {
|
||||
return trainingDataLabelRepository.getCogImageUrl(mapSheetNum, beforeYear, afterYear);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
package com.kamco.cd.kamcoback.postgres.core;
|
||||
|
||||
import com.kamco.cd.kamcoback.postgres.repository.trainingdata.TrainingDataReviewRepository;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DefaultPaging;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DetailRes;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.GeoFeatureRequest.Properties;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewGeometryInfo;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewListDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.SummaryRes;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.searchReq;
|
||||
import java.util.UUID;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.locationtech.jts.geom.Geometry;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class TrainingDataReviewCoreService {
|
||||
|
||||
private final TrainingDataReviewRepository trainingDataReviewRepository;
|
||||
|
||||
public Page<ReviewListDto> findReviewAssignedList(searchReq searchReq, String userId) {
|
||||
return trainingDataReviewRepository.findReviewAssignedList(searchReq, userId);
|
||||
}
|
||||
|
||||
public ReviewGeometryInfo findReviewAssignedGeom(String operatorUid) {
|
||||
return trainingDataReviewRepository.findReviewAssignedGeom(operatorUid);
|
||||
}
|
||||
|
||||
public Long findReviewOperatorGeoUid(String operatorUid) {
|
||||
return trainingDataReviewRepository.findReviewOperatorGeoUid(operatorUid);
|
||||
}
|
||||
|
||||
public void updateReviewStateOperator(String operatorUid, String status, String memo) {
|
||||
trainingDataReviewRepository.updateReviewStateOperator(operatorUid, status, memo);
|
||||
}
|
||||
|
||||
public void updateReviewExceptState(Long inferenceGeomUid, String status) {
|
||||
trainingDataReviewRepository.updateReviewExceptState(inferenceGeomUid, status);
|
||||
}
|
||||
|
||||
public void updateReviewPolygonClass(
|
||||
Long inferenceGeomUid, Geometry geometry, Properties properties, String status) {
|
||||
trainingDataReviewRepository.updateReviewPolygonClass(
|
||||
inferenceGeomUid, geometry, properties, status);
|
||||
}
|
||||
|
||||
/**
|
||||
* 검수자별 작업 통계 조회
|
||||
*
|
||||
* @param userId 검수자 사번
|
||||
* @return 전체/미작업/Today 건수
|
||||
*/
|
||||
public SummaryRes getSummary(String userId) {
|
||||
try {
|
||||
System.out.println("[CoreService] getSummary called with userId: " + userId);
|
||||
SummaryRes result = trainingDataReviewRepository.getSummary(userId);
|
||||
System.out.println("[CoreService] getSummary result: " + result);
|
||||
return result;
|
||||
} catch (Exception e) {
|
||||
System.err.println("[CoreService] getSummary ERROR: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
// 예외 발생 시에도 빈 통계 반환
|
||||
return SummaryRes.builder().totalCnt(0L).undoneCnt(0L).todayCnt(0L).build();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 검수 작업 상세 정보 조회
|
||||
*
|
||||
* @param operatorUid 검수 작업 ID
|
||||
* @return 변화탐지정보 + 실태조사결과정보
|
||||
*/
|
||||
public DetailRes getDetail(UUID operatorUid) {
|
||||
return trainingDataReviewRepository.getDetail(operatorUid);
|
||||
}
|
||||
|
||||
public DefaultPaging getDefaultPagingNumber(String userId, Integer size, String operatorUid) {
|
||||
return trainingDataReviewRepository.getDefaultPagingNumber(userId, size, operatorUid);
|
||||
}
|
||||
|
||||
public void saveNewPolygon(TrainingDataReviewDto.NewPolygonRequest request) {
|
||||
trainingDataReviewRepository.saveNewPolygon(request);
|
||||
}
|
||||
|
||||
public TrainingDataReviewDto.CogImageResponse getCogImageUrl(
|
||||
String mapSheetNum, Integer beforeYear, Integer afterYear) {
|
||||
return trainingDataReviewRepository.getCogImageUrl(mapSheetNum, beforeYear, afterYear);
|
||||
}
|
||||
}
|
||||
@@ -48,4 +48,10 @@ public class MapSheetLearn5kEntity {
|
||||
|
||||
@Column(name = "created_uid")
|
||||
private Long createdUid;
|
||||
|
||||
@Column(name = "befroe_year")
|
||||
private Integer beforeYear;
|
||||
|
||||
@Column(name = "is_success")
|
||||
private Boolean isSuccess;
|
||||
}
|
||||
|
||||
@@ -106,15 +106,6 @@ public class MapSheetLearnEntity {
|
||||
@Column(name = "running_model_type")
|
||||
private String runningModelType;
|
||||
|
||||
@Column(name = "m1_model_batch_id")
|
||||
private Long m1ModelBatchId;
|
||||
|
||||
@Column(name = "m2_model_batch_id")
|
||||
private Long m2ModelBatchId;
|
||||
|
||||
@Column(name = "m3_model_batch_id")
|
||||
private Long m3ModelBatchId;
|
||||
|
||||
@Column(name = "detect_end_cnt")
|
||||
private Long detectEndCnt;
|
||||
|
||||
@@ -124,24 +115,77 @@ public class MapSheetLearnEntity {
|
||||
@Column(name = "model_target_path")
|
||||
private String modelTargetPath;
|
||||
|
||||
@Column(name = "stage")
|
||||
private Integer stage;
|
||||
|
||||
/* ===================== M1 ===================== */
|
||||
|
||||
@Column(name = "m1_model_batch_id")
|
||||
private Long m1ModelBatchId;
|
||||
|
||||
@Column(name = "m1_model_start_dttm")
|
||||
private ZonedDateTime m1ModelStartDttm;
|
||||
|
||||
@Column(name = "m2_model_start_dttm")
|
||||
private ZonedDateTime m2ModelStartDttm;
|
||||
|
||||
@Column(name = "m3_model_start_dttm")
|
||||
private ZonedDateTime m3ModelStartDttm;
|
||||
|
||||
@Column(name = "m1_model_end_dttm")
|
||||
private ZonedDateTime m1ModelEndDttm;
|
||||
|
||||
@Column(name = "m1_pending_jobs", nullable = false)
|
||||
private int m1PendingJobs = 0;
|
||||
|
||||
@Column(name = "m1_running_jobs", nullable = false)
|
||||
private int m1RunningJobs = 0;
|
||||
|
||||
@Column(name = "m1_completed_jobs", nullable = false)
|
||||
private int m1CompletedJobs = 0;
|
||||
|
||||
@Column(name = "m1_failed_jobs", nullable = false)
|
||||
private int m1FailedJobs = 0;
|
||||
|
||||
/* ===================== M2 ===================== */
|
||||
|
||||
@Column(name = "m2_model_batch_id")
|
||||
private Long m2ModelBatchId;
|
||||
|
||||
@Column(name = "m2_model_start_dttm")
|
||||
private ZonedDateTime m2ModelStartDttm;
|
||||
|
||||
@Column(name = "m2_model_end_dttm")
|
||||
private ZonedDateTime m2ModelEndDttm;
|
||||
|
||||
@Column(name = "m2_pending_jobs", nullable = false)
|
||||
private int m2PendingJobs = 0;
|
||||
|
||||
@Column(name = "m2_running_jobs", nullable = false)
|
||||
private int m2RunningJobs = 0;
|
||||
|
||||
@Column(name = "m2_completed_jobs", nullable = false)
|
||||
private int m2CompletedJobs = 0;
|
||||
|
||||
@Column(name = "m2_failed_jobs", nullable = false)
|
||||
private int m2FailedJobs = 0;
|
||||
|
||||
/* ===================== M3 ===================== */
|
||||
@Column(name = "m3_model_batch_id")
|
||||
private Long m3ModelBatchId;
|
||||
|
||||
@Column(name = "m3_model_start_dttm")
|
||||
private ZonedDateTime m3ModelStartDttm;
|
||||
|
||||
@Column(name = "m3_model_end_dttm")
|
||||
private ZonedDateTime m3ModelEndDttm;
|
||||
|
||||
@Column(name = "m3_pending_jobs", nullable = false)
|
||||
private int m3PendingJobs = 0;
|
||||
|
||||
@Column(name = "m3_running_jobs", nullable = false)
|
||||
private int m3RunningJobs = 0;
|
||||
|
||||
@Column(name = "m3_completed_jobs", nullable = false)
|
||||
private int m3CompletedJobs = 0;
|
||||
|
||||
@Column(name = "m3_failed_jobs", nullable = false)
|
||||
private int m3FailedJobs = 0;
|
||||
|
||||
public InferenceResultDto.ResultList toDto() {
|
||||
return new InferenceResultDto.ResultList(
|
||||
this.uuid,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
package com.kamco.cd.kamcoback.postgres.repository.Inference;
|
||||
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResultInfo;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
|
||||
@@ -25,4 +26,8 @@ public interface MapSheetLearnRepositoryCustom {
|
||||
InferenceStatusDetailDto getInferenceStatus(UUID uuid);
|
||||
|
||||
UUID getProcessing();
|
||||
|
||||
Integer getLearnStage(Integer compareYear, Integer targetYear);
|
||||
|
||||
AnalResultInfo getInferenceResultInfo(String uuid);
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapShe
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QSystemMetricEntity.systemMetricEntity;
|
||||
|
||||
import com.kamco.cd.kamcoback.common.utils.DateRange;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResultInfo;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
|
||||
@@ -239,4 +240,57 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
|
||||
.where(mapSheetLearnEntity.status.eq("IN_PROGRESS"))
|
||||
.fetchOne();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Integer getLearnStage(Integer compareYear, Integer targetYear) {
|
||||
MapSheetLearnEntity entity = new MapSheetLearnEntity();
|
||||
entity.setCompareYyyy(compareYear);
|
||||
entity.setTargetYyyy(targetYear);
|
||||
|
||||
Integer stage =
|
||||
queryFactory
|
||||
.select(mapSheetLearnEntity.stage)
|
||||
.from(mapSheetLearnEntity)
|
||||
.where(
|
||||
mapSheetLearnEntity
|
||||
.compareYyyy
|
||||
.eq(compareYear)
|
||||
.and(mapSheetLearnEntity.targetYyyy.eq(targetYear)))
|
||||
.orderBy(mapSheetLearnEntity.stage.desc())
|
||||
.limit(1)
|
||||
.fetchOne();
|
||||
|
||||
return stage == null ? 1 : stage + 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AnalResultInfo getInferenceResultInfo(String uuid) {
|
||||
QModelMngEntity m1 = new QModelMngEntity("m1");
|
||||
QModelMngEntity m2 = new QModelMngEntity("m2");
|
||||
QModelMngEntity m3 = new QModelMngEntity("m3");
|
||||
|
||||
return queryFactory
|
||||
.select(
|
||||
Projections.constructor(
|
||||
AnalResultInfo.class,
|
||||
mapSheetLearnEntity.title,
|
||||
m1.modelVer,
|
||||
m2.modelVer,
|
||||
m3.modelVer,
|
||||
mapSheetLearnEntity.compareYyyy,
|
||||
mapSheetLearnEntity.targetYyyy,
|
||||
mapSheetLearnEntity.detectOption,
|
||||
mapSheetLearnEntity.mapSheetScope,
|
||||
mapSheetLearnEntity.inferStartDttm,
|
||||
mapSheetLearnEntity.inferEndDttm))
|
||||
.from(mapSheetLearnEntity)
|
||||
.leftJoin(m1)
|
||||
.on(mapSheetLearnEntity.m1ModelUuid.eq(m1.uuid))
|
||||
.leftJoin(m2)
|
||||
.on(mapSheetLearnEntity.m2ModelUuid.eq(m2.uuid))
|
||||
.leftJoin(m3)
|
||||
.on(mapSheetLearnEntity.m3ModelUuid.eq(m3.uuid))
|
||||
.where(mapSheetLearnEntity.uuid.eq(UUID.fromString(uuid)))
|
||||
.fetchOne();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,8 @@ import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveInfo;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.WorkHistoryDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.searchReq;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerStatistics;
|
||||
@@ -86,4 +88,8 @@ public interface LabelAllocateRepositoryCustom {
|
||||
|
||||
// 프로젝트 종료 여부 업데이트 (uuid 기반)
|
||||
void updateClosedYnByUuid(String uuid, String closedType, String closedYn);
|
||||
|
||||
Page<WorkHistoryDto> workLabelHistoryList(LabelAllocateDto.searchReq searchReq, String userId);
|
||||
|
||||
Page<WorkHistoryDto> workReviewerHistoryList(searchReq searchReq, String userId);
|
||||
}
|
||||
|
||||
@@ -18,6 +18,8 @@ import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveInfo;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveUserList;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.WorkHistoryDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.searchReq;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
|
||||
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerStatistics;
|
||||
@@ -1457,4 +1459,216 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
|
||||
em.flush();
|
||||
em.clear();
|
||||
}
|
||||
|
||||
public Page<WorkHistoryDto> workLabelHistoryList(
|
||||
LabelAllocateDto.searchReq searchReq, String userId) {
|
||||
|
||||
NumberExpression<Long> totalCnt = labelingAssignmentEntity.assignmentUid.count();
|
||||
|
||||
NumberExpression<Long> skipCnt =
|
||||
new CaseBuilder()
|
||||
.when(labelingAssignmentEntity.workState.eq(LabelState.SKIP.getId()))
|
||||
.then(1L)
|
||||
.otherwise((Long) null)
|
||||
.count();
|
||||
|
||||
NumberExpression<Long> completeCnt =
|
||||
new CaseBuilder()
|
||||
.when(labelingAssignmentEntity.workState.eq(LabelState.DONE.getId()))
|
||||
.then(1L)
|
||||
.otherwise((Long) null)
|
||||
.count();
|
||||
|
||||
Pageable pageable = searchReq.toPageable();
|
||||
List<WorkHistoryDto> list =
|
||||
queryFactory
|
||||
.select(
|
||||
Projections.bean(
|
||||
WorkHistoryDto.class,
|
||||
Expressions.stringTemplate(
|
||||
"concat({0}, '-', {1})",
|
||||
mapSheetAnalInferenceEntity.compareYyyy,
|
||||
mapSheetAnalInferenceEntity.targetYyyy)
|
||||
.as("changeDetectionYear"),
|
||||
mapSheetAnalInferenceEntity.stage.longValue().as("stage"),
|
||||
mapSheetAnalInferenceEntity.gukyuinApplyDttm.as("gukyuinApplyDttm"),
|
||||
totalCnt.as("assignedCnt"),
|
||||
completeCnt.as("completeCnt"),
|
||||
skipCnt.as("skipCnt"),
|
||||
mapSheetAnalInferenceEntity.createdDttm.as("createdDttm"),
|
||||
new CaseBuilder()
|
||||
.when(mapSheetAnalInferenceEntity.inspectionClosedYn.eq("Y"))
|
||||
.then(mapSheetAnalInferenceEntity.updatedDttm)
|
||||
.otherwise((ZonedDateTime) null)
|
||||
.as("projectCloseDttm")))
|
||||
.from(labelingAssignmentEntity)
|
||||
.innerJoin(mapSheetAnalInferenceEntity)
|
||||
.on(labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
|
||||
.where(labelingAssignmentEntity.workerUid.eq(userId))
|
||||
.groupBy(
|
||||
mapSheetAnalInferenceEntity.id,
|
||||
mapSheetAnalInferenceEntity.compareYyyy,
|
||||
mapSheetAnalInferenceEntity.targetYyyy,
|
||||
mapSheetAnalInferenceEntity.stage,
|
||||
mapSheetAnalInferenceEntity.gukyuinApplyDttm,
|
||||
mapSheetAnalInferenceEntity.createdDttm,
|
||||
mapSheetAnalInferenceEntity.inspectionClosedYn,
|
||||
mapSheetAnalInferenceEntity.updatedDttm)
|
||||
.orderBy(
|
||||
// 진행중인 작업이 최상단 (remainCnt > 0)
|
||||
new CaseBuilder()
|
||||
.when(totalCnt.subtract(completeCnt).subtract(skipCnt).gt(0L))
|
||||
.then(0)
|
||||
.otherwise(1)
|
||||
.asc(),
|
||||
// 최신 작업순 (반영일 기준)
|
||||
mapSheetAnalInferenceEntity.gukyuinApplyDttm.desc())
|
||||
.offset(pageable.getOffset())
|
||||
.limit(pageable.getPageSize())
|
||||
.fetch();
|
||||
|
||||
// rowNum, remainCnt, percent, status를 Java에서 계산
|
||||
int startRow = (int) pageable.getOffset() + 1;
|
||||
for (int i = 0; i < list.size(); i++) {
|
||||
WorkHistoryDto dto = list.get(i);
|
||||
dto.setRowNum(startRow + i);
|
||||
|
||||
// remainCnt 계산
|
||||
Long assigned = dto.getAssignedCnt() != null ? dto.getAssignedCnt() : 0L;
|
||||
Long complete = dto.getCompleteCnt() != null ? dto.getCompleteCnt() : 0L;
|
||||
Long skip = dto.getSkipCnt() != null ? dto.getSkipCnt() : 0L;
|
||||
dto.setRemainCnt(assigned - complete - skip);
|
||||
|
||||
// percent 계산
|
||||
if (assigned > 0) {
|
||||
dto.setPercent(Math.round((double) complete / assigned * 100.0 * 100.0) / 100.0);
|
||||
} else {
|
||||
dto.setPercent(0.0);
|
||||
}
|
||||
|
||||
// status 계산 (잔여건수가 0이고 진행률이 100%면 "완료", 아니면 "진행중")
|
||||
if (dto.getRemainCnt() == 0 && dto.getPercent() >= 100.0) {
|
||||
dto.setStatus("완료");
|
||||
} else {
|
||||
dto.setStatus("진행중");
|
||||
}
|
||||
}
|
||||
|
||||
Long countQuery =
|
||||
queryFactory
|
||||
.select(mapSheetAnalInferenceEntity.id.countDistinct())
|
||||
.from(labelingAssignmentEntity)
|
||||
.innerJoin(mapSheetAnalInferenceEntity)
|
||||
.on(labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
|
||||
.where(labelingAssignmentEntity.workerUid.eq(userId))
|
||||
.fetchOne();
|
||||
|
||||
return new PageImpl<>(list, pageable, countQuery != null ? countQuery : 0L);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Page<WorkHistoryDto> workReviewerHistoryList(searchReq searchReq, String userId) {
|
||||
|
||||
NumberExpression<Long> totalCnt = labelingAssignmentEntity.assignmentUid.count();
|
||||
|
||||
NumberExpression<Long> skipCnt =
|
||||
new CaseBuilder()
|
||||
.when(labelingAssignmentEntity.inspectState.eq(InspectState.EXCEPT.getId()))
|
||||
.then(1L)
|
||||
.otherwise((Long) null)
|
||||
.count();
|
||||
|
||||
NumberExpression<Long> completeCnt =
|
||||
new CaseBuilder()
|
||||
.when(labelingAssignmentEntity.inspectState.eq(InspectState.COMPLETE.getId()))
|
||||
.then(1L)
|
||||
.otherwise((Long) null)
|
||||
.count();
|
||||
|
||||
Pageable pageable = searchReq.toPageable();
|
||||
List<WorkHistoryDto> list =
|
||||
queryFactory
|
||||
.select(
|
||||
Projections.bean(
|
||||
WorkHistoryDto.class,
|
||||
Expressions.stringTemplate(
|
||||
"concat({0}, '-', {1})",
|
||||
mapSheetAnalInferenceEntity.compareYyyy,
|
||||
mapSheetAnalInferenceEntity.targetYyyy)
|
||||
.as("changeDetectionYear"),
|
||||
mapSheetAnalInferenceEntity.stage.longValue().as("stage"),
|
||||
mapSheetAnalInferenceEntity.gukyuinApplyDttm.as("gukyuinApplyDttm"),
|
||||
totalCnt.as("assignedCnt"),
|
||||
completeCnt.as("completeCnt"),
|
||||
skipCnt.as("skipCnt"),
|
||||
mapSheetAnalInferenceEntity.createdDttm.as("createdDttm"),
|
||||
new CaseBuilder()
|
||||
.when(mapSheetAnalInferenceEntity.inspectionClosedYn.eq("Y"))
|
||||
.then(mapSheetAnalInferenceEntity.updatedDttm)
|
||||
.otherwise((ZonedDateTime) null)
|
||||
.as("projectCloseDttm")))
|
||||
.from(labelingAssignmentEntity)
|
||||
.innerJoin(mapSheetAnalInferenceEntity)
|
||||
.on(labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
|
||||
.where(labelingAssignmentEntity.inspectorUid.eq(userId))
|
||||
.groupBy(
|
||||
mapSheetAnalInferenceEntity.id,
|
||||
mapSheetAnalInferenceEntity.compareYyyy,
|
||||
mapSheetAnalInferenceEntity.targetYyyy,
|
||||
mapSheetAnalInferenceEntity.stage,
|
||||
mapSheetAnalInferenceEntity.gukyuinApplyDttm,
|
||||
mapSheetAnalInferenceEntity.createdDttm,
|
||||
mapSheetAnalInferenceEntity.inspectionClosedYn,
|
||||
mapSheetAnalInferenceEntity.updatedDttm)
|
||||
.orderBy(
|
||||
// 진행중인 작업이 최상단 (remainCnt > 0)
|
||||
new CaseBuilder()
|
||||
.when(totalCnt.subtract(completeCnt).subtract(skipCnt).gt(0L))
|
||||
.then(0)
|
||||
.otherwise(1)
|
||||
.asc(),
|
||||
// 최신 작업순 (반영일 기준)
|
||||
mapSheetAnalInferenceEntity.gukyuinApplyDttm.desc())
|
||||
.offset(pageable.getOffset())
|
||||
.limit(pageable.getPageSize())
|
||||
.fetch();
|
||||
|
||||
// rowNum, remainCnt, percent, status를 Java에서 계산
|
||||
int startRow = (int) pageable.getOffset() + 1;
|
||||
for (int i = 0; i < list.size(); i++) {
|
||||
WorkHistoryDto dto = list.get(i);
|
||||
dto.setRowNum(startRow + i);
|
||||
|
||||
// remainCnt 계산
|
||||
Long assigned = dto.getAssignedCnt() != null ? dto.getAssignedCnt() : 0L;
|
||||
Long complete = dto.getCompleteCnt() != null ? dto.getCompleteCnt() : 0L;
|
||||
Long skip = dto.getSkipCnt() != null ? dto.getSkipCnt() : 0L;
|
||||
dto.setRemainCnt(assigned - complete - skip);
|
||||
|
||||
// percent 계산
|
||||
if (assigned > 0) {
|
||||
dto.setPercent(Math.round((double) complete / assigned * 100.0 * 100.0) / 100.0);
|
||||
} else {
|
||||
dto.setPercent(0.0);
|
||||
}
|
||||
|
||||
// status 계산 (잔여건수가 0이고 진행률이 100%면 "완료", 아니면 "진행중")
|
||||
if (dto.getRemainCnt() == 0 && dto.getPercent() >= 100.0) {
|
||||
dto.setStatus("완료");
|
||||
} else {
|
||||
dto.setStatus("진행중");
|
||||
}
|
||||
}
|
||||
|
||||
Long countQuery =
|
||||
queryFactory
|
||||
.select(mapSheetAnalInferenceEntity.id.countDistinct())
|
||||
.from(labelingAssignmentEntity)
|
||||
.innerJoin(mapSheetAnalInferenceEntity)
|
||||
.on(labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
|
||||
.where(labelingAssignmentEntity.inspectorUid.eq(userId))
|
||||
.fetchOne();
|
||||
|
||||
return new PageImpl<>(list, pageable, countQuery != null ? countQuery : 0L);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
package com.kamco.cd.kamcoback.postgres.repository.label;
|
||||
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InspectState;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelState;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMng;
|
||||
@@ -204,7 +205,25 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
|
||||
// totalAssignmentCnt: 총 배정 건수 (서브쿼리)
|
||||
totalAssignmentCntSubQuery,
|
||||
mapSheetAnalInferenceEntity.labelingClosedYn,
|
||||
mapSheetAnalInferenceEntity.inspectionClosedYn))
|
||||
mapSheetAnalInferenceEntity.inspectionClosedYn,
|
||||
new CaseBuilder()
|
||||
.when(
|
||||
mapSheetAnalDataInferenceGeomEntity.testState.eq(
|
||||
InspectState.COMPLETE.getId()))
|
||||
.then(1L)
|
||||
.otherwise(0L)
|
||||
.sum(),
|
||||
new CaseBuilder()
|
||||
.when(
|
||||
mapSheetAnalDataInferenceGeomEntity.testState.eq(
|
||||
InspectState.UNCONFIRM.getId()))
|
||||
.then(1L)
|
||||
.otherwise(0L)
|
||||
.sum(),
|
||||
new CaseBuilder()
|
||||
.when(mapSheetAnalInferenceEntity.inspectionClosedYn.eq("Y"))
|
||||
.then(mapSheetAnalInferenceEntity.updatedDttm)
|
||||
.otherwise((ZonedDateTime) null)))
|
||||
.from(mapSheetAnalInferenceEntity)
|
||||
.innerJoin(mapSheetAnalDataInferenceEntity)
|
||||
.on(whereSubDataBuilder)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.kamco.cd.kamcoback.postgres.repository.mapsheet;
|
||||
|
||||
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListCompareDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto;
|
||||
@@ -61,7 +62,7 @@ public interface MapSheetMngRepositoryCustom {
|
||||
|
||||
List<MapSheetMngDto.MngFilesDto> findByHstUidMapSheetFileList(Long hstUid);
|
||||
|
||||
List<MngListDto> findByHstMapSheetTargetList(int mngYyyy);
|
||||
List<MngListDto> findByHstMapSheetTargetList(InferenceResultDto.RegReq req);
|
||||
|
||||
List<MngListDto> findByHstMapSheetTargetList(int mngYyyy, List<String> mapIds);
|
||||
|
||||
|
||||
@@ -5,8 +5,11 @@ import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngEntity.mapSheet
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngFileEntity.mapSheetMngFileEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngHstEntity.mapSheetMngHstEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QYearEntity.yearEntity;
|
||||
import static com.querydsl.core.types.dsl.Expressions.nullExpression;
|
||||
|
||||
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListCompareDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto;
|
||||
@@ -478,23 +481,57 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
|
||||
return foundContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* 기준년도 추론 실행 가능 도엽 조회
|
||||
*
|
||||
* @param req
|
||||
* @return
|
||||
*/
|
||||
@Override
|
||||
public List<MngListDto> findByHstMapSheetTargetList(int mngYyyy) {
|
||||
public List<MngListDto> findByHstMapSheetTargetList(InferenceResultDto.RegReq req) {
|
||||
BooleanBuilder whereBuilder = new BooleanBuilder();
|
||||
|
||||
whereBuilder.and(mapSheetMngHstEntity.dataState.eq("DONE"));
|
||||
whereBuilder.and(
|
||||
mapSheetMngHstEntity
|
||||
.syncState
|
||||
.eq("DONE")
|
||||
.or(mapSheetMngHstEntity.syncCheckState.eq("DONE")));
|
||||
whereBuilder.and(mapSheetMngHstEntity.useInference.eq("USE"));
|
||||
|
||||
whereBuilder.and(mapSheetMngHstEntity.mngYyyy.eq(req.getTargetYyyy()));
|
||||
|
||||
BooleanBuilder likeBuilder = new BooleanBuilder();
|
||||
|
||||
if (MapSheetScope.PART.getId().equals(req.getMapSheetScope())) {
|
||||
List<String> list = req.getMapSheetNum();
|
||||
if (list == null || list.isEmpty()) {
|
||||
return List.of();
|
||||
}
|
||||
|
||||
for (String prefix : list) {
|
||||
if (prefix == null || prefix.isBlank()) {
|
||||
continue;
|
||||
}
|
||||
likeBuilder.or(mapSheetMngHstEntity.mapSheetNum.like(prefix.trim() + "%"));
|
||||
}
|
||||
}
|
||||
|
||||
if (likeBuilder.hasValue()) {
|
||||
whereBuilder.and(likeBuilder);
|
||||
}
|
||||
|
||||
return queryFactory
|
||||
.select(
|
||||
Projections.constructor(
|
||||
MngListDto.class,
|
||||
mapSheetMngHstEntity.mngYyyy,
|
||||
mapSheetMngHstEntity.mapSheetNum,
|
||||
mapSheetMngHstEntity.mapSheetName))
|
||||
mapSheetMngHstEntity.mapSheetName,
|
||||
nullExpression(String.class),
|
||||
nullExpression(Boolean.class)))
|
||||
.from(mapSheetMngHstEntity)
|
||||
.where(
|
||||
mapSheetMngHstEntity
|
||||
.mngYyyy
|
||||
.eq(mngYyyy)
|
||||
.and(mapSheetMngHstEntity.dataState.eq("DONE"))
|
||||
.and(mapSheetMngHstEntity.syncState.eq("DONE"))
|
||||
.and(mapSheetMngHstEntity.useInference.eq("USE")))
|
||||
.where(whereBuilder)
|
||||
.fetch();
|
||||
}
|
||||
|
||||
@@ -519,6 +556,13 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
|
||||
.fetch();
|
||||
}
|
||||
|
||||
/**
|
||||
* 변화탐지 실행 가능 비교년도 조회
|
||||
*
|
||||
* @param mngYyyy
|
||||
* @param mapIds
|
||||
* @return
|
||||
*/
|
||||
@Override
|
||||
public List<MngListCompareDto> findByHstMapSheetCompareList(int mngYyyy, List<String> mapIds) {
|
||||
|
||||
|
||||
@@ -22,7 +22,4 @@ public interface MapInkx5kRepositoryCustom {
|
||||
|
||||
Page<MapInkx5kEntity> getSceneListByPage(
|
||||
CommonUseStatus useInference, String searchVal, searchReq searchReq);
|
||||
|
||||
List<MapInkx5kEntity> findByMapSheetNumInAndUseInference(
|
||||
List<String> mapSheetNums, CommonUseStatus use);
|
||||
}
|
||||
|
||||
@@ -129,19 +129,6 @@ public class MapInkx5kRepositoryImpl extends QuerydslRepositorySupport
|
||||
return new PageImpl<>(content, pageable, count);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<MapInkx5kEntity> findByMapSheetNumInAndUseInference(
|
||||
List<String> mapSheetNums, CommonUseStatus use) {
|
||||
if (mapSheetNums == null || mapSheetNums.isEmpty()) {
|
||||
return List.of();
|
||||
}
|
||||
|
||||
return queryFactory
|
||||
.selectFrom(mapInkx5kEntity)
|
||||
.where(mapInkx5kEntity.mapidcdNo.in(mapSheetNums), mapInkx5kEntity.useInference.eq(use))
|
||||
.fetch();
|
||||
}
|
||||
|
||||
private BooleanExpression searchUseInference(CommonUseStatus useInference) {
|
||||
if (Objects.isNull(useInference)) {
|
||||
return null;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
package com.kamco.cd.kamcoback.postgres.repository.trainingdata;
|
||||
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.DefaultPaging;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.DetailRes;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.GeoFeatureRequest.Properties;
|
||||
@@ -31,4 +32,9 @@ public interface TrainingDataLabelRepositoryCustom {
|
||||
DetailRes getDetail(UUID assignmentUid);
|
||||
|
||||
DefaultPaging getDefaultPagingNumber(String userId, Integer size, String assignmentUid);
|
||||
|
||||
void saveNewPolygon(TrainingDataLabelDto.NewPolygonRequest request);
|
||||
|
||||
TrainingDataLabelDto.CogImageResponse getCogImageUrl(
|
||||
String mapSheetNum, Integer beforeYear, Integer afterYear);
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelState;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.ChangeDetectionInfo;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.ClassificationInfo;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.DefaultPaging;
|
||||
@@ -519,8 +520,8 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
|
||||
}
|
||||
}
|
||||
|
||||
// 7. 라벨링 저장한 Geometry를 GeoJSON으로 변환
|
||||
LearnDataGeometry learnData =
|
||||
// 7. 라벨링 저장한 Geometry들을 GeoJSON으로 변환 (여러 개 가능)
|
||||
List<LearnDataGeometry> learnDataList =
|
||||
queryFactory
|
||||
.select(
|
||||
Projections.constructor(
|
||||
@@ -536,7 +537,7 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
|
||||
.where(
|
||||
mapSheetLearnDataGeomEntity.geoUid.eq(
|
||||
mapSheetAnalDataInferenceGeomEntityEntity.getGeoUid()))
|
||||
.fetchOne();
|
||||
.fetch(); // fetchOne() -> fetch()로 변경
|
||||
|
||||
return DetailRes.builder()
|
||||
.assignmentUid(assignmentUid)
|
||||
@@ -546,7 +547,7 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
|
||||
.beforeCogUrl(beforeCogUrl)
|
||||
.afterCogUrl(afterCogUrl)
|
||||
.mapBox(mapBbox)
|
||||
.learnGeometry(learnData)
|
||||
.learnGeometries(learnDataList) // learnGeometry -> learnGeometries
|
||||
.build();
|
||||
|
||||
} catch (Exception e) {
|
||||
@@ -624,6 +625,183 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
|
||||
return DefaultPaging.builder().page(page).assignmentUid(firstAssignedUid).build();
|
||||
}
|
||||
|
||||
/**
 * Persists newly drawn labeling polygons for a map sheet.
 *
 * <p>For each feature in the request this inserts a row into
 * map_sheet_anal_data_inference_geom, looks up the generated geo_uid, and mirrors the polygon
 * into learn_data_geom. A failure on an individual polygon is logged and skipped; the method
 * only throws when no polygon at all could be saved (or the request is empty).
 *
 * @param request polygons plus their map sheet / year / class metadata
 * @throws RuntimeException when the request has no features or every insert failed; the outer
 *     catch rewraps any failure as "Failed to save new polygons"
 */
@Override
public void saveNewPolygon(TrainingDataLabelDto.NewPolygonRequest request) {
  try {
    if (request.getFeatures() == null || request.getFeatures().isEmpty()) {
      throw new RuntimeException("No polygons to save");
    }

    // NOTE(review): System.out/System.err tracing — consider routing through a logger.
    System.out.println(
        "Saving "
            + request.getFeatures().size()
            + " new polygon(s) for mapSheetNum: "
            + request.getMapSheetNum());

    // Convert mapSheetNum to Long; a non-numeric value is logged and stored as null.
    Long mapSheetNumLong = null;
    try {
      if (request.getMapSheetNum() != null && !request.getMapSheetNum().isEmpty()) {
        mapSheetNumLong = Long.parseLong(request.getMapSheetNum());
      }
    } catch (NumberFormatException e) {
      System.err.println("Invalid mapSheetNum format: " + request.getMapSheetNum());
    }

    int savedCount = 0;
    for (TrainingDataLabelDto.NewPolygonRequest.PolygonFeature feature : request.getFeatures()) {
      try {
        // 1. Insert the new polygon into map_sheet_anal_data_inference_geom.
        queryFactory
            .insert(mapSheetAnalDataInferenceGeomEntity)
            .columns(
                mapSheetAnalDataInferenceGeomEntity.geom,
                mapSheetAnalDataInferenceGeomEntity.compareYyyy,
                mapSheetAnalDataInferenceGeomEntity.targetYyyy,
                mapSheetAnalDataInferenceGeomEntity.classBeforeCd,
                mapSheetAnalDataInferenceGeomEntity.classAfterCd,
                mapSheetAnalDataInferenceGeomEntity.dataUid,
                mapSheetAnalDataInferenceGeomEntity.mapSheetNum,
                mapSheetAnalDataInferenceGeomEntity.createdDttm,
                mapSheetAnalDataInferenceGeomEntity.updatedDttm,
                mapSheetAnalDataInferenceGeomEntity.labelState)
            .values(
                feature.getGeometry(),
                request.getCompareYyyy(),
                request.getTargetYyyy(),
                feature.getProperties().getBeforeClass().toLowerCase(),
                feature.getProperties().getAfterClass().toLowerCase(),
                request.getAnalUid(),
                mapSheetNumLong,
                ZonedDateTime.now(),
                ZonedDateTime.now(),
                "DONE")
            .execute();

        // 2. Look up the generated geoUid (most recently inserted matching record).
        // NOTE(review): this "max geoUid by attribute match" lookup is race-prone under
        // concurrent inserts with identical years/classes — confirm whether the insert can
        // return its generated key instead.
        Long geoUid =
            queryFactory
                .select(mapSheetAnalDataInferenceGeomEntity.geoUid)
                .from(mapSheetAnalDataInferenceGeomEntity)
                .where(
                    mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(request.getCompareYyyy()),
                    mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(request.getTargetYyyy()),
                    mapSheetAnalDataInferenceGeomEntity.classBeforeCd.eq(
                        feature.getProperties().getBeforeClass().toLowerCase()),
                    mapSheetAnalDataInferenceGeomEntity.classAfterCd.eq(
                        feature.getProperties().getAfterClass().toLowerCase()))
                .orderBy(mapSheetAnalDataInferenceGeomEntity.geoUid.desc())
                .fetchFirst();

        if (geoUid == null) {
          System.err.println("Failed to get geo_uid for polygon #" + (savedCount + 1));
          continue;
        }

        // 3. Mirror the polygon into learn_data_geom
        //    (afterYyyy <- targetYyyy, beforeYyyy <- compareYyyy).
        queryFactory
            .insert(mapSheetLearnDataGeomEntity)
            .columns(
                mapSheetLearnDataGeomEntity.geoUid,
                mapSheetLearnDataGeomEntity.afterYyyy,
                mapSheetLearnDataGeomEntity.beforeYyyy,
                mapSheetLearnDataGeomEntity.classAfterCd,
                mapSheetLearnDataGeomEntity.classBeforeCd,
                mapSheetLearnDataGeomEntity.geom,
                mapSheetLearnDataGeomEntity.createdDate,
                mapSheetLearnDataGeomEntity.modifiedDate)
            .values(
                geoUid,
                request.getTargetYyyy(),
                request.getCompareYyyy(),
                feature.getProperties().getAfterClass().toLowerCase(),
                feature.getProperties().getBeforeClass().toLowerCase(),
                feature.getGeometry(),
                ZonedDateTime.now(),
                ZonedDateTime.now())
            .execute();

        savedCount++;
        System.out.println(
            "Successfully saved polygon #" + savedCount + " with geo_uid: " + geoUid);

      } catch (Exception e) {
        System.err.println("Error saving polygon #" + (savedCount + 1) + ": " + e.getMessage());
        e.printStackTrace();
        // Keep going even if an individual polygon fails to save.
      }
    }

    if (savedCount == 0) {
      throw new RuntimeException("Failed to save any polygons");
    }

    System.out.println(
        "Successfully saved " + savedCount + "/" + request.getFeatures().size() + " polygon(s)");

  } catch (Exception e) {
    System.err.println("saveNewPolygon Error: " + e.getMessage());
    e.printStackTrace();
    throw new RuntimeException("Failed to save new polygons", e);
  }
}
|
||||
|
||||
/**
 * Fetches before/after COG image URLs for a map sheet.
 *
 * <p>The URL is built in SQL as {@code cogMiddlePath || cogFilename} from the imagery table.
 * A missing year resolves to an empty string in the response; the call only fails when
 * neither year has an image.
 *
 * @param mapSheetNum 5k map-sheet number (matched against imagery.scene5k)
 * @param beforeYear earlier imagery year
 * @param afterYear later imagery year
 * @return both URLs (empty string when one side is missing) plus the echoed request values
 * @throws RuntimeException when no imagery exists for either year, or the lookup itself fails
 */
@Override
public TrainingDataLabelDto.CogImageResponse getCogImageUrl(
    String mapSheetNum, Integer beforeYear, Integer afterYear) {
  try {
    // COG URL for the before-year imagery ("||" is SQL string concatenation).
    String beforeCogUrl =
        queryFactory
            .select(
                Expressions.stringTemplate(
                    "{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
            .from(imageryEntity)
            .where(imageryEntity.scene5k.eq(mapSheetNum), imageryEntity.year.eq(beforeYear))
            .fetchFirst();

    // COG URL for the after-year imagery.
    String afterCogUrl =
        queryFactory
            .select(
                Expressions.stringTemplate(
                    "{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
            .from(imageryEntity)
            .where(imageryEntity.scene5k.eq(mapSheetNum), imageryEntity.year.eq(afterYear))
            .fetchFirst();

    // Fail only when BOTH years are missing; a single missing side becomes "" below.
    if (beforeCogUrl == null && afterCogUrl == null) {
      throw new RuntimeException(
          "COG images not found for mapSheetNum: "
              + mapSheetNum
              + ", years: "
              + beforeYear
              + ", "
              + afterYear);
    }

    return TrainingDataLabelDto.CogImageResponse.builder()
        .beforeCogUrl(beforeCogUrl != null ? beforeCogUrl : "")
        .afterCogUrl(afterCogUrl != null ? afterCogUrl : "")
        .beforeYear(beforeYear)
        .afterYear(afterYear)
        .mapSheetNum(mapSheetNum)
        .build();

  } catch (Exception e) {
    // NOTE(review): System.err + printStackTrace — consider the repository's logger instead.
    System.err.println("getCogImageUrl Error: " + e.getMessage());
    e.printStackTrace();
    throw new RuntimeException(
        "Failed to get COG image URLs for mapSheetNum: "
            + mapSheetNum
            + ", years: "
            + beforeYear
            + ", "
            + afterYear,
        e);
  }
}
|
||||
|
||||
private StringExpression makeCogUrl(NumberPath<Integer> year) {
|
||||
return new CaseBuilder()
|
||||
.when(imageryEntity.year.eq(year))
|
||||
|
||||
@@ -0,0 +1,7 @@
|
||||
package com.kamco.cd.kamcoback.postgres.repository.trainingdata;
|
||||
|
||||
import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
|
||||
/**
 * Spring Data repository for training-data review work over labeling assignments.
 *
 * <p>Combines standard CRUD from {@link JpaRepository} with the QueryDSL operations declared in
 * {@link TrainingDataReviewRepositoryCustom}.
 *
 * <p>NOTE(review): the ID type is declared as {@code Long}, but assignments are addressed
 * elsewhere by a UUID {@code assignmentUid} — confirm the entity's {@code @Id} type.
 */
public interface TrainingDataReviewRepository
    extends JpaRepository<LabelingAssignmentEntity, Long>, TrainingDataReviewRepositoryCustom {}
|
||||
@@ -0,0 +1,40 @@
|
||||
package com.kamco.cd.kamcoback.postgres.repository.trainingdata;
|
||||
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DefaultPaging;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DetailRes;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.GeoFeatureRequest.Properties;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewGeometryInfo;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewListDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.SummaryRes;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.searchReq;
|
||||
import java.util.UUID;
|
||||
import org.locationtech.jts.geom.Geometry;
|
||||
import org.springframework.data.domain.Page;
|
||||
|
||||
/** QueryDSL-backed operations for the training-data review (inspection) workflow. */
public interface TrainingDataReviewRepositoryCustom {

  /** Pages the review assignments for the given inspector. */
  Page<ReviewListDto> findReviewAssignedList(searchReq searchReq, String userId);

  /** Loads the geometry bundle (polygon, sheet outline, COG URLs) for one assignment. */
  ReviewGeometryInfo findReviewAssignedGeom(String operatorUid);

  /** Resolves the inference-geometry UID referenced by an assignment. */
  Long findReviewOperatorGeoUid(String operatorUid);

  /**
   * Updates an assignment's inspection state and stamps the inspection time.
   *
   * <p>NOTE(review): the implementation currently ignores {@code memo} — confirm intent.
   */
  void updateReviewStateOperator(String operatorUid, String status, String memo);

  /** Handles the review "except" (exclusion) state change for an inference geometry. */
  void updateReviewExceptState(Long inferenceGeomUid, String status);

  /** Applies a reviewer's polygon/class correction to the learn-data geometry. */
  void updateReviewPolygonClass(
      Long inferenceGeomUid, Geometry geometry, Properties properties, String status);

  /** Aggregates total / undone / today-completed review counts for a user. */
  SummaryRes getSummary(String userId);

  /** Returns the full detail payload for a single review assignment. */
  DetailRes getDetail(UUID operatorUid);

  /** Computes the default page number/anchor for the user's review list. */
  DefaultPaging getDefaultPagingNumber(String userId, Integer size, String operatorUid);

  /** Persists polygons newly drawn during review. */
  void saveNewPolygon(TrainingDataReviewDto.NewPolygonRequest request);

  /** Resolves before/after COG imagery URLs for a map sheet. */
  TrainingDataReviewDto.CogImageResponse getCogImageUrl(
      String mapSheetNum, Integer beforeYear, Integer afterYear);
}
|
||||
@@ -0,0 +1,860 @@
|
||||
package com.kamco.cd.kamcoback.postgres.repository.trainingdata;
|
||||
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QImageryEntity.imageryEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity.labelingAssignmentEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity.mapInkx5kEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnDataGeomEntity.mapSheetLearnDataGeomEntity;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InspectState;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ChangeDetectionInfo;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ClassificationInfo;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DefaultPaging;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DetailRes;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.GeoFeatureRequest.Properties;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.InferenceDataGeometry;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.InferenceDataGeometry.InferenceProperties;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.InspectionResultInfo;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.LearnDataGeometry;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.LearnDataGeometry.LearnProperties;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewGeometryInfo;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewListDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.SummaryRes;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.searchReq;
|
||||
import com.querydsl.core.Tuple;
|
||||
import com.querydsl.core.types.Projections;
|
||||
import com.querydsl.core.types.dsl.BooleanExpression;
|
||||
import com.querydsl.core.types.dsl.CaseBuilder;
|
||||
import com.querydsl.core.types.dsl.Expressions;
|
||||
import com.querydsl.core.types.dsl.NumberPath;
|
||||
import com.querydsl.core.types.dsl.StringExpression;
|
||||
import com.querydsl.jpa.impl.JPAQueryFactory;
|
||||
import jakarta.persistence.EntityNotFoundException;
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZoneId;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.locationtech.jts.geom.Geometry;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
|
||||
|
||||
@Slf4j
|
||||
public class TrainingDataReviewRepositoryImpl extends QuerydslRepositorySupport
|
||||
implements TrainingDataReviewRepositoryCustom {
|
||||
|
||||
private final JPAQueryFactory queryFactory;
|
||||
|
||||
/**
 * Creates the repository.
 *
 * @param queryFactory shared JPA QueryDSL factory used for every query in this repository
 */
public TrainingDataReviewRepositoryImpl(JPAQueryFactory queryFactory) {
  super(LabelingAssignmentEntity.class);
  this.queryFactory = queryFactory;
}
|
||||
|
||||
/**
 * Pages the review queue for an inspector.
 *
 * <p>Shows assignments whose labeling work is DONE and which are either (a) completed today in
 * KST, or (b) still UNCONFIRM / EXCEPT — i.e. items completed on previous days drop out of the
 * list.
 *
 * @param searchReq paging parameters (page appears to be 0-based — confirm with the caller)
 * @param userId inspector UID the assignments belong to
 * @return one page of review rows plus the total matching count
 */
@Override
public Page<ReviewListDto> findReviewAssignedList(searchReq searchReq, String userId) {

  // Completed reviews are only shown for today; everything else is shown regardless of date.
  LocalDate today = LocalDate.now(ZoneId.of("Asia/Seoul"));
  ZonedDateTime start = today.atStartOfDay(ZoneId.of("Asia/Seoul"));
  ZonedDateTime end = start.plusDays(1);

  // COMPLETE and inspected within [start, end) — i.e. finished today (KST).
  BooleanExpression doneToday =
      labelingAssignmentEntity
          .inspectState
          .eq(InspectState.COMPLETE.getId())
          .and(labelingAssignmentEntity.inspectStatDttm.goe(start))
          .and(labelingAssignmentEntity.inspectStatDttm.lt(end));

  // Still awaiting review, or explicitly excluded.
  BooleanExpression unconfirmOrExcept =
      labelingAssignmentEntity.inspectState.in(
          InspectState.EXCEPT.getId(), InspectState.UNCONFIRM.getId());

  BooleanExpression dayStateCondition = doneToday.or(unconfirmOrExcept);

  Pageable pageable = PageRequest.of(searchReq.getPage(), searchReq.getSize());
  List<ReviewListDto> list =
      queryFactory
          .select(
              Projections.constructor(
                  ReviewListDto.class,
                  labelingAssignmentEntity.assignmentUid,
                  labelingAssignmentEntity.inferenceGeomUid,
                  labelingAssignmentEntity.inspectorUid,
                  labelingAssignmentEntity.inspectState,
                  labelingAssignmentEntity.assignGroupId,
                  mapInkx5kEntity.mapidNm,
                  mapSheetAnalDataInferenceGeomEntity.pnu))
          .from(labelingAssignmentEntity)
          .innerJoin(mapSheetAnalDataInferenceGeomEntity)
          .on(
              labelingAssignmentEntity.inferenceGeomUid.eq(
                  mapSheetAnalDataInferenceGeomEntity.geoUid))
          .innerJoin(mapInkx5kEntity)
          .on(labelingAssignmentEntity.assignGroupId.eq(mapInkx5kEntity.mapidcdNo))
          .where(
              labelingAssignmentEntity.inspectorUid.eq(userId),
              dayStateCondition,
              labelingAssignmentEntity.workState.eq("DONE")) // only labeling-complete rows are reviewable
          .offset(pageable.getOffset())
          .limit(pageable.getPageSize())
          .orderBy(
              labelingAssignmentEntity.createdDate.asc(),
              labelingAssignmentEntity.inferenceGeomUid.asc())
          .fetch();

  // Count query repeats the same joins/filters so the page total matches the row query.
  Long count =
      Optional.ofNullable(
              queryFactory
                  .select(labelingAssignmentEntity.assignmentUid.count())
                  .from(labelingAssignmentEntity)
                  .innerJoin(mapSheetAnalDataInferenceGeomEntity)
                  .on(
                      labelingAssignmentEntity.inferenceGeomUid.eq(
                          mapSheetAnalDataInferenceGeomEntity.geoUid))
                  .innerJoin(mapInkx5kEntity)
                  .on(labelingAssignmentEntity.assignGroupId.eq(mapInkx5kEntity.mapidcdNo))
                  .where(
                      labelingAssignmentEntity.inspectorUid.eq(userId),
                      dayStateCondition,
                      labelingAssignmentEntity.workState.eq("DONE"))
                  .fetchOne())
          .orElse(0L);

  return new PageImpl<>(list, pageable, count);
}
|
||||
|
||||
/**
 * Loads the geometry bundle for one review assignment: the inference polygon and the enclosing
 * 5k map-sheet outline (both as GeoJSON via ST_AsGeoJSON), plus before/after COG image URLs.
 *
 * <p>The imagery join matches either the compare year or the target year; {@code makeCogUrl}
 * is year-conditional, so {@code max()} picks the matching row's URL on each side.
 *
 * @param operatorUid assignment UUID as a string
 * @return the populated info, or null when the assignment does not exist
 */
@Override
public ReviewGeometryInfo findReviewAssignedGeom(String operatorUid) {
  return queryFactory
      .select(
          Projections.constructor(
              ReviewGeometryInfo.class,
              labelingAssignmentEntity.assignmentUid,
              labelingAssignmentEntity.inferenceGeomUid,
              Expressions.stringTemplate(
                  "ST_AsGeoJSON({0})", mapSheetAnalDataInferenceGeomEntity.geom),
              // before side = compare year imagery, after side = target year imagery
              makeCogUrl(mapSheetAnalDataInferenceGeomEntity.compareYyyy)
                  .max()
                  .as("beforeCogUrl"),
              makeCogUrl(mapSheetAnalDataInferenceGeomEntity.targetYyyy).max().as("afterCogUrl"),
              Expressions.stringTemplate("ST_AsGeoJSON({0})", mapInkx5kEntity.geom)))
      .from(labelingAssignmentEntity)
      .innerJoin(mapSheetAnalDataInferenceGeomEntity)
      .on(
          labelingAssignmentEntity.inferenceGeomUid.eq(
              mapSheetAnalDataInferenceGeomEntity.geoUid))
      .innerJoin(mapInkx5kEntity)
      .on(labelingAssignmentEntity.assignGroupId.eq(mapInkx5kEntity.mapidcdNo))
      // Left join: a missing imagery row still returns the geometries (URL sides become null).
      .leftJoin(imageryEntity)
      .on(
          imageryEntity
              .scene5k
              .eq(labelingAssignmentEntity.assignGroupId)
              .and(
                  imageryEntity
                      .year
                      .eq(mapSheetAnalDataInferenceGeomEntity.compareYyyy)
                      .or(imageryEntity.year.eq(mapSheetAnalDataInferenceGeomEntity.targetYyyy))))
      .where(labelingAssignmentEntity.assignmentUid.eq(UUID.fromString(operatorUid)))
      // max() aggregation over the joined imagery rows requires grouping by all other columns.
      .groupBy(
          labelingAssignmentEntity.assignmentUid,
          labelingAssignmentEntity.inferenceGeomUid,
          labelingAssignmentEntity.inspectorUid,
          labelingAssignmentEntity.inspectState,
          labelingAssignmentEntity.assignGroupId,
          mapInkx5kEntity.mapidNm,
          mapSheetAnalDataInferenceGeomEntity.pnu,
          mapSheetAnalDataInferenceGeomEntity.geom,
          mapInkx5kEntity.geom)
      .fetchOne();
}
|
||||
|
||||
@Override
|
||||
public Long findReviewOperatorGeoUid(String operatorUid) {
|
||||
return queryFactory
|
||||
.select(labelingAssignmentEntity.inferenceGeomUid)
|
||||
.from(labelingAssignmentEntity)
|
||||
.where(labelingAssignmentEntity.assignmentUid.eq(UUID.fromString(operatorUid)))
|
||||
.fetchOne();
|
||||
}
|
||||
|
||||
/**
 * Sets the inspection state of an assignment and stamps the inspection time with "now".
 *
 * @param operatorUid assignment UUID as a string
 * @param status new inspect state value
 * @param memo reviewer memo — NOTE(review): currently ignored, no column is written for it;
 *     confirm whether it should be persisted
 */
@Override
public void updateReviewStateOperator(String operatorUid, String status, String memo) {
  queryFactory
      .update(labelingAssignmentEntity)
      .set(labelingAssignmentEntity.inspectState, status)
      .set(labelingAssignmentEntity.inspectStatDttm, ZonedDateTime.now())
      .where(labelingAssignmentEntity.assignmentUid.eq(UUID.fromString(operatorUid)))
      .execute();
}
|
||||
|
||||
/**
 * Intentionally a no-op: when a review is excluded, only the assignment table is updated
 * (elsewhere) — the inference_geom table has no inspect_state column to write.
 *
 * @param inferenceGeomUid unused
 * @param status unused
 */
@Override
public void updateReviewExceptState(Long inferenceGeomUid, String status) {
  // On review exclusion only the assignment table is updated
  // (the inference_geom table has no inspect_state column).
}
|
||||
|
||||
/**
 * Applies a reviewer's correction to a polygon: upserts the corrected geometry and
 * before/after class codes into learn_data_geom, keyed by the inference geometry UID.
 *
 * @param inferenceGeomUid key of the inference polygon being corrected
 * @param geometry corrected geometry
 * @param properties corrected before/after class codes (stored lower-cased)
 * @param status unused in this method — NOTE(review): confirm whether state should change here
 * @throws EntityNotFoundException when no inference geometry exists for the UID
 */
@Override
public void updateReviewPolygonClass(
    Long inferenceGeomUid, Geometry geometry, Properties properties, String status) {
  // Load the source inference row; its years seed a fresh learn_data_geom row on insert.
  MapSheetAnalDataInferenceGeomEntity entity =
      queryFactory
          .selectFrom(mapSheetAnalDataInferenceGeomEntity)
          .where(mapSheetAnalDataInferenceGeomEntity.geoUid.eq(inferenceGeomUid))
          .fetchOne();

  if (Objects.isNull(entity)) {
    throw new EntityNotFoundException(
        "MapSheetAnalDataInferenceGeomEntity not found for inferenceGeomUid: "
            + inferenceGeomUid);
  }

  // Upsert into learn_data_geom: update when a row with this geoUid already exists.
  Long existingLearnGeomUid =
      queryFactory
          .select(mapSheetLearnDataGeomEntity.geoUid)
          .from(mapSheetLearnDataGeomEntity)
          .where(mapSheetLearnDataGeomEntity.geoUid.eq(inferenceGeomUid))
          .fetchOne();

  if (existingLearnGeomUid != null) {
    // Update the existing learn-data row in place.
    queryFactory
        .update(mapSheetLearnDataGeomEntity)
        .set(mapSheetLearnDataGeomEntity.classAfterCd, properties.getAfterClass().toLowerCase())
        .set(mapSheetLearnDataGeomEntity.classBeforeCd, properties.getBeforeClass().toLowerCase())
        .set(mapSheetLearnDataGeomEntity.geom, geometry)
        .set(mapSheetLearnDataGeomEntity.modifiedDate, ZonedDateTime.now())
        .where(mapSheetLearnDataGeomEntity.geoUid.eq(inferenceGeomUid))
        .execute();
  } else {
    // Insert a new learn-data row (afterYyyy <- targetYyyy, beforeYyyy <- compareYyyy).
    queryFactory
        .insert(mapSheetLearnDataGeomEntity)
        .columns(
            mapSheetLearnDataGeomEntity.geoUid,
            mapSheetLearnDataGeomEntity.afterYyyy,
            mapSheetLearnDataGeomEntity.beforeYyyy,
            mapSheetLearnDataGeomEntity.classAfterCd,
            mapSheetLearnDataGeomEntity.classBeforeCd,
            mapSheetLearnDataGeomEntity.geom,
            mapSheetLearnDataGeomEntity.createdDate,
            mapSheetLearnDataGeomEntity.modifiedDate)
        .values(
            inferenceGeomUid,
            entity.getTargetYyyy(),
            entity.getCompareYyyy(),
            properties.getAfterClass().toLowerCase(),
            properties.getBeforeClass().toLowerCase(),
            geometry,
            ZonedDateTime.now(),
            ZonedDateTime.now())
        .execute();
  }
}
|
||||
|
||||
@Override
|
||||
public SummaryRes getSummary(String userId) {
|
||||
// 기본값 설정
|
||||
Long totalCnt = 0L;
|
||||
Long undoneCnt = 0L;
|
||||
Long todayCnt = 0L;
|
||||
|
||||
try {
|
||||
System.out.println("=== getSummary START (Review) ===");
|
||||
System.out.println("userId: " + userId);
|
||||
|
||||
// 1. 전체 배정 건수 (라벨링 완료된 것만)
|
||||
try {
|
||||
Long result =
|
||||
queryFactory
|
||||
.select(labelingAssignmentEntity.count())
|
||||
.from(labelingAssignmentEntity)
|
||||
.where(
|
||||
labelingAssignmentEntity.inspectorUid.eq(userId),
|
||||
labelingAssignmentEntity.workState.eq("DONE"))
|
||||
.fetchOne();
|
||||
|
||||
totalCnt = (result != null) ? result : 0L;
|
||||
System.out.println("totalCnt: " + totalCnt);
|
||||
} catch (Exception e) {
|
||||
System.err.println(
|
||||
"totalCnt query error: " + e.getClass().getName() + " - " + e.getMessage());
|
||||
if (e.getCause() != null) {
|
||||
System.err.println("Caused by: " + e.getCause().getMessage());
|
||||
}
|
||||
totalCnt = 0L;
|
||||
}
|
||||
|
||||
// 2. 미작업 건수 (UNCONFIRM 상태)
|
||||
try {
|
||||
Long result =
|
||||
queryFactory
|
||||
.select(labelingAssignmentEntity.count())
|
||||
.from(labelingAssignmentEntity)
|
||||
.where(
|
||||
labelingAssignmentEntity.inspectorUid.eq(userId),
|
||||
labelingAssignmentEntity.workState.eq("DONE"),
|
||||
labelingAssignmentEntity.inspectState.eq("UNCONFIRM"))
|
||||
.fetchOne();
|
||||
|
||||
undoneCnt = (result != null) ? result : 0L;
|
||||
System.out.println("undoneCnt: " + undoneCnt);
|
||||
} catch (Exception e) {
|
||||
System.err.println(
|
||||
"undoneCnt query error: " + e.getClass().getName() + " - " + e.getMessage());
|
||||
if (e.getCause() != null) {
|
||||
System.err.println("Caused by: " + e.getCause().getMessage());
|
||||
}
|
||||
undoneCnt = 0L;
|
||||
}
|
||||
|
||||
// 3. 오늘 완료 건수
|
||||
try {
|
||||
// 오늘 날짜의 시작과 끝 시간 계산
|
||||
ZonedDateTime startOfToday = LocalDate.now().atStartOfDay(java.time.ZoneId.systemDefault());
|
||||
ZonedDateTime endOfToday = startOfToday.plusDays(1);
|
||||
System.out.println("startOfToday: " + startOfToday);
|
||||
System.out.println("endOfToday: " + endOfToday);
|
||||
|
||||
Long result =
|
||||
queryFactory
|
||||
.select(labelingAssignmentEntity.count())
|
||||
.from(labelingAssignmentEntity)
|
||||
.where(
|
||||
labelingAssignmentEntity.inspectorUid.eq(userId),
|
||||
labelingAssignmentEntity.inspectState.eq("COMPLETE"),
|
||||
labelingAssignmentEntity.inspectStatDttm.isNotNull(),
|
||||
labelingAssignmentEntity.inspectStatDttm.goe(startOfToday),
|
||||
labelingAssignmentEntity.inspectStatDttm.lt(endOfToday))
|
||||
.fetchOne();
|
||||
|
||||
todayCnt = (result != null) ? result : 0L;
|
||||
System.out.println("todayCnt: " + todayCnt);
|
||||
} catch (Exception e) {
|
||||
System.err.println(
|
||||
"todayCnt query error: " + e.getClass().getName() + " - " + e.getMessage());
|
||||
if (e.getCause() != null) {
|
||||
System.err.println("Caused by: " + e.getCause().getMessage());
|
||||
}
|
||||
todayCnt = 0L;
|
||||
}
|
||||
|
||||
System.out.println("=== getSummary END (Review) ===");
|
||||
System.out.println(
|
||||
"Final result - totalCnt: "
|
||||
+ totalCnt
|
||||
+ ", undoneCnt: "
|
||||
+ undoneCnt
|
||||
+ ", todayCnt: "
|
||||
+ todayCnt);
|
||||
|
||||
} catch (Exception e) {
|
||||
// 최상위 예외 처리
|
||||
System.err.println("=== getSummary OUTER ERROR (Review) ===");
|
||||
System.err.println("Error: " + e.getClass().getName() + " - " + e.getMessage());
|
||||
if (e.getCause() != null) {
|
||||
System.err.println("Caused by: " + e.getCause().getMessage());
|
||||
}
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
// 항상 정상 응답 반환 (예외를 throw하지 않음)
|
||||
return SummaryRes.builder().totalCnt(totalCnt).undoneCnt(undoneCnt).todayCnt(todayCnt).build();
|
||||
}
|
||||
|
||||
@Override
public DetailRes getDetail(UUID operatorUid) {
  // Builds the full review-detail payload for one assignment: change-detection
  // info, inspection result, inference geometry (GeoJSON), COG image URLs,
  // the map-sheet bbox, and any labeler-saved geometries.
  try {
    // 1. Load the work-assignment row.
    var assignment =
        queryFactory
            .selectFrom(labelingAssignmentEntity)
            .where(labelingAssignmentEntity.assignmentUid.eq(operatorUid))
            .fetchOne();

    if (assignment == null) {
      throw new RuntimeException("Assignment not found: " + operatorUid);
    }

    // 2. Load the inference-geometry row referenced by the assignment.
    var mapSheetAnalDataInferenceGeomEntityEntity =
        queryFactory
            .selectFrom(mapSheetAnalDataInferenceGeomEntity)
            .where(
                mapSheetAnalDataInferenceGeomEntity.geoUid.eq(
                    assignment.toDto().getInferenceGeomUid()))
            .fetchOne();

    if (mapSheetAnalDataInferenceGeomEntityEntity == null) {
      throw new RuntimeException(
          "Inference geometry not found: " + assignment.toDto().getInferenceGeomUid());
    }

    // 3. Load the 1:5k map-sheet row. NOTE(review): this may be null — the
    // name lookup below guards for it, but step "map bbox" dereferences
    // mapSheetEntity.getGeom() without a null check; confirm the sheet is
    // guaranteed to exist for every assign-group id.
    var mapSheetEntity =
        queryFactory
            .selectFrom(mapInkx5kEntity)
            .where(mapInkx5kEntity.mapidcdNo.eq(assignment.toDto().getAssignGroupId()))
            .fetchOne();

    // 4. COG URLs (before = compare year, after = target year) — imagery only.
    // Failures here are non-fatal: URLs stay "".
    String beforeCogUrl = "";
    String afterCogUrl = "";

    try {
      var beforeImagery =
          queryFactory
              .select(
                  Expressions.stringTemplate(
                      "{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
              .from(imageryEntity)
              .where(
                  imageryEntity.scene5k.eq(assignment.toDto().getAssignGroupId()),
                  imageryEntity.year.eq(
                      mapSheetAnalDataInferenceGeomEntityEntity.getCompareYyyy()))
              .fetchFirst();
      beforeCogUrl = beforeImagery != null ? beforeImagery : "";

      var afterImagery =
          queryFactory
              .select(
                  Expressions.stringTemplate(
                      "{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
              .from(imageryEntity)
              .where(
                  imageryEntity.scene5k.eq(assignment.toDto().getAssignGroupId()),
                  imageryEntity.year.eq(
                      mapSheetAnalDataInferenceGeomEntityEntity.getTargetYyyy()))
              .fetchFirst();
      afterCogUrl = afterImagery != null ? afterImagery : "";
    } catch (Exception e) {
      System.err.println("COG URL retrieval error: " + e.getMessage());
      // Keep empty strings when COG URL lookup fails.
    }

    // 5. Assemble the change-detection DTO; every nullable field falls back
    // to "" / 0 / 0.0 so the client never sees nulls.
    var changeDetectionInfo =
        ChangeDetectionInfo.builder()
            .mapSheetInfo(mapSheetEntity != null ? mapSheetEntity.getMapidNm() : "")
            .detectionYear(
                (mapSheetAnalDataInferenceGeomEntityEntity.getCompareYyyy() != null
                        ? mapSheetAnalDataInferenceGeomEntityEntity.getCompareYyyy()
                        : 0)
                    + "-"
                    + (mapSheetAnalDataInferenceGeomEntityEntity.getTargetYyyy() != null
                        ? mapSheetAnalDataInferenceGeomEntityEntity.getTargetYyyy()
                        : 0))
            .beforeClass(
                ClassificationInfo.builder()
                    .classification(
                        mapSheetAnalDataInferenceGeomEntityEntity.getClassBeforeCd() != null
                            ? mapSheetAnalDataInferenceGeomEntityEntity.getClassBeforeCd()
                            : "")
                    .probability(
                        mapSheetAnalDataInferenceGeomEntityEntity.getClassBeforeProb() != null
                            ? mapSheetAnalDataInferenceGeomEntityEntity.getClassBeforeProb()
                            : 0.0)
                    .build())
            .afterClass(
                ClassificationInfo.builder()
                    .classification(
                        mapSheetAnalDataInferenceGeomEntityEntity.getClassAfterCd() != null
                            ? mapSheetAnalDataInferenceGeomEntityEntity.getClassAfterCd()
                            : "")
                    .probability(
                        mapSheetAnalDataInferenceGeomEntityEntity.getClassAfterProb() != null
                            ? mapSheetAnalDataInferenceGeomEntityEntity.getClassAfterProb()
                            : 0.0)
                    .build())
            .area(
                mapSheetAnalDataInferenceGeomEntityEntity.getArea() != null
                    ? mapSheetAnalDataInferenceGeomEntityEntity.getArea()
                    : 0.0)
            .detectionAccuracy(
                mapSheetAnalDataInferenceGeomEntityEntity.getCdProb() != null
                    ? mapSheetAnalDataInferenceGeomEntityEntity.getCdProb()
                    : 0.0)
            .pnu(
                mapSheetAnalDataInferenceGeomEntityEntity.getPnu() != null
                    ? mapSheetAnalDataInferenceGeomEntityEntity.getPnu()
                    : 0L)
            .build();

    // Inspection result: only the state is mapped; reason/memo are not yet
    // persisted and are returned empty.
    var inspectionResultInfo =
        InspectionResultInfo.builder()
            .verificationResult(convertInspectState(assignment.toDto().getInspectState()))
            .inappropriateReason("")
            .memo("")
            .build();

    // 6. Inference geometry as a GeoJSON Feature (ST_AsGeoJSON in the DB).
    InferenceDataGeometry inferData =
        queryFactory
            .select(
                Projections.constructor(
                    InferenceDataGeometry.class,
                    Expressions.stringTemplate("{0}", "Feature"),
                    Expressions.stringTemplate(
                        "ST_AsGeoJSON({0})", mapSheetAnalDataInferenceGeomEntity.geom),
                    Projections.constructor(
                        InferenceProperties.class,
                        mapSheetAnalDataInferenceGeomEntity.classBeforeCd,
                        mapSheetAnalDataInferenceGeomEntity.classAfterCd)))
            .from(mapSheetAnalDataInferenceGeomEntity)
            .where(
                mapSheetAnalDataInferenceGeomEntity.geoUid.eq(
                    mapSheetAnalDataInferenceGeomEntityEntity.getGeoUid()))
            .fetchOne();

    // Map-sheet bbox as parsed GeoJSON (null when the sheet has no geometry).
    JsonNode mapBbox = null;
    if (mapSheetEntity.getGeom() != null) {
      try {
        String bboxString =
            queryFactory
                .select(Expressions.stringTemplate("ST_AsGeoJSON({0})", mapInkx5kEntity.geom))
                .from(mapInkx5kEntity)
                .where(mapInkx5kEntity.mapidcdNo.eq(assignment.toDto().getAssignGroupId()))
                .fetchOne();
        if (bboxString != null) {
          // NOTE(review): ObjectMapper is cheap to reuse — consider a
          // static final instance instead of per-call construction.
          ObjectMapper mapper = new ObjectMapper();
          mapBbox = mapper.readTree(bboxString);
        }
      } catch (Exception e) {
        throw new RuntimeException("GeoJSON parsing error: " + e.getMessage());
      }
    }

    // 7. Labeler-saved geometries (zero or more) as GeoJSON Features.
    List<LearnDataGeometry> learnDataList =
        queryFactory
            .select(
                Projections.constructor(
                    LearnDataGeometry.class,
                    Expressions.stringTemplate("{0}", "Feature"),
                    Expressions.stringTemplate(
                        "ST_AsGeoJSON({0})", mapSheetLearnDataGeomEntity.geom),
                    Projections.constructor(
                        LearnProperties.class,
                        mapSheetLearnDataGeomEntity.classBeforeCd,
                        mapSheetLearnDataGeomEntity.classAfterCd)))
            .from(mapSheetLearnDataGeomEntity)
            .where(
                mapSheetLearnDataGeomEntity.geoUid.eq(
                    mapSheetAnalDataInferenceGeomEntityEntity.getGeoUid()))
            .fetch(); // fetch(): multiple saved polygons are possible

    return DetailRes.builder()
        .operatorUid(operatorUid)
        .changeDetectionInfo(changeDetectionInfo)
        .inspectionResultInfo(inspectionResultInfo)
        .geom(inferData)
        .beforeCogUrl(beforeCogUrl)
        .afterCogUrl(afterCogUrl)
        .mapBox(mapBbox)
        .learnGeometries(learnDataList)
        .build();

  } catch (Exception e) {
    // Wrap with the failing UID so the caller can correlate logs.
    System.err.println("getDetail Error: " + e.getMessage());
    e.printStackTrace();
    throw new RuntimeException("Failed to get detail for operatorUid: " + operatorUid, e);
  }
}
|
||||
|
||||
@Override
|
||||
public DefaultPaging getDefaultPagingNumber(String userId, Integer size, String operatorUid) {
|
||||
|
||||
ZoneId KST = ZoneId.of("Asia/Seoul");
|
||||
ZonedDateTime todayStart = ZonedDateTime.now(KST).toLocalDate().atStartOfDay(KST);
|
||||
ZonedDateTime todayEnd = todayStart.plusDays(1);
|
||||
|
||||
BooleanExpression doneToday =
|
||||
labelingAssignmentEntity
|
||||
.inspectState
|
||||
.eq(InspectState.COMPLETE.getId())
|
||||
.and(labelingAssignmentEntity.inspectStatDttm.goe(todayStart))
|
||||
.and(labelingAssignmentEntity.inspectStatDttm.lt(todayEnd));
|
||||
|
||||
BooleanExpression unconfirmOrExcept =
|
||||
labelingAssignmentEntity.inspectState.in(
|
||||
InspectState.EXCEPT.getId(), InspectState.UNCONFIRM.getId());
|
||||
|
||||
BooleanExpression stateCondition = doneToday.or(unconfirmOrExcept);
|
||||
|
||||
Tuple firstAssigned =
|
||||
queryFactory
|
||||
.select(
|
||||
labelingAssignmentEntity.assignmentUid,
|
||||
labelingAssignmentEntity.createdDate,
|
||||
labelingAssignmentEntity.inferenceGeomUid)
|
||||
.from(labelingAssignmentEntity)
|
||||
.where(
|
||||
labelingAssignmentEntity.inspectorUid.eq(userId),
|
||||
labelingAssignmentEntity.workState.eq("DONE"),
|
||||
stateCondition,
|
||||
operatorUid == null
|
||||
? labelingAssignmentEntity.inspectState.eq(InspectState.UNCONFIRM.getId())
|
||||
: labelingAssignmentEntity.assignmentUid.eq(UUID.fromString(operatorUid)))
|
||||
.orderBy(
|
||||
labelingAssignmentEntity.createdDate.asc(),
|
||||
labelingAssignmentEntity.inferenceGeomUid.asc())
|
||||
.limit(1)
|
||||
.fetchOne();
|
||||
|
||||
if (firstAssigned == null) {
|
||||
return DefaultPaging.builder().page(0).operatorUid(null).build();
|
||||
}
|
||||
|
||||
UUID firstAssignedUid = firstAssigned.get(labelingAssignmentEntity.assignmentUid);
|
||||
ZonedDateTime createdDttm = firstAssigned.get(labelingAssignmentEntity.createdDate);
|
||||
Long inferenceGeomUid = firstAssigned.get(labelingAssignmentEntity.inferenceGeomUid);
|
||||
|
||||
BooleanExpression beforeCondition =
|
||||
labelingAssignmentEntity
|
||||
.createdDate
|
||||
.lt(createdDttm)
|
||||
.or(
|
||||
labelingAssignmentEntity
|
||||
.createdDate
|
||||
.eq(createdDttm)
|
||||
.and(labelingAssignmentEntity.inferenceGeomUid.lt(inferenceGeomUid)));
|
||||
|
||||
Long beforeCnt =
|
||||
queryFactory
|
||||
.select(labelingAssignmentEntity.count())
|
||||
.from(labelingAssignmentEntity)
|
||||
.where(
|
||||
labelingAssignmentEntity.inspectorUid.eq(userId),
|
||||
labelingAssignmentEntity.workState.eq("DONE"),
|
||||
beforeCondition.and(stateCondition))
|
||||
.fetchOne();
|
||||
|
||||
int page = (int) (beforeCnt / size); // 기본 사이즈 20
|
||||
return DefaultPaging.builder().page(page).operatorUid(firstAssignedUid).build();
|
||||
}
|
||||
|
||||
@Override
public void saveNewPolygon(TrainingDataReviewDto.NewPolygonRequest request) {
  // Persists one or more user-drawn polygons: each feature is inserted into
  // the inference-geom table and mirrored into the learn-data-geom table.
  // Per-feature failures are tolerated; the call fails only if nothing saved.
  try {
    if (request.getFeatures() == null || request.getFeatures().isEmpty()) {
      throw new RuntimeException("No polygons to save");
    }

    System.out.println(
        "Saving "
            + request.getFeatures().size()
            + " new polygon(s) for mapSheetNum: "
            + request.getMapSheetNum());

    // Convert mapSheetNum to Long; a malformed value is logged and stored as null.
    Long mapSheetNumLong = null;
    try {
      if (request.getMapSheetNum() != null && !request.getMapSheetNum().isEmpty()) {
        mapSheetNumLong = Long.parseLong(request.getMapSheetNum());
      }
    } catch (NumberFormatException e) {
      System.err.println("Invalid mapSheetNum format: " + request.getMapSheetNum());
    }

    int savedCount = 0;
    for (TrainingDataReviewDto.NewPolygonRequest.PolygonFeature feature : request.getFeatures()) {
      try {
        // 1. Insert the new polygon into map_sheet_anal_data_inference_geom.
        //    Class codes are normalized to lower case on write.
        queryFactory
            .insert(mapSheetAnalDataInferenceGeomEntity)
            .columns(
                mapSheetAnalDataInferenceGeomEntity.geom,
                mapSheetAnalDataInferenceGeomEntity.compareYyyy,
                mapSheetAnalDataInferenceGeomEntity.targetYyyy,
                mapSheetAnalDataInferenceGeomEntity.classBeforeCd,
                mapSheetAnalDataInferenceGeomEntity.classAfterCd,
                mapSheetAnalDataInferenceGeomEntity.dataUid,
                mapSheetAnalDataInferenceGeomEntity.mapSheetNum,
                mapSheetAnalDataInferenceGeomEntity.createdDttm,
                mapSheetAnalDataInferenceGeomEntity.updatedDttm,
                mapSheetAnalDataInferenceGeomEntity.labelState)
            .values(
                feature.getGeometry(),
                request.getCompareYyyy(),
                request.getTargetYyyy(),
                feature.getProperties().getBeforeClass().toLowerCase(),
                feature.getProperties().getAfterClass().toLowerCase(),
                request.getAnalUid(),
                mapSheetNumLong,
                ZonedDateTime.now(),
                ZonedDateTime.now(),
                "DONE")
            .execute();

        // 2. Look up the generated geoUid by re-querying the newest matching
        //    row. NOTE(review): this is race-prone — a concurrent insert with
        //    the same years/classes could return the wrong UID. Prefer a DB
        //    RETURNING clause / generated-key retrieval if available.
        Long geoUid =
            queryFactory
                .select(mapSheetAnalDataInferenceGeomEntity.geoUid)
                .from(mapSheetAnalDataInferenceGeomEntity)
                .where(
                    mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(request.getCompareYyyy()),
                    mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(request.getTargetYyyy()),
                    mapSheetAnalDataInferenceGeomEntity.classBeforeCd.eq(
                        feature.getProperties().getBeforeClass().toLowerCase()),
                    mapSheetAnalDataInferenceGeomEntity.classAfterCd.eq(
                        feature.getProperties().getAfterClass().toLowerCase()))
                .orderBy(mapSheetAnalDataInferenceGeomEntity.geoUid.desc())
                .fetchFirst();

        if (geoUid == null) {
          System.err.println("Failed to get geo_uid for polygon #" + (savedCount + 1));
          continue;
        }

        // 3. Mirror the polygon into learn_data_geom (note: after/before year
        //    columns map to target/compare year respectively).
        queryFactory
            .insert(mapSheetLearnDataGeomEntity)
            .columns(
                mapSheetLearnDataGeomEntity.geoUid,
                mapSheetLearnDataGeomEntity.afterYyyy,
                mapSheetLearnDataGeomEntity.beforeYyyy,
                mapSheetLearnDataGeomEntity.classAfterCd,
                mapSheetLearnDataGeomEntity.classBeforeCd,
                mapSheetLearnDataGeomEntity.geom,
                mapSheetLearnDataGeomEntity.createdDate,
                mapSheetLearnDataGeomEntity.modifiedDate)
            .values(
                geoUid,
                request.getTargetYyyy(),
                request.getCompareYyyy(),
                feature.getProperties().getAfterClass().toLowerCase(),
                feature.getProperties().getBeforeClass().toLowerCase(),
                feature.getGeometry(),
                ZonedDateTime.now(),
                ZonedDateTime.now())
            .execute();

        savedCount++;
        System.out.println(
            "Successfully saved polygon #" + savedCount + " with geo_uid: " + geoUid);

      } catch (Exception e) {
        System.err.println("Error saving polygon #" + (savedCount + 1) + ": " + e.getMessage());
        e.printStackTrace();
        // Continue with the remaining polygons even if one fails.
      }
    }

    if (savedCount == 0) {
      throw new RuntimeException("Failed to save any polygons");
    }

    System.out.println(
        "Successfully saved " + savedCount + "/" + request.getFeatures().size() + " polygon(s)");

  } catch (Exception e) {
    System.err.println("saveNewPolygon Error: " + e.getMessage());
    e.printStackTrace();
    throw new RuntimeException("Failed to save new polygons", e);
  }
}
|
||||
|
||||
@Override
|
||||
public TrainingDataReviewDto.CogImageResponse getCogImageUrl(
|
||||
String mapSheetNum, Integer beforeYear, Integer afterYear) {
|
||||
try {
|
||||
// beforeYear COG URL 조회
|
||||
String beforeCogUrl =
|
||||
queryFactory
|
||||
.select(
|
||||
Expressions.stringTemplate(
|
||||
"{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
|
||||
.from(imageryEntity)
|
||||
.where(imageryEntity.scene5k.eq(mapSheetNum), imageryEntity.year.eq(beforeYear))
|
||||
.fetchFirst();
|
||||
|
||||
// afterYear COG URL 조회
|
||||
String afterCogUrl =
|
||||
queryFactory
|
||||
.select(
|
||||
Expressions.stringTemplate(
|
||||
"{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
|
||||
.from(imageryEntity)
|
||||
.where(imageryEntity.scene5k.eq(mapSheetNum), imageryEntity.year.eq(afterYear))
|
||||
.fetchFirst();
|
||||
|
||||
if (beforeCogUrl == null && afterCogUrl == null) {
|
||||
throw new RuntimeException(
|
||||
"COG images not found for mapSheetNum: "
|
||||
+ mapSheetNum
|
||||
+ ", years: "
|
||||
+ beforeYear
|
||||
+ ", "
|
||||
+ afterYear);
|
||||
}
|
||||
|
||||
return TrainingDataReviewDto.CogImageResponse.builder()
|
||||
.beforeCogUrl(beforeCogUrl != null ? beforeCogUrl : "")
|
||||
.afterCogUrl(afterCogUrl != null ? afterCogUrl : "")
|
||||
.beforeYear(beforeYear)
|
||||
.afterYear(afterYear)
|
||||
.mapSheetNum(mapSheetNum)
|
||||
.build();
|
||||
|
||||
} catch (Exception e) {
|
||||
System.err.println("getCogImageUrl Error: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
throw new RuntimeException(
|
||||
"Failed to get COG image URLs for mapSheetNum: "
|
||||
+ mapSheetNum
|
||||
+ ", years: "
|
||||
+ beforeYear
|
||||
+ ", "
|
||||
+ afterYear,
|
||||
e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Builds a COG-URL expression: when the imagery row's year matches the given
 * year path, concatenates cogMiddlePath || cogFilename; otherwise "".
 *
 * <p>NOTE(review): {@code imageryEntity.year.eq(year)} with {@code year ==
 * imageryEntity.year} would be trivially true — no caller is visible in this
 * chunk; confirm this helper is actually used with a different year path, or
 * remove it.
 */
private StringExpression makeCogUrl(NumberPath<Integer> year) {
  return new CaseBuilder()
      .when(imageryEntity.year.eq(year))
      .then(
          Expressions.stringTemplate(
              "{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
      .otherwise("");
}
|
||||
|
||||
private BooleanExpression statusInInspectState(String status) {
|
||||
if (Objects.isNull(status)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
String[] arrStatus = status.split(",");
|
||||
return labelingAssignmentEntity.inspectState.in(arrStatus);
|
||||
}
|
||||
|
||||
private String convertInspectState(String inspectState) {
|
||||
if (inspectState == null) {
|
||||
return "미확인";
|
||||
}
|
||||
switch (inspectState) {
|
||||
case "UNCONFIRM":
|
||||
return "미확인";
|
||||
case "EXCEPT":
|
||||
return "제외";
|
||||
case "COMPLETE":
|
||||
return "완료";
|
||||
default:
|
||||
return "미확인";
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -132,7 +132,9 @@ public class TrainingDataLabelApiController {
|
||||
}
|
||||
}
|
||||
|
||||
@Operation(summary = "변화탐지정보 및 실태조사결과 조회", description = "선택한 작업의 변화탐지정보 및 실태조사결과를 조회합니다.")
|
||||
@Operation(
|
||||
summary = "변화탐지정보 및 실태조사결과 조회",
|
||||
description = "선택한 작업의 변화탐지정보 및 실태조사결과를 조회합니다. 저장된 여러 개의 polygon을 조회할 수 있습니다.")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(
|
||||
@@ -141,7 +143,155 @@ public class TrainingDataLabelApiController {
|
||||
content =
|
||||
@Content(
|
||||
mediaType = "application/json",
|
||||
schema = @Schema(implementation = TrainingDataLabelDto.DetailRes.class))),
|
||||
schema = @Schema(implementation = TrainingDataLabelDto.DetailRes.class),
|
||||
examples = {
|
||||
@io.swagger.v3.oas.annotations.media.ExampleObject(
|
||||
name = "단일 polygon 조회",
|
||||
description = "1개의 polygon이 저장된 경우 응답 예시",
|
||||
value =
|
||||
"""
|
||||
{
|
||||
"code": "OK",
|
||||
"message": null,
|
||||
"data": {
|
||||
"assignmentUid": "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02",
|
||||
"changeDetectionInfo": {
|
||||
"mapSheetInfo": "NI52-3-13-1",
|
||||
"detectionYear": "2023-2024",
|
||||
"beforeClass": {
|
||||
"classification": "waste",
|
||||
"probability": 0.95
|
||||
},
|
||||
"afterClass": {
|
||||
"classification": "land",
|
||||
"probability": 0.98
|
||||
},
|
||||
"area": 1250.5,
|
||||
"detectionAccuracy": 0.96,
|
||||
"pnu": 1234567890
|
||||
},
|
||||
"inspectionResultInfo": {
|
||||
"verificationResult": "완료",
|
||||
"inappropriateReason": ""
|
||||
},
|
||||
"geom": {
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[126.663, 34.588], [126.662, 34.587], [126.664, 34.589], [126.663, 34.588]]]
|
||||
},
|
||||
"properties": {
|
||||
"beforeClass": "waste",
|
||||
"afterClass": "land"
|
||||
}
|
||||
},
|
||||
"beforeCogUrl": "https://storage.example.com/cog/2023/NI52-3-13-1.tif",
|
||||
"afterCogUrl": "https://storage.example.com/cog/2024/NI52-3-13-1.tif",
|
||||
"mapBox": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[126.65, 34.58], [126.67, 34.58], [126.67, 34.60], [126.65, 34.60], [126.65, 34.58]]]
|
||||
},
|
||||
"learnGeometries": [
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[126.663, 34.588], [126.662, 34.587], [126.664, 34.589], [126.663, 34.588]]]
|
||||
},
|
||||
"properties": {
|
||||
"beforeClass": "waste",
|
||||
"afterClass": "land"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"""),
|
||||
@io.swagger.v3.oas.annotations.media.ExampleObject(
|
||||
name = "여러 polygon 조회",
|
||||
description = "3개의 polygon이 저장된 경우 응답 예시",
|
||||
value =
|
||||
"""
|
||||
{
|
||||
"code": "OK",
|
||||
"message": null,
|
||||
"data": {
|
||||
"assignmentUid": "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02",
|
||||
"changeDetectionInfo": {
|
||||
"mapSheetInfo": "NI52-3-13-1",
|
||||
"detectionYear": "2023-2024",
|
||||
"beforeClass": {
|
||||
"classification": "waste",
|
||||
"probability": 0.95
|
||||
},
|
||||
"afterClass": {
|
||||
"classification": "land",
|
||||
"probability": 0.98
|
||||
},
|
||||
"area": 1250.5,
|
||||
"detectionAccuracy": 0.96,
|
||||
"pnu": 1234567890
|
||||
},
|
||||
"inspectionResultInfo": {
|
||||
"verificationResult": "완료",
|
||||
"inappropriateReason": ""
|
||||
},
|
||||
"geom": {
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[126.663, 34.588], [126.662, 34.587], [126.664, 34.589], [126.663, 34.588]]]
|
||||
},
|
||||
"properties": {
|
||||
"beforeClass": "waste",
|
||||
"afterClass": "land"
|
||||
}
|
||||
},
|
||||
"beforeCogUrl": "https://storage.example.com/cog/2023/NI52-3-13-1.tif",
|
||||
"afterCogUrl": "https://storage.example.com/cog/2024/NI52-3-13-1.tif",
|
||||
"mapBox": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[126.65, 34.58], [126.67, 34.58], [126.67, 34.60], [126.65, 34.60], [126.65, 34.58]]]
|
||||
},
|
||||
"learnGeometries": [
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[126.663, 34.588], [126.662, 34.587], [126.664, 34.589], [126.663, 34.588]]]
|
||||
},
|
||||
"properties": {
|
||||
"beforeClass": "waste",
|
||||
"afterClass": "land"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[126.665, 34.585], [126.664, 34.584], [126.666, 34.586], [126.665, 34.585]]]
|
||||
},
|
||||
"properties": {
|
||||
"beforeClass": "forest",
|
||||
"afterClass": "building"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[126.660, 34.590], [126.659, 34.589], [126.661, 34.591], [126.660, 34.590]]]
|
||||
},
|
||||
"properties": {
|
||||
"beforeClass": "grassland",
|
||||
"afterClass": "concrete"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
""")
|
||||
})),
|
||||
@ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
|
||||
@ApiResponse(responseCode = "404", description = "데이터를 찾을 수 없음", content = @Content),
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
|
||||
@@ -151,13 +301,13 @@ public class TrainingDataLabelApiController {
|
||||
@io.swagger.v3.oas.annotations.Parameter(
|
||||
description = "작업 배정 ID (UUID)",
|
||||
required = true,
|
||||
example = "93c56be8-0246-4b22-b976-2476549733cc")
|
||||
@RequestParam
|
||||
example = "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02")
|
||||
@RequestParam(defaultValue = "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02")
|
||||
java.util.UUID assignmentUid) {
|
||||
return ApiResponseDto.ok(trainingDataLabelService.getDetail(assignmentUid));
|
||||
}
|
||||
|
||||
@Operation(summary = "라벨러 기본 page number 제공", description = "라벨러 기본 page number 제공")
|
||||
@Operation(summary = "라벨러 목록 기본정보제공", description = "라벨러 목록 기본정보제공")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(
|
||||
@@ -182,4 +332,162 @@ public class TrainingDataLabelApiController {
|
||||
return ApiResponseDto.ok(
|
||||
trainingDataLabelService.getDefaultPagingNumber(userId, size, assignmentUid));
|
||||
}
|
||||
|
||||
@Operation(
|
||||
summary = "새로운 polygon(들) 추가 저장",
|
||||
description = "탐지결과 외 새로운 polygon을 추가로 저장합니다. 단일 또는 여러 개를 저장할 수 있습니다.")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(
|
||||
responseCode = "200",
|
||||
description = "저장 성공",
|
||||
content =
|
||||
@Content(
|
||||
mediaType = "application/json",
|
||||
schema = @Schema(implementation = ResponseObj.class))),
|
||||
@ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
|
||||
})
|
||||
@PostMapping("/new-polygon")
|
||||
public ApiResponseDto<ResponseObj> saveNewPolygon(
|
||||
@io.swagger.v3.oas.annotations.parameters.RequestBody(
|
||||
description = "새로운 polygon 저장 요청",
|
||||
required = true,
|
||||
content =
|
||||
@Content(
|
||||
mediaType = "application/json",
|
||||
schema =
|
||||
@Schema(implementation = TrainingDataLabelDto.NewPolygonRequest.class),
|
||||
examples = {
|
||||
@io.swagger.v3.oas.annotations.media.ExampleObject(
|
||||
name = "1개 polygon 저장",
|
||||
value =
|
||||
"""
|
||||
{
|
||||
"assignmentUid": "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02",
|
||||
"analUid": 53,
|
||||
"mapSheetNum": "35905086",
|
||||
"compareYyyy": 2023,
|
||||
"targetYyyy": 2024,
|
||||
"features": [
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[126.663, 34.588],
|
||||
[126.662, 34.587],
|
||||
[126.664, 34.589],
|
||||
[126.663, 34.588]
|
||||
]
|
||||
]
|
||||
},
|
||||
"properties": {
|
||||
"beforeClass": "WASTE",
|
||||
"afterClass": "LAND"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
"""),
|
||||
@io.swagger.v3.oas.annotations.media.ExampleObject(
|
||||
name = "3개 polygon 저장",
|
||||
value =
|
||||
"""
|
||||
{
|
||||
"assignmentUid": "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02",
|
||||
"analUid": 53,
|
||||
"mapSheetNum": "35905086",
|
||||
"compareYyyy": 2023,
|
||||
"targetYyyy": 2024,
|
||||
"features": [
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[126.663, 34.588],
|
||||
[126.662, 34.587],
|
||||
[126.664, 34.589],
|
||||
[126.663, 34.588]
|
||||
]
|
||||
]
|
||||
},
|
||||
"properties": {
|
||||
"beforeClass": "WASTE",
|
||||
"afterClass": "LAND"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[126.665, 34.590],
|
||||
[126.664, 34.589],
|
||||
[126.666, 34.591],
|
||||
[126.665, 34.590]
|
||||
]
|
||||
]
|
||||
},
|
||||
"properties": {
|
||||
"beforeClass": "FOREST",
|
||||
"afterClass": "BUILDING"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[126.667, 34.592],
|
||||
[126.666, 34.591],
|
||||
[126.668, 34.593],
|
||||
[126.667, 34.592]
|
||||
]
|
||||
]
|
||||
},
|
||||
"properties": {
|
||||
"beforeClass": "FARMLAND",
|
||||
"afterClass": "SOLAR_PANEL"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
""")
|
||||
}))
|
||||
@RequestBody
|
||||
TrainingDataLabelDto.NewPolygonRequest request) {
|
||||
return ApiResponseDto.okObject(trainingDataLabelService.saveNewPolygon(request));
|
||||
}
|
||||
|
||||
@Operation(summary = "COG 이미지 URL 조회", description = "변화 전/후 COG 이미지 URL을 함께 조회합니다")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(
|
||||
responseCode = "200",
|
||||
description = "조회 성공",
|
||||
content =
|
||||
@Content(
|
||||
mediaType = "application/json",
|
||||
schema =
|
||||
@Schema(implementation = TrainingDataLabelDto.CogImageResponse.class))),
|
||||
@ApiResponse(responseCode = "404", description = "이미지를 찾을 수 없음", content = @Content),
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
|
||||
})
|
||||
@GetMapping("/cog-image")
|
||||
public ApiResponseDto<TrainingDataLabelDto.CogImageResponse> getCogImageUrl(
|
||||
@Parameter(description = "도엽번호", required = true, example = "35905086") @RequestParam
|
||||
String mapSheetNum,
|
||||
@Parameter(description = "변화 전 년도", required = true, example = "2023") @RequestParam
|
||||
Integer beforeYear,
|
||||
@Parameter(description = "변화 후 년도", required = true, example = "2024") @RequestParam
|
||||
Integer afterYear) {
|
||||
return ApiResponseDto.ok(
|
||||
trainingDataLabelService.getCogImageUrl(mapSheetNum, beforeYear, afterYear));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,493 @@
|
||||
package com.kamco.cd.kamcoback.trainingdata;
|
||||
|
||||
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
|
||||
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
|
||||
import com.kamco.cd.kamcoback.config.api.ApiResponseDto.ResponseObj;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewGeometryInfo;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewListDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.service.TrainingDataReviewService;
|
||||
import io.swagger.v3.oas.annotations.Hidden;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.Parameter;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponses;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PostMapping;
|
||||
import org.springframework.web.bind.annotation.RequestBody;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
@Tag(name = "라벨링 툴 > 검수자", description = "라벨링 툴 > 검수자 API")
|
||||
@RestController
|
||||
@RequiredArgsConstructor
|
||||
@RequestMapping("/api/training-data/review")
|
||||
public class TrainingDataReviewApiController {
|
||||
|
||||
private final TrainingDataReviewService trainingDataReviewService;
|
||||
|
||||
@Operation(summary = "목록 조회", description = "검수 할당 목록 조회")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(
|
||||
responseCode = "200",
|
||||
description = "조회 성공",
|
||||
content =
|
||||
@Content(
|
||||
mediaType = "application/json",
|
||||
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
|
||||
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
|
||||
})
|
||||
@GetMapping
|
||||
public ApiResponseDto<Page<ReviewListDto>> findReviewAssignedList(
|
||||
@RequestParam(defaultValue = "0") int page,
|
||||
@RequestParam(defaultValue = "20") int size,
|
||||
@RequestParam(defaultValue = "01022223333") String userId) {
|
||||
TrainingDataReviewDto.searchReq searchReq = new TrainingDataReviewDto.searchReq(page, size, "");
|
||||
return ApiResponseDto.ok(trainingDataReviewService.findReviewAssignedList(searchReq, userId));
|
||||
}
|
||||
|
||||
@Hidden
|
||||
@Operation(summary = "상세 Geometry 조회", description = "검수 할당 상세 Geometry 조회")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(
|
||||
responseCode = "200",
|
||||
description = "조회 성공",
|
||||
content =
|
||||
@Content(
|
||||
mediaType = "application/json",
|
||||
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
|
||||
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
|
||||
})
|
||||
@GetMapping("/geom-info")
|
||||
public ApiResponseDto<ReviewGeometryInfo> findReviewAssignedGeom(
|
||||
@RequestParam(defaultValue = "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02") String operatorUid) {
|
||||
return ApiResponseDto.ok(trainingDataReviewService.findReviewAssignedGeom(operatorUid));
|
||||
}
|
||||
|
||||
@Operation(summary = "검수 결과 저장", description = "검수 결과 저장")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(
|
||||
responseCode = "200",
|
||||
description = "조회 성공",
|
||||
content =
|
||||
@Content(
|
||||
mediaType = "application/json",
|
||||
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
|
||||
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
|
||||
})
|
||||
@PostMapping
|
||||
public ApiResponseDto<ResponseObj> saveReviewFeature(
|
||||
@RequestBody TrainingDataReviewDto.GeoFeatureRequest request) {
|
||||
return ApiResponseDto.okObject(trainingDataReviewService.saveReviewFeature(request));
|
||||
}
|
||||
|
||||
@Operation(summary = "작업 통계 조회", description = "검수자의 작업 현황 통계를 조회합니다. (전체/미작업/Today 건수)")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(
|
||||
responseCode = "200",
|
||||
description = "조회 성공",
|
||||
content =
|
||||
@Content(
|
||||
mediaType = "application/json",
|
||||
schema = @Schema(implementation = TrainingDataReviewDto.SummaryRes.class))),
|
||||
@ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
|
||||
})
|
||||
@GetMapping("/summary")
|
||||
public ApiResponseDto<TrainingDataReviewDto.SummaryRes> getSummary(
|
||||
@io.swagger.v3.oas.annotations.Parameter(
|
||||
description = "검수자 사번",
|
||||
required = true,
|
||||
example = "01022223333")
|
||||
@RequestParam
|
||||
String userId) {
|
||||
try {
|
||||
System.out.println("[Controller] getSummary called with userId: " + userId);
|
||||
TrainingDataReviewDto.SummaryRes result = trainingDataReviewService.getSummary(userId);
|
||||
System.out.println("[Controller] getSummary result: " + result);
|
||||
return ApiResponseDto.ok(result);
|
||||
} catch (Exception e) {
|
||||
System.err.println("[Controller] getSummary ERROR: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
// 예외 발생 시에도 빈 통계 반환
|
||||
return ApiResponseDto.ok(
|
||||
TrainingDataReviewDto.SummaryRes.builder()
|
||||
.totalCnt(0L)
|
||||
.undoneCnt(0L)
|
||||
.todayCnt(0L)
|
||||
.build());
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Returns the full review detail for one assignment: change-detection info,
   * inspection result, the detected geometry, before/after COG image URLs, the
   * map-sheet bbox, and any polygons drawn during review (learnGeometries).
   *
   * @param operatorUid review assignment id; the defaultValue looks like a
   *     dev/test value — TODO(review): confirm it should not ship to production
   */
  @Operation(
      summary = "변화탐지정보 및 실태조사결과 조회",
      description = "선택한 작업의 변화탐지정보 및 실태조사결과를 조회합니다. 저장된 여러 개의 polygon을 조회할 수 있습니다.")
  @ApiResponses(
      value = {
        @ApiResponse(
            responseCode = "200",
            description = "조회 성공",
            content =
                @Content(
                    mediaType = "application/json",
                    schema = @Schema(implementation = TrainingDataReviewDto.DetailRes.class),
                    examples = {
                      @io.swagger.v3.oas.annotations.media.ExampleObject(
                          name = "단일 polygon 조회",
                          description = "1개의 polygon이 저장된 경우 응답 예시",
                          value =
                              """
                              {
                                "code": "OK",
                                "message": null,
                                "data": {
                                  "operatorUid": "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02",
                                  "changeDetectionInfo": {
                                    "mapSheetInfo": "NI52-3-13-1",
                                    "detectionYear": "2023-2024",
                                    "beforeClass": {
                                      "classification": "waste",
                                      "probability": 0.95
                                    },
                                    "afterClass": {
                                      "classification": "land",
                                      "probability": 0.98
                                    },
                                    "area": 1250.5,
                                    "detectionAccuracy": 0.96,
                                    "pnu": 1234567890
                                  },
                                  "inspectionResultInfo": {
                                    "verificationResult": "완료",
                                    "inappropriateReason": ""
                                  },
                                  "geom": {
                                    "type": "Feature",
                                    "geometry": {
                                      "type": "Polygon",
                                      "coordinates": [[[126.663, 34.588], [126.662, 34.587], [126.664, 34.589], [126.663, 34.588]]]
                                    },
                                    "properties": {
                                      "beforeClass": "waste",
                                      "afterClass": "land"
                                    }
                                  },
                                  "beforeCogUrl": "https://storage.example.com/cog/2023/NI52-3-13-1.tif",
                                  "afterCogUrl": "https://storage.example.com/cog/2024/NI52-3-13-1.tif",
                                  "mapBox": {
                                    "type": "Polygon",
                                    "coordinates": [[[126.65, 34.58], [126.67, 34.58], [126.67, 34.60], [126.65, 34.60], [126.65, 34.58]]]
                                  },
                                  "learnGeometries": [
                                    {
                                      "type": "Feature",
                                      "geometry": {
                                        "type": "Polygon",
                                        "coordinates": [[[126.663, 34.588], [126.662, 34.587], [126.664, 34.589], [126.663, 34.588]]]
                                      },
                                      "properties": {
                                        "beforeClass": "waste",
                                        "afterClass": "land"
                                      }
                                    }
                                  ]
                                }
                              }
                              """),
                      @io.swagger.v3.oas.annotations.media.ExampleObject(
                          name = "여러 polygon 조회",
                          description = "3개의 polygon이 저장된 경우 응답 예시",
                          value =
                              """
                              {
                                "code": "OK",
                                "message": null,
                                "data": {
                                  "operatorUid": "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02",
                                  "changeDetectionInfo": {
                                    "mapSheetInfo": "NI52-3-13-1",
                                    "detectionYear": "2023-2024",
                                    "beforeClass": {
                                      "classification": "waste",
                                      "probability": 0.95
                                    },
                                    "afterClass": {
                                      "classification": "land",
                                      "probability": 0.98
                                    },
                                    "area": 1250.5,
                                    "detectionAccuracy": 0.96,
                                    "pnu": 1234567890
                                  },
                                  "inspectionResultInfo": {
                                    "verificationResult": "완료",
                                    "inappropriateReason": ""
                                  },
                                  "geom": {
                                    "type": "Feature",
                                    "geometry": {
                                      "type": "Polygon",
                                      "coordinates": [[[126.663, 34.588], [126.662, 34.587], [126.664, 34.589], [126.663, 34.588]]]
                                    },
                                    "properties": {
                                      "beforeClass": "waste",
                                      "afterClass": "land"
                                    }
                                  },
                                  "beforeCogUrl": "https://storage.example.com/cog/2023/NI52-3-13-1.tif",
                                  "afterCogUrl": "https://storage.example.com/cog/2024/NI52-3-13-1.tif",
                                  "mapBox": {
                                    "type": "Polygon",
                                    "coordinates": [[[126.65, 34.58], [126.67, 34.58], [126.67, 34.60], [126.65, 34.60], [126.65, 34.58]]]
                                  },
                                  "learnGeometries": [
                                    {
                                      "type": "Feature",
                                      "geometry": {
                                        "type": "Polygon",
                                        "coordinates": [[[126.663, 34.588], [126.662, 34.587], [126.664, 34.589], [126.663, 34.588]]]
                                      },
                                      "properties": {
                                        "beforeClass": "waste",
                                        "afterClass": "land"
                                      }
                                    },
                                    {
                                      "type": "Feature",
                                      "geometry": {
                                        "type": "Polygon",
                                        "coordinates": [[[126.665, 34.585], [126.664, 34.584], [126.666, 34.586], [126.665, 34.585]]]
                                      },
                                      "properties": {
                                        "beforeClass": "forest",
                                        "afterClass": "building"
                                      }
                                    },
                                    {
                                      "type": "Feature",
                                      "geometry": {
                                        "type": "Polygon",
                                        "coordinates": [[[126.660, 34.590], [126.659, 34.589], [126.661, 34.591], [126.660, 34.590]]]
                                      },
                                      "properties": {
                                        "beforeClass": "grassland",
                                        "afterClass": "concrete"
                                      }
                                    }
                                  ]
                                }
                              }
                              """)
                    })),
        @ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
        @ApiResponse(responseCode = "404", description = "데이터를 찾을 수 없음", content = @Content),
        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
      })
  @GetMapping("/detail")
  public ApiResponseDto<TrainingDataReviewDto.DetailRes> getDetail(
      @io.swagger.v3.oas.annotations.Parameter(
              description = "검수 작업 ID (UUID)",
              required = true,
              example = "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02")
          @RequestParam(defaultValue = "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02")
          java.util.UUID operatorUid) {
    return ApiResponseDto.ok(trainingDataReviewService.getDetail(operatorUid));
  }
|
||||
|
||||
@Operation(summary = "검수자 목록 기본정보제공", description = "검수자 목록 기본정보제공")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(
|
||||
responseCode = "200",
|
||||
description = "조회 성공",
|
||||
content =
|
||||
@Content(
|
||||
mediaType = "application/json",
|
||||
schema = @Schema(implementation = TrainingDataReviewDto.DetailRes.class))),
|
||||
@ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
|
||||
@ApiResponse(responseCode = "404", description = "데이터를 찾을 수 없음", content = @Content),
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
|
||||
})
|
||||
@GetMapping("/default-page")
|
||||
public ApiResponseDto<TrainingDataReviewDto.DefaultPaging> getDefaultPagingNumber(
|
||||
@Parameter(description = "사번", example = "01022223333") @RequestParam String userId,
|
||||
@Parameter(description = "페이징 사이즈", example = "20") @RequestParam(defaultValue = "20")
|
||||
Integer size,
|
||||
@Parameter(description = "개별 UUID", example = "79bcdbbe-6ed4-4caa-b4a4-22f3cf2f9d25")
|
||||
@RequestParam(required = false)
|
||||
String operatorUid) {
|
||||
return ApiResponseDto.ok(
|
||||
trainingDataReviewService.getDefaultPagingNumber(userId, size, operatorUid));
|
||||
}
|
||||
|
||||
  /**
   * Saves one or more polygons drawn during review that were not part of the
   * detection result. Accepts a single feature or a FeatureCollection-like list.
   */
  @Operation(
      summary = "새로운 polygon(들) 추가 저장",
      description = "탐지결과 외 새로운 polygon을 추가로 저장합니다. 단일 또는 여러 개를 저장할 수 있습니다.")
  @ApiResponses(
      value = {
        @ApiResponse(
            responseCode = "200",
            description = "저장 성공",
            content =
                @Content(
                    mediaType = "application/json",
                    schema = @Schema(implementation = ResponseObj.class))),
        @ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
      })
  @PostMapping("/new-polygon")
  public ApiResponseDto<ResponseObj> saveNewPolygon(
      @io.swagger.v3.oas.annotations.parameters.RequestBody(
              description = "새로운 polygon 저장 요청",
              required = true,
              content =
                  @Content(
                      mediaType = "application/json",
                      schema =
                          @Schema(implementation = TrainingDataReviewDto.NewPolygonRequest.class),
                      examples = {
                        @io.swagger.v3.oas.annotations.media.ExampleObject(
                            name = "1개 polygon 저장",
                            value =
                                """
                                {
                                  "operatorUid": "93c56be8-0246-4b22-b976-2476549733cc",
                                  "analUid": 53,
                                  "mapSheetNum": "35905086",
                                  "compareYyyy": 2023,
                                  "targetYyyy": 2024,
                                  "features": [
                                    {
                                      "type": "Feature",
                                      "geometry": {
                                        "type": "Polygon",
                                        "coordinates": [
                                          [
                                            [126.663, 34.588],
                                            [126.662, 34.587],
                                            [126.664, 34.589],
                                            [126.663, 34.588]
                                          ]
                                        ]
                                      },
                                      "properties": {
                                        "beforeClass": "WASTE",
                                        "afterClass": "LAND"
                                      }
                                    }
                                  ]
                                }
                                """),
                        @io.swagger.v3.oas.annotations.media.ExampleObject(
                            name = "3개 polygon 저장",
                            value =
                                """
                                {
                                  "operatorUid": "93c56be8-0246-4b22-b976-2476549733cc",
                                  "analUid": 53,
                                  "mapSheetNum": "35905086",
                                  "compareYyyy": 2023,
                                  "targetYyyy": 2024,
                                  "features": [
                                    {
                                      "type": "Feature",
                                      "geometry": {
                                        "type": "Polygon",
                                        "coordinates": [
                                          [
                                            [126.663, 34.588],
                                            [126.662, 34.587],
                                            [126.664, 34.589],
                                            [126.663, 34.588]
                                          ]
                                        ]
                                      },
                                      "properties": {
                                        "beforeClass": "WASTE",
                                        "afterClass": "LAND"
                                      }
                                    },
                                    {
                                      "type": "Feature",
                                      "geometry": {
                                        "type": "Polygon",
                                        "coordinates": [
                                          [
                                            [126.665, 34.590],
                                            [126.664, 34.589],
                                            [126.666, 34.591],
                                            [126.665, 34.590]
                                          ]
                                        ]
                                      },
                                      "properties": {
                                        "beforeClass": "FOREST",
                                        "afterClass": "BUILDING"
                                      }
                                    },
                                    {
                                      "type": "Feature",
                                      "geometry": {
                                        "type": "Polygon",
                                        "coordinates": [
                                          [
                                            [126.667, 34.592],
                                            [126.666, 34.591],
                                            [126.668, 34.593],
                                            [126.667, 34.592]
                                          ]
                                        ]
                                      },
                                      "properties": {
                                        "beforeClass": "FARMLAND",
                                        "afterClass": "SOLAR_PANEL"
                                      }
                                    }
                                  ]
                                }
                                """)
                      }))
          @RequestBody
          TrainingDataReviewDto.NewPolygonRequest request) {
    return ApiResponseDto.okObject(trainingDataReviewService.saveNewPolygon(request));
  }
|
||||
|
||||
@Operation(summary = "COG 이미지 URL 조회", description = "변화 전/후 COG 이미지 URL을 함께 조회합니다")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(
|
||||
responseCode = "200",
|
||||
description = "조회 성공",
|
||||
content =
|
||||
@Content(
|
||||
mediaType = "application/json",
|
||||
schema =
|
||||
@Schema(implementation = TrainingDataReviewDto.CogImageResponse.class))),
|
||||
@ApiResponse(responseCode = "404", description = "이미지를 찾을 수 없음", content = @Content),
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
|
||||
})
|
||||
@GetMapping("/cog-image")
|
||||
public ApiResponseDto<TrainingDataReviewDto.CogImageResponse> getCogImageUrl(
|
||||
@Parameter(description = "도엽번호", required = true, example = "35905086") @RequestParam
|
||||
String mapSheetNum,
|
||||
@Parameter(description = "변화 전 년도", required = true, example = "2023") @RequestParam
|
||||
Integer beforeYear,
|
||||
@Parameter(description = "변화 후 년도", required = true, example = "2024") @RequestParam
|
||||
Integer afterYear) {
|
||||
return ApiResponseDto.ok(
|
||||
trainingDataReviewService.getCogImageUrl(mapSheetNum, beforeYear, afterYear));
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.kamco.cd.kamcoback.common.utils.geometry.GeometryDeserializer;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
@@ -298,8 +299,8 @@ public class TrainingDataLabelDto {
|
||||
@Schema(description = "도엽 bbox")
|
||||
private JsonNode mapBox;
|
||||
|
||||
@Schema(description = "라벨링 툴에서 그린 폴리곤")
|
||||
private LearnDataGeometry learnGeometry;
|
||||
@Schema(description = "라벨링 툴에서 그린 폴리곤들 (여러 개 가능)")
|
||||
private List<LearnDataGeometry> learnGeometries;
|
||||
}
|
||||
|
||||
@Schema(name = "ChangeDetectionInfo", description = "변화탐지정보")
|
||||
@@ -394,4 +395,133 @@ public class TrainingDataLabelDto {
|
||||
private int page;
|
||||
private UUID assignmentUid;
|
||||
}
|
||||
|
||||
  /**
   * Request body for saving additional polygons drawn in the labeling tool.
   *
   * <p>NOTE(review): a near-identical NewPolygonRequest also exists in
   * TrainingDataReviewDto (keyed by operatorUid instead of assignmentUid) —
   * consider consolidating to avoid drift.
   */
  @Schema(name = "NewPolygonRequest", description = "새로운 polygon(들) 추가 저장")
  @Getter
  @Setter
  @NoArgsConstructor
  @AllArgsConstructor
  public static class NewPolygonRequest {

    @Schema(description = "assignmentUid", example = "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02")
    private String assignmentUid; // labeling assignment id

    @Schema(description = "anal_uid", example = "1")
    private Long analUid; // analysis (inference run) id

    @Schema(description = "map_sheet_num (도엽번호)", example = "NI52-3-13-1")
    private String mapSheetNum;

    @Schema(description = "compare_yyyy (변화 전 년도)", example = "2022")
    private Integer compareYyyy;

    @Schema(description = "target_yyyy (변화 후 년도)", example = "2023")
    private Integer targetYyyy;

    @Schema(description = "새로 그린 polygon 리스트")
    private List<PolygonFeature> features;

    /** One GeoJSON-style Feature: geometry plus before/after classification. */
    @Getter
    @Setter
    @NoArgsConstructor
    @AllArgsConstructor
    public static class PolygonFeature {

      @Schema(description = "type", example = "Feature")
      private String type;

      // Deserialized from a GeoJSON geometry object into a JTS Geometry.
      @JsonDeserialize(using = GeometryDeserializer.class)
      @Schema(
          description = "polygon geometry",
          example =
              """
              {
                "type": "Polygon",
                "coordinates": [
                  [
                    [
                      126.66292461969202,
                      34.58785236216609
                    ],
                    [
                      126.66263801099049,
                      34.58740117447532
                    ],
                    [
                      126.66293668521236,
                      34.5873904146878
                    ],
                    [
                      126.66312820122245,
                      34.587841464427825
                    ],
                    [
                      126.66289124481979,
                      34.58786048381633
                    ],
                    [
                      126.66292461969202,
                      34.58785236216609
                    ]
                  ]
                ]
              }
              """)
      private Geometry geometry;

      @Schema(description = "polygon properties")
      private PolygonProperties properties;

      /** Before/after land-use classification codes for the drawn polygon. */
      @Getter
      @Setter
      @NoArgsConstructor
      @AllArgsConstructor
      public static class PolygonProperties {

        @Schema(description = "beforeClass", example = "WASTE")
        private String beforeClass;

        @Schema(description = "afterClass", example = "LAND")
        private String afterClass;
      }
    }
  }
|
||||
|
||||
  /** Request parameters for looking up a single COG image (map sheet + year). */
  @Schema(name = "CogImageRequest", description = "COG 이미지 조회 요청")
  @Getter
  @Setter
  @NoArgsConstructor
  @AllArgsConstructor
  public static class CogImageRequest {

    // NOTE(review): @Schema(required = true) is deprecated in current
    // swagger-core; consider requiredMode = Schema.RequiredMode.REQUIRED.
    @Schema(description = "map_sheet_num (도엽번호)", example = "NI52-3-13-1", required = true)
    private String mapSheetNum;

    @Schema(description = "year (년도)", example = "2022", required = true)
    private Integer year;
  }
|
||||
|
||||
  /** Response carrying the before/after COG image URLs for one map sheet. */
  @Schema(name = "CogImageResponse", description = "COG 이미지 URL 응답")
  @Getter
  @Setter
  @Builder
  @NoArgsConstructor
  @AllArgsConstructor
  public static class CogImageResponse {

    @Schema(description = "변화 전 COG 이미지 URL")
    private String beforeCogUrl;

    @Schema(description = "변화 후 COG 이미지 URL")
    private String afterCogUrl;

    @Schema(description = "변화 전 년도")
    private Integer beforeYear;

    @Schema(description = "변화 후 년도")
    private Integer afterYear;

    @Schema(description = "도엽번호")
    private String mapSheetNum;
  }
|
||||
}
|
||||
|
||||
@@ -0,0 +1,533 @@
|
||||
package com.kamco.cd.kamcoback.trainingdata.dto;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.kamco.cd.kamcoback.common.utils.geometry.GeometryDeserializer;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.Setter;
|
||||
import org.locationtech.jts.geom.Geometry;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Sort;
|
||||
|
||||
public class TrainingDataReviewDto {
|
||||
|
||||
  /** One row of the inspector's review-assignment list. */
  @Schema(name = "ReviewListDto", description = "ReviewListDto")
  @Getter
  @Setter
  @NoArgsConstructor
  public static class ReviewListDto {

    private UUID operatorUid; // review assignment id
    private Long inferenceGeomUid; // detected geometry id
    private String inspectorUid; // inspector employee number
    private String inspectState; // inspection status code
    private String mapSheetNum; // map sheet number
    private String mapIdNm; // map sheet name
    private Long pnu; // parcel unique number

    // Explicit all-args constructor — presumably used as a query projection
    // (constructor expression), so the parameter order must match the query.
    // TODO(review): confirm before reordering fields.
    public ReviewListDto(
        UUID operatorUid,
        Long inferenceGeomUid,
        String inspectorUid,
        String inspectState,
        String mapSheetNum,
        String mapIdNm,
        Long pnu) {
      this.operatorUid = operatorUid;
      this.inferenceGeomUid = inferenceGeomUid;
      this.inspectorUid = inspectorUid;
      this.inspectState = inspectState;
      this.mapSheetNum = mapSheetNum;
      this.mapIdNm = mapIdNm;
      this.pnu = pnu;
    }
  }
|
||||
|
||||
@Schema(name = "ReviewGeometryInfo", description = "ReviewGeometryInfo")
|
||||
@Getter
|
||||
@Setter
|
||||
@NoArgsConstructor
|
||||
public static class ReviewGeometryInfo {
|
||||
|
||||
private UUID operatorUid;
|
||||
private Long inferenceGeomUid;
|
||||
@JsonIgnore private String geomData; // json string
|
||||
private JsonNode geom;
|
||||
private String beforeCogUrl;
|
||||
private String afterCogUrl;
|
||||
@JsonIgnore private String mapBboxString; // json string
|
||||
private JsonNode mapBbox;
|
||||
|
||||
public ReviewGeometryInfo(
|
||||
UUID operatorUid,
|
||||
Long inferenceGeomUid,
|
||||
String geomData,
|
||||
String beforeCogUrl,
|
||||
String afterCogUrl,
|
||||
String mapBboxString) {
|
||||
this.operatorUid = operatorUid;
|
||||
this.inferenceGeomUid = inferenceGeomUid;
|
||||
this.beforeCogUrl = beforeCogUrl;
|
||||
this.afterCogUrl = afterCogUrl;
|
||||
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
JsonNode geomJson;
|
||||
JsonNode mapBboxJson;
|
||||
try {
|
||||
geomJson = mapper.readTree(geomData);
|
||||
mapBboxJson = mapper.readTree(mapBboxString);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
this.geom = geomJson;
|
||||
this.mapBbox = mapBboxJson;
|
||||
}
|
||||
}
|
||||
|
||||
  /** Request body for saving an inspector's review result as a GeoJSON-style Feature. */
  @Schema(name = "GeoFeatureRequest", description = "검수 결과 저장")
  @Getter
  @Setter
  @NoArgsConstructor
  @AllArgsConstructor
  public static class GeoFeatureRequest {

    @Schema(description = "operatorUid", example = "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02")
    private String operatorUid;

    @Schema(description = "type", example = "Feature")
    private String type;

    // Deserialized from a GeoJSON geometry object into a JTS Geometry.
    @JsonDeserialize(using = GeometryDeserializer.class)
    @Schema(
        description = "검수 결과 polygon",
        example =
            """
            {
              "type": "Polygon",
              "coordinates": [
                [
                  [
                    126.66292461969202,
                    34.58785236216609
                  ],
                  [
                    126.66263801099049,
                    34.58740117447532
                  ],
                  [
                    126.66293668521236,
                    34.5873904146878
                  ],
                  [
                    126.66312820122245,
                    34.587841464427825
                  ],
                  [
                    126.66289124481979,
                    34.58786048381633
                  ],
                  [
                    126.66292461969202,
                    34.58785236216609
                  ]
                ]
              ]
            }
            """)
    private Geometry geometry;

    private Properties properties;

    // NOTE(review): only @Getter here — no setters or all-args constructor.
    // Jackson binding of the request body presumably relies on field access or
    // mapper configuration; verify deserialization actually populates these.
    @Getter
    public static class Properties {

      @Schema(description = "beforeClass", example = "WASTE")
      private String beforeClass;

      @Schema(description = "afterClass", example = "LAND")
      private String afterClass;

      @Schema(description = "inspectState", example = "COMPLETE")
      private String inspectState;

      @Schema(description = "inspectMemo", example = "검수 완료")
      private String inspectMemo;
    }
  }
|
||||
|
||||
@Schema(name = "InferenceDataGeometry", description = "InferenceDataGeometry")
|
||||
@Getter
|
||||
@Setter
|
||||
@NoArgsConstructor
|
||||
public static class InferenceDataGeometry {
|
||||
|
||||
private String type;
|
||||
@JsonIgnore private String learnGeomString;
|
||||
private JsonNode geometry;
|
||||
private InferenceProperties properties;
|
||||
|
||||
public InferenceDataGeometry(
|
||||
String type, String learnGeomString, InferenceProperties properties) {
|
||||
this.type = type;
|
||||
this.properties = properties;
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
JsonNode inferenceJson;
|
||||
try {
|
||||
inferenceJson = mapper.readTree(learnGeomString);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
this.geometry = inferenceJson;
|
||||
|
||||
if (inferenceJson.isObject()) {
|
||||
((ObjectNode) inferenceJson).remove("crs");
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@Setter
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public static class InferenceProperties {
|
||||
|
||||
@Schema(description = "beforeClass", example = "WASTE")
|
||||
private String beforeClass;
|
||||
|
||||
@Schema(description = "afterClass", example = "LAND")
|
||||
private String afterClass;
|
||||
}
|
||||
}
|
||||
|
||||
@Schema(name = "LearnDataGeometry", description = "LearnDataGeometry")
|
||||
@Getter
|
||||
@Setter
|
||||
@NoArgsConstructor
|
||||
public static class LearnDataGeometry {
|
||||
|
||||
private String type;
|
||||
@JsonIgnore private String learnGeomString;
|
||||
private JsonNode geometry;
|
||||
private LearnProperties properties;
|
||||
|
||||
public LearnDataGeometry(String type, String learnGeomString, LearnProperties properties) {
|
||||
this.type = type;
|
||||
this.properties = properties;
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
JsonNode learnJson;
|
||||
try {
|
||||
learnJson = mapper.readTree(learnGeomString);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
this.geometry = learnJson;
|
||||
|
||||
if (learnJson.isObject()) {
|
||||
((ObjectNode) learnJson).remove("crs");
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@Setter
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public static class LearnProperties {
|
||||
|
||||
@Schema(description = "beforeClass", example = "WASTE")
|
||||
private String beforeClass;
|
||||
|
||||
@Schema(description = "afterClass", example = "LAND")
|
||||
private String afterClass;
|
||||
}
|
||||
}
|
||||
|
||||
@Schema(name = "searchReq", description = "검색 요청")
|
||||
@Getter
|
||||
@Setter
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public static class searchReq {
|
||||
|
||||
// 페이징 파라미터
|
||||
private int page = 0;
|
||||
private int size = 20;
|
||||
private String sort;
|
||||
|
||||
public Pageable toPageable() {
|
||||
if (sort != null && !sort.isEmpty()) {
|
||||
String[] sortParams = sort.split(",");
|
||||
String property = sortParams[0];
|
||||
Sort.Direction direction =
|
||||
sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC;
|
||||
return PageRequest.of(page, size, Sort.by(direction, property));
|
||||
}
|
||||
return PageRequest.of(page, size);
|
||||
}
|
||||
}
|
||||
|
||||
  /** Full detail response for one review assignment (see /detail endpoint). */
  @Schema(name = "DetailRes", description = "객체 상세 정보 응답")
  @Getter
  @Setter
  @Builder
  @NoArgsConstructor
  @AllArgsConstructor
  public static class DetailRes {

    @Schema(description = "검수 작업 ID")
    private UUID operatorUid;

    @Schema(description = "변화탐지정보")
    private ChangeDetectionInfo changeDetectionInfo;

    @Schema(description = "실태조사결과정보")
    private InspectionResultInfo inspectionResultInfo;

    @Schema(description = "Geometry (GeoJSON)")
    private InferenceDataGeometry geom;

    @Schema(description = "변화 전 COG 이미지 URL")
    private String beforeCogUrl;

    @Schema(description = "변화 후 COG 이미지 URL")
    private String afterCogUrl;

    @Schema(description = "도엽 bbox")
    private JsonNode mapBox;

    @Schema(description = "검수 시 추가/수정한 폴리곤들 (여러 개 가능)")
    private List<LearnDataGeometry> learnGeometries;
  }
|
||||
|
||||
  /** Change-detection attributes of a detected area (map sheet, years, classes, area). */
  @Schema(name = "ChangeDetectionInfo", description = "변화탐지정보")
  @Getter
  @Setter
  @Builder
  @NoArgsConstructor
  @AllArgsConstructor
  public static class ChangeDetectionInfo {

    @Schema(description = "도엽번호정보", example = "남해")
    private String mapSheetInfo;

    @Schema(description = "변화탐지연도", example = "2022-2023")
    private String detectionYear;

    @Schema(description = "변화 전 분류 정보")
    private ClassificationInfo beforeClass;

    @Schema(description = "변화 후 분류 정보")
    private ClassificationInfo afterClass;

    @Schema(description = "면적 (㎡)", example = "179.52")
    private Double area;

    @Schema(description = "탐지정확도 (%)", example = "84.8")
    private Double detectionAccuracy;

    @Schema(description = "PNU (필지고유번호)", example = "36221202306020")
    private Long pnu;
  }
|
||||
|
||||
  /** A classification label with its model confidence. */
  @Schema(name = "ClassificationInfo", description = "분류정보")
  @Getter
  @Setter
  @Builder
  @NoArgsConstructor
  @AllArgsConstructor
  public static class ClassificationInfo {

    @Schema(description = "분류", example = "일반토지")
    private String classification;

    @Schema(description = "확률", example = "80.0")
    private Double probability;
  }
|
||||
|
||||
  /** Field-survey (실태조사) verification outcome for a detected change. */
  @Schema(name = "InspectionResultInfo", description = "실태조사결과정보")
  @Getter
  @Setter
  @Builder
  @NoArgsConstructor
  @AllArgsConstructor
  public static class InspectionResultInfo {

    @Schema(description = "검증결과 (미확인/제외/완료)", example = "미확인")
    private String verificationResult;

    @Schema(description = "부적합사유")
    private String inappropriateReason;

    @Schema(description = "메모")
    private String memo;
  }
|
||||
|
||||
  /** Inspector work statistics returned by the /summary endpoint. */
  @Schema(name = "SummaryRes", description = "작업 통계 응답")
  @Getter
  @Setter
  @Builder
  @NoArgsConstructor
  @AllArgsConstructor
  public static class SummaryRes {

    @Schema(description = "전체 배정 건수", example = "8901")
    private Long totalCnt;

    @Schema(description = "미작업 건수 (UNCONFIRM 상태)", example = "7211")
    private Long undoneCnt;

    @Schema(description = "오늘 완료 건수", example = "0")
    private Long todayCnt;
  }
|
||||
|
||||
  /** Default page index and assignment UUID the list UI should open with. */
  @Schema(name = "DefaultPaging", description = "페이징 기본 number, uuid 전달")
  @Getter
  @Setter
  @Builder
  @NoArgsConstructor
  @AllArgsConstructor
  public static class DefaultPaging {

    private int page; // zero-based page index to open
    private UUID operatorUid; // assignment to focus within that page
  }
|
||||
|
||||
  /**
   * Request body for saving additional polygons drawn during review.
   *
   * <p>NOTE(review): a near-identical NewPolygonRequest also exists in
   * TrainingDataLabelDto (keyed by assignmentUid instead of operatorUid) —
   * consider consolidating to avoid drift.
   */
  @Schema(name = "NewPolygonRequest", description = "새로운 polygon(들) 추가 저장")
  @Getter
  @Setter
  @NoArgsConstructor
  @AllArgsConstructor
  public static class NewPolygonRequest {

    @Schema(description = "operatorUid", example = "4f9ebc8b-6635-4177-b42f-7efc9c7b4c02")
    private String operatorUid; // review assignment id

    @Schema(description = "anal_uid", example = "53")
    private Long analUid; // analysis (inference run) id

    @Schema(description = "map_sheet_num (도엽번호)", example = "35905086")
    private String mapSheetNum;

    @Schema(description = "compare_yyyy (변화 전 년도)", example = "2023")
    private Integer compareYyyy;

    @Schema(description = "target_yyyy (변화 후 년도)", example = "2024")
    private Integer targetYyyy;

    @Schema(description = "새로 그린 polygon 리스트")
    private List<PolygonFeature> features;

    /** One GeoJSON-style Feature: geometry plus before/after classification. */
    @Getter
    @Setter
    @NoArgsConstructor
    @AllArgsConstructor
    public static class PolygonFeature {

      @Schema(description = "type", example = "Feature")
      private String type;

      // Deserialized from a GeoJSON geometry object into a JTS Geometry.
      @JsonDeserialize(using = GeometryDeserializer.class)
      @Schema(
          description = "polygon geometry",
          example =
              """
              {
                "type": "Polygon",
                "coordinates": [
                  [
                    [
                      126.66292461969202,
                      34.58785236216609
                    ],
                    [
                      126.66263801099049,
                      34.58740117447532
                    ],
                    [
                      126.66293668521236,
                      34.5873904146878
                    ],
                    [
                      126.66312820122245,
                      34.587841464427825
                    ],
                    [
                      126.66289124481979,
                      34.58786048381633
                    ],
                    [
                      126.66292461969202,
                      34.58785236216609
                    ]
                  ]
                ]
              }
              """)
      private Geometry geometry;

      @Schema(description = "polygon properties")
      private PolygonProperties properties;

      /** Before/after land-use classification codes for the drawn polygon. */
      @Getter
      @Setter
      @NoArgsConstructor
      @AllArgsConstructor
      public static class PolygonProperties {

        @Schema(description = "beforeClass", example = "WASTE")
        private String beforeClass;

        @Schema(description = "afterClass", example = "LAND")
        private String afterClass;
      }
    }
  }
|
||||
|
||||
@Schema(name = "CogImageRequest", description = "COG 이미지 조회 요청")
|
||||
@Getter
|
||||
@Setter
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public static class CogImageRequest {
|
||||
|
||||
@Schema(description = "map_sheet_num (도엽번호)", example = "NI52-3-13-1", required = true)
|
||||
private String mapSheetNum;
|
||||
|
||||
@Schema(description = "year (년도)", example = "2022", required = true)
|
||||
private Integer year;
|
||||
}
|
||||
|
||||
/**
 * Response carrying before/after COG (Cloud-Optimized GeoTIFF) image URLs for a
 * map sheet, together with the two years being compared.
 */
@Schema(name = "CogImageResponse", description = "COG 이미지 URL 응답")
@Getter
@Setter
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class CogImageResponse {

  /** COG image URL before the change. */
  @Schema(description = "변화 전 COG 이미지 URL")
  private String beforeCogUrl;

  /** COG image URL after the change. */
  @Schema(description = "변화 후 COG 이미지 URL")
  private String afterCogUrl;

  /** Year before the change. */
  @Schema(description = "변화 전 년도")
  private Integer beforeYear;

  /** Year after the change. */
  @Schema(description = "변화 후 년도")
  private Integer afterYear;

  /** Map sheet number (도엽번호). */
  @Schema(description = "도엽번호")
  private String mapSheetNum;
}
|
||||
}
|
||||
@@ -3,6 +3,7 @@ package com.kamco.cd.kamcoback.trainingdata.service;
|
||||
import com.kamco.cd.kamcoback.config.api.ApiResponseDto.ApiResponseCode;
|
||||
import com.kamco.cd.kamcoback.config.api.ApiResponseDto.ResponseObj;
|
||||
import com.kamco.cd.kamcoback.postgres.core.TrainingDataLabelCoreService;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.DefaultPaging;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.DetailRes;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.GeoFeatureRequest;
|
||||
@@ -87,4 +88,30 @@ public class TrainingDataLabelService {
|
||||
/**
 * Resolves the default page number and item UUID for a labeling worker so the
 * UI can open the list positioned at the worker's next item.
 *
 * @param userId worker id (사번)
 * @param size page size used by the list view
 * @param assignmentUid assignment UUID to position on — exact semantics are
 *     delegated to the core service; confirm there before relying on them
 * @return default page number plus operator UUID
 */
public DefaultPaging getDefaultPagingNumber(String userId, Integer size, String assignmentUid) {
  return trainingDataLabelCoreService.getDefaultPagingNumber(userId, size, assignmentUid);
}
|
||||
|
||||
/**
|
||||
* 새로운 polygon(들) 추가 저장
|
||||
*
|
||||
* @param request 새 polygon 정보 (여러 개 가능)
|
||||
* @return 응답 메시지
|
||||
*/
|
||||
@Transactional
|
||||
public ResponseObj saveNewPolygon(TrainingDataLabelDto.NewPolygonRequest request) {
|
||||
trainingDataLabelCoreService.saveNewPolygon(request);
|
||||
int count = request.getFeatures() != null ? request.getFeatures().size() : 0;
|
||||
return new ResponseObj(ApiResponseCode.OK, count + "개의 polygon이 저장되었습니다.");
|
||||
}
|
||||
|
||||
/**
 * Looks up before/after COG image URLs for a map sheet.
 *
 * @param mapSheetNum map sheet number (도엽번호)
 * @param beforeYear year before the change
 * @param afterYear year after the change
 * @return before/after COG image URLs (delegated to the core service)
 */
public TrainingDataLabelDto.CogImageResponse getCogImageUrl(
    String mapSheetNum, Integer beforeYear, Integer afterYear) {
  return trainingDataLabelCoreService.getCogImageUrl(mapSheetNum, beforeYear, afterYear);
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,119 @@
|
||||
package com.kamco.cd.kamcoback.trainingdata.service;
|
||||
|
||||
import com.kamco.cd.kamcoback.config.api.ApiResponseDto.ApiResponseCode;
|
||||
import com.kamco.cd.kamcoback.config.api.ApiResponseDto.ResponseObj;
|
||||
import com.kamco.cd.kamcoback.postgres.core.TrainingDataReviewCoreService;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DefaultPaging;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DetailRes;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.GeoFeatureRequest;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewGeometryInfo;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewListDto;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.SummaryRes;
|
||||
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.searchReq;
|
||||
import jakarta.transaction.Transactional;
|
||||
import java.util.UUID;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
public class TrainingDataReviewService {
|
||||
|
||||
private final TrainingDataReviewCoreService trainingDataReviewCoreService;
|
||||
|
||||
public TrainingDataReviewService(TrainingDataReviewCoreService trainingDataReviewCoreService) {
|
||||
this.trainingDataReviewCoreService = trainingDataReviewCoreService;
|
||||
}
|
||||
|
||||
public Page<ReviewListDto> findReviewAssignedList(searchReq searchReq, String userId) {
|
||||
return trainingDataReviewCoreService.findReviewAssignedList(searchReq, userId);
|
||||
}
|
||||
|
||||
public ReviewGeometryInfo findReviewAssignedGeom(String operatorUid) {
|
||||
return trainingDataReviewCoreService.findReviewAssignedGeom(operatorUid);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public ResponseObj saveReviewFeature(GeoFeatureRequest request) {
|
||||
String status = "";
|
||||
String operatorUid = request.getOperatorUid();
|
||||
Long inferenceGeomUid = trainingDataReviewCoreService.findReviewOperatorGeoUid(operatorUid);
|
||||
|
||||
if (request.getGeometry() == null || request.getGeometry().isEmpty()) {
|
||||
// EXCEPT 상태만 업데이트
|
||||
status = "EXCEPT";
|
||||
trainingDataReviewCoreService.updateReviewStateOperator(
|
||||
operatorUid, status, request.getProperties().getInspectMemo());
|
||||
trainingDataReviewCoreService.updateReviewExceptState(inferenceGeomUid, status);
|
||||
} else {
|
||||
status = "COMPLETE";
|
||||
trainingDataReviewCoreService.updateReviewStateOperator(
|
||||
operatorUid, status, request.getProperties().getInspectMemo());
|
||||
trainingDataReviewCoreService.updateReviewPolygonClass(
|
||||
inferenceGeomUid, request.getGeometry(), request.getProperties(), status);
|
||||
}
|
||||
return new ResponseObj(ApiResponseCode.OK, "저장되었습니다.");
|
||||
}
|
||||
|
||||
/**
|
||||
* 검수자별 작업 통계 조회
|
||||
*
|
||||
* @param userId 검수자 사번
|
||||
* @return 전체/미작업/Today 건수
|
||||
*/
|
||||
public SummaryRes getSummary(String userId) {
|
||||
try {
|
||||
System.out.println("[Service] getSummary called with userId: " + userId);
|
||||
SummaryRes result = trainingDataReviewCoreService.getSummary(userId);
|
||||
System.out.println("[Service] getSummary result: " + result);
|
||||
return result;
|
||||
} catch (Exception e) {
|
||||
System.err.println("[Service] getSummary ERROR: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
// 예외 발생 시에도 빈 통계 반환
|
||||
return SummaryRes.builder().totalCnt(0L).undoneCnt(0L).todayCnt(0L).build();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 검수 작업 상세 정보 조회
|
||||
*
|
||||
* @param operatorUid 검수 작업 ID
|
||||
* @return 변화탐지정보 + 실태조사결과정보
|
||||
*/
|
||||
public DetailRes getDetail(UUID operatorUid) {
|
||||
return trainingDataReviewCoreService.getDetail(operatorUid);
|
||||
}
|
||||
|
||||
public DefaultPaging getDefaultPagingNumber(String userId, Integer size, String operatorUid) {
|
||||
return trainingDataReviewCoreService.getDefaultPagingNumber(userId, size, operatorUid);
|
||||
}
|
||||
|
||||
/**
|
||||
* 새로운 polygon(들) 추가 저장
|
||||
*
|
||||
* @param request 새 polygon 정보 (여러 개 가능)
|
||||
* @return 응답 메시지
|
||||
*/
|
||||
@Transactional
|
||||
public ResponseObj saveNewPolygon(TrainingDataReviewDto.NewPolygonRequest request) {
|
||||
trainingDataReviewCoreService.saveNewPolygon(request);
|
||||
int count = request.getFeatures() != null ? request.getFeatures().size() : 0;
|
||||
return new ResponseObj(ApiResponseCode.OK, count + "개의 polygon이 저장되었습니다.");
|
||||
}
|
||||
|
||||
/**
|
||||
* COG 이미지 URL 조회 (변화 전/후)
|
||||
*
|
||||
* @param mapSheetNum 도엽번호
|
||||
* @param beforeYear 변화 전 년도
|
||||
* @param afterYear 변화 후 년도
|
||||
* @return 변화 전/후 COG 이미지 URL
|
||||
*/
|
||||
public TrainingDataReviewDto.CogImageResponse getCogImageUrl(
|
||||
String mapSheetNum, Integer beforeYear, Integer afterYear) {
|
||||
return trainingDataReviewCoreService.getCogImageUrl(mapSheetNum, beforeYear, afterYear);
|
||||
}
|
||||
}
|
||||
@@ -6,7 +6,7 @@ spring:
|
||||
jpa:
|
||||
show-sql: false
|
||||
hibernate:
|
||||
ddl-auto: validate # 로컬만 완화(시킬려면 update으로 변경)
|
||||
ddl-auto: update # local profile only — never commit 'update' for shared/prod environments; use 'validate' there
|
||||
properties:
|
||||
hibernate:
|
||||
default_batch_fetch_size: 100 # ✅ 성능 - N+1 쿼리 방지
|
||||
|
||||
10540
src/main/resources/db/migration/dump-kamco_cds-202601132249.sql
Normal file
10540
src/main/resources/db/migration/dump-kamco_cds-202601132249.sql
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user