Reviewer add

This commit is contained in:
DanielLee
2026-01-14 09:42:08 +09:00
parent 56e7866d4f
commit b918ad14c4
16 changed files with 13017 additions and 8 deletions

View File

@@ -1,6 +1,7 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.repository.trainingdata.TrainingDataLabelRepository;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.DefaultPaging;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.DetailRes;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.GeoFeatureRequest.Properties;
@@ -79,4 +80,13 @@ public class TrainingDataLabelCoreService {
public DefaultPaging getDefaultPagingNumber(String userId, Integer size, String assignmentUid) {
return trainingDataLabelRepository.getDefaultPagingNumber(userId, size, assignmentUid);
}
public void saveNewPolygon(TrainingDataLabelDto.NewPolygonRequest request) {
trainingDataLabelRepository.saveNewPolygon(request);
}
public TrainingDataLabelDto.CogImageResponse getCogImageUrl(
String mapSheetNum, Integer beforeYear, Integer afterYear) {
return trainingDataLabelRepository.getCogImageUrl(mapSheetNum, beforeYear, afterYear);
}
}
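
A minimal sketch of how a web layer might call the two methods added to TrainingDataLabelCoreService above. The controller name, request mappings, and parameter binding are assumptions for illustration only; the actual controller is not part of this diff.

import com.kamco.cd.kamcoback.postgres.core.TrainingDataLabelCoreService;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("/api/training-data/label") // assumed path, not from this commit
@RequiredArgsConstructor
public class TrainingDataLabelCogController { // hypothetical class name

    private final TrainingDataLabelCoreService trainingDataLabelCoreService;

    // Returns the before/after COG image URLs for a map sheet and year pair.
    @GetMapping("/cog-image")
    public TrainingDataLabelDto.CogImageResponse getCogImage(
            @RequestParam String mapSheetNum,
            @RequestParam Integer beforeYear,
            @RequestParam Integer afterYear) {
        return trainingDataLabelCoreService.getCogImageUrl(mapSheetNum, beforeYear, afterYear);
    }

    // Persists polygons newly drawn on the labeling screen.
    @PostMapping("/polygons")
    public void saveNewPolygon(@RequestBody TrainingDataLabelDto.NewPolygonRequest request) {
        trainingDataLabelCoreService.saveNewPolygon(request);
    }
}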

View File

@@ -0,0 +1,92 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.repository.trainingdata.TrainingDataReviewRepository;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DefaultPaging;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DetailRes;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.GeoFeatureRequest.Properties;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewGeometryInfo;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewListDto;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.SummaryRes;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.searchReq;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.locationtech.jts.geom.Geometry;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
public class TrainingDataReviewCoreService {
private final TrainingDataReviewRepository trainingDataReviewRepository;
public Page<ReviewListDto> findReviewAssignedList(searchReq searchReq, String userId) {
return trainingDataReviewRepository.findReviewAssignedList(searchReq, userId);
}
public ReviewGeometryInfo findReviewAssignedGeom(String operatorUid) {
return trainingDataReviewRepository.findReviewAssignedGeom(operatorUid);
}
public Long findReviewOperatorGeoUid(String operatorUid) {
return trainingDataReviewRepository.findReviewOperatorGeoUid(operatorUid);
}
public void updateReviewStateOperator(String operatorUid, String status, String memo) {
trainingDataReviewRepository.updateReviewStateOperator(operatorUid, status, memo);
}
public void updateReviewExceptState(Long inferenceGeomUid, String status) {
trainingDataReviewRepository.updateReviewExceptState(inferenceGeomUid, status);
}
public void updateReviewPolygonClass(
Long inferenceGeomUid, Geometry geometry, Properties properties, String status) {
trainingDataReviewRepository.updateReviewPolygonClass(
inferenceGeomUid, geometry, properties, status);
}
/**
* Retrieves per-reviewer work statistics.
*
* @param userId reviewer employee ID
* @return total / not-yet-reviewed / completed-today counts
*/
public SummaryRes getSummary(String userId) {
try {
System.out.println("[CoreService] getSummary called with userId: " + userId);
SummaryRes result = trainingDataReviewRepository.getSummary(userId);
System.out.println("[CoreService] getSummary result: " + result);
return result;
} catch (Exception e) {
System.err.println("[CoreService] getSummary ERROR: " + e.getMessage());
e.printStackTrace();
// Return empty statistics even when an exception occurs
return SummaryRes.builder().totalCnt(0L).undoneCnt(0L).todayCnt(0L).build();
}
}
/**
* Retrieves detail information for a review task.
*
* @param operatorUid review task ID
* @return change detection info + inspection result info
*/
public DetailRes getDetail(UUID operatorUid) {
return trainingDataReviewRepository.getDetail(operatorUid);
}
public DefaultPaging getDefaultPagingNumber(String userId, Integer size, String operatorUid) {
return trainingDataReviewRepository.getDefaultPagingNumber(userId, size, operatorUid);
}
public void saveNewPolygon(TrainingDataReviewDto.NewPolygonRequest request) {
trainingDataReviewRepository.saveNewPolygon(request);
}
public TrainingDataReviewDto.CogImageResponse getCogImageUrl(
String mapSheetNum, Integer beforeYear, Integer afterYear) {
return trainingDataReviewRepository.getCogImageUrl(mapSheetNum, beforeYear, afterYear);
}
}
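
A sketch of the same getSummary guard with the stdout diagnostics routed through SLF4J instead of System.out/System.err. It assumes Lombok's @Slf4j is added to the class, which the version in this commit does not do.

import com.kamco.cd.kamcoback.postgres.repository.trainingdata.TrainingDataReviewRepository;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.SummaryRes;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

@Slf4j
@Service
@RequiredArgsConstructor
public class TrainingDataReviewCoreService {

    private final TrainingDataReviewRepository trainingDataReviewRepository;

    public SummaryRes getSummary(String userId) {
        try {
            log.debug("getSummary called with userId={}", userId);
            SummaryRes result = trainingDataReviewRepository.getSummary(userId);
            log.debug("getSummary result={}", result);
            return result;
        } catch (Exception e) {
            log.error("getSummary failed for userId={}", userId, e);
            // Keep the commit's fallback behavior: return empty statistics instead of throwing
            return SummaryRes.builder().totalCnt(0L).undoneCnt(0L).todayCnt(0L).build();
        }
    }
}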

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.postgres.repository.trainingdata;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.DefaultPaging;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.DetailRes;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.GeoFeatureRequest.Properties;
@@ -31,4 +32,9 @@ public interface TrainingDataLabelRepositoryCustom {
DetailRes getDetail(UUID assignmentUid);
DefaultPaging getDefaultPagingNumber(String userId, Integer size, String assignmentUid);
void saveNewPolygon(TrainingDataLabelDto.NewPolygonRequest request);
TrainingDataLabelDto.CogImageResponse getCogImageUrl(
String mapSheetNum, Integer beforeYear, Integer afterYear);
}
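
The TrainingDataLabelDto.NewPolygonRequest consumed by saveNewPolygon is not included in this diff. The shape below is only inferred from how the repository implementation reads it (mapSheetNum, compareYyyy, targetYyyy, and a list of features carrying a JTS geometry plus before/after class codes); the real DTO may differ.

// Approximate shape only, inferred from usage in TrainingDataLabelRepositoryImpl;
// any field not read in this diff is omitted.
import java.util.List;
import lombok.Data;
import org.locationtech.jts.geom.Geometry;

@Data
class NewPolygonRequestSketch { // hypothetical stand-in for TrainingDataLabelDto.NewPolygonRequest
    private String mapSheetNum;   // 1:5000 map sheet number
    private Integer compareYyyy;  // "before" year
    private Integer targetYyyy;   // "after" year
    private List<PolygonFeature> features;

    @Data
    static class PolygonFeature {
        private Geometry geometry;            // drawn polygon as a JTS geometry
        private PolygonProperties properties;
    }

    @Data
    static class PolygonProperties {
        private String beforeClass; // class code before the change
        private String afterClass;  // class code after the change
    }
}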

View File

@@ -11,6 +11,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelState;
import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.ChangeDetectionInfo;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.ClassificationInfo;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataLabelDto.DefaultPaging;
@@ -519,8 +520,8 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
}
}
// 7. Convert the saved labeling Geometry to GeoJSON
LearnDataGeometry learnData =
// 7. Convert the saved labeling Geometries to GeoJSON (there can be several)
List<LearnDataGeometry> learnDataList =
queryFactory
.select(
Projections.constructor(
@@ -536,7 +537,7 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
.where(
mapSheetLearnDataGeomEntity.geoUid.eq(
mapSheetAnalDataInferenceGeomEntityEntity.getGeoUid()))
.fetchOne();
.fetch(); // changed fetchOne() -> fetch()
return DetailRes.builder()
.assignmentUid(assignmentUid)
@@ -546,7 +547,7 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
.beforeCogUrl(beforeCogUrl)
.afterCogUrl(afterCogUrl)
.mapBox(mapBbox)
.learnGeometry(learnData)
.learnGeometries(learnDataList) // learnGeometry -> learnGeometries
.build();
} catch (Exception e) {
@@ -624,6 +625,165 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
return DefaultPaging.builder().page(page).assignmentUid(firstAssignedUid).build();
}
@Override
public void saveNewPolygon(TrainingDataLabelDto.NewPolygonRequest request) {
try {
if (request.getFeatures() == null || request.getFeatures().isEmpty()) {
throw new RuntimeException("No polygons to save");
}
System.out.println(
"Saving "
+ request.getFeatures().size()
+ " new polygon(s) for mapSheetNum: "
+ request.getMapSheetNum());
int savedCount = 0;
for (TrainingDataLabelDto.NewPolygonRequest.PolygonFeature feature : request.getFeatures()) {
try {
// 1. Insert the new polygon into the map_sheet_anal_data_inference_geom table
queryFactory
.insert(mapSheetAnalDataInferenceGeomEntity)
.columns(
mapSheetAnalDataInferenceGeomEntity.geom,
mapSheetAnalDataInferenceGeomEntity.compareYyyy,
mapSheetAnalDataInferenceGeomEntity.targetYyyy,
mapSheetAnalDataInferenceGeomEntity.classBeforeCd,
mapSheetAnalDataInferenceGeomEntity.classAfterCd,
mapSheetAnalDataInferenceGeomEntity.createdDttm,
mapSheetAnalDataInferenceGeomEntity.updatedDttm,
mapSheetAnalDataInferenceGeomEntity.labelState)
.values(
feature.getGeometry(),
request.getCompareYyyy(),
request.getTargetYyyy(),
feature.getProperties().getBeforeClass().toLowerCase(),
feature.getProperties().getAfterClass().toLowerCase(),
ZonedDateTime.now(),
ZonedDateTime.now(),
"DONE")
.execute();
// 2. Look up the generated geoUid
Long geoUid =
queryFactory
.select(mapSheetAnalDataInferenceGeomEntity.geoUid)
.from(mapSheetAnalDataInferenceGeomEntity)
.where(
mapSheetAnalDataInferenceGeomEntity.geom.eq(feature.getGeometry()),
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(request.getCompareYyyy()),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(request.getTargetYyyy()))
.orderBy(mapSheetAnalDataInferenceGeomEntity.geoUid.desc())
.fetchFirst();
if (geoUid == null) {
System.err.println("Failed to get geo_uid for polygon #" + (savedCount + 1));
continue;
}
// 3. Insert into the learn_data_geom table as well
queryFactory
.insert(mapSheetLearnDataGeomEntity)
.columns(
mapSheetLearnDataGeomEntity.geoUid,
mapSheetLearnDataGeomEntity.afterYyyy,
mapSheetLearnDataGeomEntity.beforeYyyy,
mapSheetLearnDataGeomEntity.classAfterCd,
mapSheetLearnDataGeomEntity.classBeforeCd,
mapSheetLearnDataGeomEntity.geom,
mapSheetLearnDataGeomEntity.createdDate,
mapSheetLearnDataGeomEntity.modifiedDate)
.values(
geoUid,
request.getTargetYyyy(),
request.getCompareYyyy(),
feature.getProperties().getAfterClass().toLowerCase(),
feature.getProperties().getBeforeClass().toLowerCase(),
feature.getGeometry(),
ZonedDateTime.now(),
ZonedDateTime.now())
.execute();
savedCount++;
System.out.println(
"Successfully saved polygon #" + savedCount + " with geo_uid: " + geoUid);
} catch (Exception e) {
System.err.println("Error saving polygon #" + (savedCount + 1) + ": " + e.getMessage());
// Continue even if saving an individual polygon fails
}
}
if (savedCount == 0) {
throw new RuntimeException("Failed to save any polygons");
}
System.out.println(
"Successfully saved " + savedCount + "/" + request.getFeatures().size() + " polygon(s)");
} catch (Exception e) {
System.err.println("saveNewPolygon Error: " + e.getMessage());
e.printStackTrace();
throw new RuntimeException("Failed to save new polygons", e);
}
}
@Override
public TrainingDataLabelDto.CogImageResponse getCogImageUrl(
String mapSheetNum, Integer beforeYear, Integer afterYear) {
try {
// Look up the beforeYear COG URL
String beforeCogUrl =
queryFactory
.select(
Expressions.stringTemplate(
"{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
.from(imageryEntity)
.where(imageryEntity.scene5k.eq(mapSheetNum), imageryEntity.year.eq(beforeYear))
.fetchFirst();
// Look up the afterYear COG URL
String afterCogUrl =
queryFactory
.select(
Expressions.stringTemplate(
"{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
.from(imageryEntity)
.where(imageryEntity.scene5k.eq(mapSheetNum), imageryEntity.year.eq(afterYear))
.fetchFirst();
if (beforeCogUrl == null && afterCogUrl == null) {
throw new RuntimeException(
"COG images not found for mapSheetNum: "
+ mapSheetNum
+ ", years: "
+ beforeYear
+ ", "
+ afterYear);
}
return TrainingDataLabelDto.CogImageResponse.builder()
.beforeCogUrl(beforeCogUrl != null ? beforeCogUrl : "")
.afterCogUrl(afterCogUrl != null ? afterCogUrl : "")
.beforeYear(beforeYear)
.afterYear(afterYear)
.mapSheetNum(mapSheetNum)
.build();
} catch (Exception e) {
System.err.println("getCogImageUrl Error: " + e.getMessage());
e.printStackTrace();
throw new RuntimeException(
"Failed to get COG image URLs for mapSheetNum: "
+ mapSheetNum
+ ", years: "
+ beforeYear
+ ", "
+ afterYear,
e);
}
}
private StringExpression makeCogUrl(NumberPath<Integer> year) {
return new CaseBuilder()
.when(imageryEntity.year.eq(year))
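
The saveNewPolygon implementation above inserts with JPAQueryFactory and then re-selects the newest geoUid matching the geometry and years, which can race under concurrent saves. Below is a method-level sketch of an alternative that reads the generated key directly by persisting the entity; it assumes MapSheetAnalDataInferenceGeomEntity has a generated geoUid and exposes the setters used here, neither of which is shown in this diff, and it needs an injected jakarta.persistence.EntityManager.

// Sketch only: persist the entity and read the generated key instead of
// inserting via QueryDSL and re-selecting by geom/year. Setter names are assumptions.
private Long insertInferenceGeom(
        EntityManager em,
        TrainingDataLabelDto.NewPolygonRequest request,
        TrainingDataLabelDto.NewPolygonRequest.PolygonFeature feature) {
    MapSheetAnalDataInferenceGeomEntity geomEntity = new MapSheetAnalDataInferenceGeomEntity();
    geomEntity.setGeom(feature.getGeometry());
    geomEntity.setCompareYyyy(request.getCompareYyyy());
    geomEntity.setTargetYyyy(request.getTargetYyyy());
    geomEntity.setClassBeforeCd(feature.getProperties().getBeforeClass().toLowerCase());
    geomEntity.setClassAfterCd(feature.getProperties().getAfterClass().toLowerCase());
    geomEntity.setLabelState("DONE");

    em.persist(geomEntity); // INSERT handled by JPA
    em.flush();             // force the statement so the generated id is populated
    return geomEntity.getGeoUid(); // generated key, no second SELECT needed
}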

View File

@@ -0,0 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.trainingdata;
import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface TrainingDataReviewRepository
extends JpaRepository<LabelingAssignmentEntity, Long>, TrainingDataReviewRepositoryCustom {}

View File

@@ -0,0 +1,40 @@
package com.kamco.cd.kamcoback.postgres.repository.trainingdata;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DefaultPaging;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DetailRes;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.GeoFeatureRequest.Properties;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewGeometryInfo;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewListDto;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.SummaryRes;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.searchReq;
import java.util.UUID;
import org.locationtech.jts.geom.Geometry;
import org.springframework.data.domain.Page;
public interface TrainingDataReviewRepositoryCustom {
Page<ReviewListDto> findReviewAssignedList(searchReq searchReq, String userId);
ReviewGeometryInfo findReviewAssignedGeom(String operatorUid);
Long findReviewOperatorGeoUid(String operatorUid);
void updateReviewStateOperator(String operatorUid, String status, String memo);
void updateReviewExceptState(Long inferenceGeomUid, String status);
void updateReviewPolygonClass(
Long inferenceGeomUid, Geometry geometry, Properties properties, String status);
SummaryRes getSummary(String userId);
DetailRes getDetail(UUID operatorUid);
DefaultPaging getDefaultPagingNumber(String userId, Integer size, String operatorUid);
void saveNewPolygon(TrainingDataReviewDto.NewPolygonRequest request);
TrainingDataReviewDto.CogImageResponse getCogImageUrl(
String mapSheetNum, Integer beforeYear, Integer afterYear);
}
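
Spring Data wires the implementation in the next file into this fragment automatically: TrainingDataReviewRepositoryImpl matches the repository name plus the default Impl postfix, so no extra configuration is required. A minimal injection sketch (the calling class here is hypothetical):

import com.kamco.cd.kamcoback.postgres.repository.trainingdata.TrainingDataReviewRepository;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.SummaryRes;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Component;

@Component
@RequiredArgsConstructor
class ReviewSummaryProbe { // hypothetical caller, for illustration only
    // The injected bean exposes the JpaRepository methods plus every method of
    // TrainingDataReviewRepositoryCustom, implemented in TrainingDataReviewRepositoryImpl.
    private final TrainingDataReviewRepository trainingDataReviewRepository;

    SummaryRes summaryFor(String userId) {
        return trainingDataReviewRepository.getSummary(userId);
    }
}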

View File

@@ -0,0 +1,842 @@
package com.kamco.cd.kamcoback.postgres.repository.trainingdata;
import static com.kamco.cd.kamcoback.postgres.entity.QImageryEntity.imageryEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity.labelingAssignmentEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity.mapInkx5kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnDataGeomEntity.mapSheetLearnDataGeomEntity;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InspectState;
import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ChangeDetectionInfo;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ClassificationInfo;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DefaultPaging;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.DetailRes;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.GeoFeatureRequest.Properties;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.InferenceDataGeometry;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.InferenceDataGeometry.InferenceProperties;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.InspectionResultInfo;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.LearnDataGeometry;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.LearnDataGeometry.LearnProperties;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewGeometryInfo;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewListDto;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.SummaryRes;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.searchReq;
import com.querydsl.core.Tuple;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberPath;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityNotFoundException;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import lombok.extern.slf4j.Slf4j;
import org.locationtech.jts.geom.Geometry;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
@Slf4j
public class TrainingDataReviewRepositoryImpl extends QuerydslRepositorySupport
implements TrainingDataReviewRepositoryCustom {
private final JPAQueryFactory queryFactory;
public TrainingDataReviewRepositoryImpl(JPAQueryFactory queryFactory) {
super(LabelingAssignmentEntity.class);
this.queryFactory = queryFactory;
}
@Override
public Page<ReviewListDto> findReviewAssignedList(searchReq searchReq, String userId) {
// Completed reviews are limited to today; everything else is fetched regardless of date
LocalDate today = LocalDate.now(ZoneId.of("Asia/Seoul"));
ZonedDateTime start = today.atStartOfDay(ZoneId.of("Asia/Seoul"));
ZonedDateTime end = start.plusDays(1);
BooleanExpression doneToday =
labelingAssignmentEntity
.inspectState
.eq(InspectState.COMPLETE.getId())
.and(labelingAssignmentEntity.inspectStatDttm.goe(start))
.and(labelingAssignmentEntity.inspectStatDttm.lt(end));
BooleanExpression unconfirmOrExcept =
labelingAssignmentEntity.inspectState.in(
InspectState.EXCEPT.getId(), InspectState.UNCONFIRM.getId());
BooleanExpression dayStateCondition = doneToday.or(unconfirmOrExcept);
Pageable pageable = PageRequest.of(searchReq.getPage(), searchReq.getSize());
List<ReviewListDto> list =
queryFactory
.select(
Projections.constructor(
ReviewListDto.class,
labelingAssignmentEntity.assignmentUid,
labelingAssignmentEntity.inferenceGeomUid,
labelingAssignmentEntity.inspectorUid,
labelingAssignmentEntity.inspectState,
labelingAssignmentEntity.assignGroupId,
mapInkx5kEntity.mapidNm,
mapSheetAnalDataInferenceGeomEntity.pnu))
.from(labelingAssignmentEntity)
.innerJoin(mapSheetAnalDataInferenceGeomEntity)
.on(
labelingAssignmentEntity.inferenceGeomUid.eq(
mapSheetAnalDataInferenceGeomEntity.geoUid))
.innerJoin(mapInkx5kEntity)
.on(labelingAssignmentEntity.assignGroupId.eq(mapInkx5kEntity.mapidcdNo))
.where(
labelingAssignmentEntity.inspectorUid.eq(userId),
dayStateCondition,
labelingAssignmentEntity.workState.eq("DONE")) // only labeling-completed items are review targets
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(
labelingAssignmentEntity.createdDate.asc(),
labelingAssignmentEntity.inferenceGeomUid.asc())
.fetch();
Long count =
Optional.ofNullable(
queryFactory
.select(labelingAssignmentEntity.assignmentUid.count())
.from(labelingAssignmentEntity)
.innerJoin(mapSheetAnalDataInferenceGeomEntity)
.on(
labelingAssignmentEntity.inferenceGeomUid.eq(
mapSheetAnalDataInferenceGeomEntity.geoUid))
.innerJoin(mapInkx5kEntity)
.on(labelingAssignmentEntity.assignGroupId.eq(mapInkx5kEntity.mapidcdNo))
.where(
labelingAssignmentEntity.inspectorUid.eq(userId),
dayStateCondition,
labelingAssignmentEntity.workState.eq("DONE"))
.fetchOne())
.orElse(0L);
return new PageImpl<>(list, pageable, count);
}
@Override
public ReviewGeometryInfo findReviewAssignedGeom(String operatorUid) {
return queryFactory
.select(
Projections.constructor(
ReviewGeometryInfo.class,
labelingAssignmentEntity.assignmentUid,
labelingAssignmentEntity.inferenceGeomUid,
Expressions.stringTemplate(
"ST_AsGeoJSON({0})", mapSheetAnalDataInferenceGeomEntity.geom),
makeCogUrl(mapSheetAnalDataInferenceGeomEntity.compareYyyy)
.max()
.as("beforeCogUrl"),
makeCogUrl(mapSheetAnalDataInferenceGeomEntity.targetYyyy).max().as("afterCogUrl"),
Expressions.stringTemplate("ST_AsGeoJSON({0})", mapInkx5kEntity.geom)))
.from(labelingAssignmentEntity)
.innerJoin(mapSheetAnalDataInferenceGeomEntity)
.on(
labelingAssignmentEntity.inferenceGeomUid.eq(
mapSheetAnalDataInferenceGeomEntity.geoUid))
.innerJoin(mapInkx5kEntity)
.on(labelingAssignmentEntity.assignGroupId.eq(mapInkx5kEntity.mapidcdNo))
.leftJoin(imageryEntity)
.on(
imageryEntity
.scene5k
.eq(labelingAssignmentEntity.assignGroupId)
.and(
imageryEntity
.year
.eq(mapSheetAnalDataInferenceGeomEntity.compareYyyy)
.or(imageryEntity.year.eq(mapSheetAnalDataInferenceGeomEntity.targetYyyy))))
.where(labelingAssignmentEntity.assignmentUid.eq(UUID.fromString(operatorUid)))
.groupBy(
labelingAssignmentEntity.assignmentUid,
labelingAssignmentEntity.inferenceGeomUid,
labelingAssignmentEntity.inspectorUid,
labelingAssignmentEntity.inspectState,
labelingAssignmentEntity.assignGroupId,
mapInkx5kEntity.mapidNm,
mapSheetAnalDataInferenceGeomEntity.pnu,
mapSheetAnalDataInferenceGeomEntity.geom,
mapInkx5kEntity.geom)
.fetchOne();
}
@Override
public Long findReviewOperatorGeoUid(String operatorUid) {
return queryFactory
.select(labelingAssignmentEntity.inferenceGeomUid)
.from(labelingAssignmentEntity)
.where(labelingAssignmentEntity.assignmentUid.eq(UUID.fromString(operatorUid)))
.fetchOne();
}
@Override
public void updateReviewStateOperator(String operatorUid, String status, String memo) {
queryFactory
.update(labelingAssignmentEntity)
.set(labelingAssignmentEntity.inspectState, status)
.set(labelingAssignmentEntity.inspectStatDttm, ZonedDateTime.now())
.where(labelingAssignmentEntity.assignmentUid.eq(UUID.fromString(operatorUid)))
.execute();
}
@Override
public void updateReviewExceptState(Long inferenceGeomUid, String status) {
// When a review is excluded, only the assignment table is updated
// (the inference_geom table has no inspect_state column)
}
@Override
public void updateReviewPolygonClass(
Long inferenceGeomUid, Geometry geometry, Properties properties, String status) {
// Fetch the inference_geom record
MapSheetAnalDataInferenceGeomEntity entity =
queryFactory
.selectFrom(mapSheetAnalDataInferenceGeomEntity)
.where(mapSheetAnalDataInferenceGeomEntity.geoUid.eq(inferenceGeomUid))
.fetchOne();
if (Objects.isNull(entity)) {
throw new EntityNotFoundException(
"MapSheetAnalDataInferenceGeomEntity not found for inferenceGeomUid: "
+ inferenceGeomUid);
}
// Update or insert learn_data_geom (editing a polygon whose review is complete)
Long existingLearnGeomUid =
queryFactory
.select(mapSheetLearnDataGeomEntity.geoUid)
.from(mapSheetLearnDataGeomEntity)
.where(mapSheetLearnDataGeomEntity.geoUid.eq(inferenceGeomUid))
.fetchOne();
if (existingLearnGeomUid != null) {
// Update the existing row
queryFactory
.update(mapSheetLearnDataGeomEntity)
.set(mapSheetLearnDataGeomEntity.classAfterCd, properties.getAfterClass().toLowerCase())
.set(mapSheetLearnDataGeomEntity.classBeforeCd, properties.getBeforeClass().toLowerCase())
.set(mapSheetLearnDataGeomEntity.geom, geometry)
.set(mapSheetLearnDataGeomEntity.modifiedDate, ZonedDateTime.now())
.where(mapSheetLearnDataGeomEntity.geoUid.eq(inferenceGeomUid))
.execute();
} else {
// Insert a new row
queryFactory
.insert(mapSheetLearnDataGeomEntity)
.columns(
mapSheetLearnDataGeomEntity.geoUid,
mapSheetLearnDataGeomEntity.afterYyyy,
mapSheetLearnDataGeomEntity.beforeYyyy,
mapSheetLearnDataGeomEntity.classAfterCd,
mapSheetLearnDataGeomEntity.classBeforeCd,
mapSheetLearnDataGeomEntity.geom,
mapSheetLearnDataGeomEntity.createdDate,
mapSheetLearnDataGeomEntity.modifiedDate)
.values(
inferenceGeomUid,
entity.getTargetYyyy(),
entity.getCompareYyyy(),
properties.getAfterClass().toLowerCase(),
properties.getBeforeClass().toLowerCase(),
geometry,
ZonedDateTime.now(),
ZonedDateTime.now())
.execute();
}
}
@Override
public SummaryRes getSummary(String userId) {
// Default values
Long totalCnt = 0L;
Long undoneCnt = 0L;
Long todayCnt = 0L;
try {
System.out.println("=== getSummary START (Review) ===");
System.out.println("userId: " + userId);
// 1. Total assigned count (labeling-completed only)
try {
Long result =
queryFactory
.select(labelingAssignmentEntity.count())
.from(labelingAssignmentEntity)
.where(
labelingAssignmentEntity.inspectorUid.eq(userId),
labelingAssignmentEntity.workState.eq("DONE"))
.fetchOne();
totalCnt = (result != null) ? result : 0L;
System.out.println("totalCnt: " + totalCnt);
} catch (Exception e) {
System.err.println(
"totalCnt query error: " + e.getClass().getName() + " - " + e.getMessage());
if (e.getCause() != null) {
System.err.println("Caused by: " + e.getCause().getMessage());
}
totalCnt = 0L;
}
// 2. Not-yet-reviewed count (UNCONFIRM state)
try {
Long result =
queryFactory
.select(labelingAssignmentEntity.count())
.from(labelingAssignmentEntity)
.where(
labelingAssignmentEntity.inspectorUid.eq(userId),
labelingAssignmentEntity.workState.eq("DONE"),
labelingAssignmentEntity.inspectState.eq("UNCONFIRM"))
.fetchOne();
undoneCnt = (result != null) ? result : 0L;
System.out.println("undoneCnt: " + undoneCnt);
} catch (Exception e) {
System.err.println(
"undoneCnt query error: " + e.getClass().getName() + " - " + e.getMessage());
if (e.getCause() != null) {
System.err.println("Caused by: " + e.getCause().getMessage());
}
undoneCnt = 0L;
}
// 3. Completed-today count
try {
// Compute the start and end of today
ZonedDateTime startOfToday = LocalDate.now().atStartOfDay(java.time.ZoneId.systemDefault());
ZonedDateTime endOfToday = startOfToday.plusDays(1);
System.out.println("startOfToday: " + startOfToday);
System.out.println("endOfToday: " + endOfToday);
Long result =
queryFactory
.select(labelingAssignmentEntity.count())
.from(labelingAssignmentEntity)
.where(
labelingAssignmentEntity.inspectorUid.eq(userId),
labelingAssignmentEntity.inspectState.eq("COMPLETE"),
labelingAssignmentEntity.inspectStatDttm.isNotNull(),
labelingAssignmentEntity.inspectStatDttm.goe(startOfToday),
labelingAssignmentEntity.inspectStatDttm.lt(endOfToday))
.fetchOne();
todayCnt = (result != null) ? result : 0L;
System.out.println("todayCnt: " + todayCnt);
} catch (Exception e) {
System.err.println(
"todayCnt query error: " + e.getClass().getName() + " - " + e.getMessage());
if (e.getCause() != null) {
System.err.println("Caused by: " + e.getCause().getMessage());
}
todayCnt = 0L;
}
System.out.println("=== getSummary END (Review) ===");
System.out.println(
"Final result - totalCnt: "
+ totalCnt
+ ", undoneCnt: "
+ undoneCnt
+ ", todayCnt: "
+ todayCnt);
} catch (Exception e) {
// Top-level exception handling
System.err.println("=== getSummary OUTER ERROR (Review) ===");
System.err.println("Error: " + e.getClass().getName() + " - " + e.getMessage());
if (e.getCause() != null) {
System.err.println("Caused by: " + e.getCause().getMessage());
}
e.printStackTrace();
}
// Always return a normal response (never throw)
return SummaryRes.builder().totalCnt(totalCnt).undoneCnt(undoneCnt).todayCnt(todayCnt).build();
}
@Override
public DetailRes getDetail(UUID operatorUid) {
try {
// 1. Look up the work assignment
var assignment =
queryFactory
.selectFrom(labelingAssignmentEntity)
.where(labelingAssignmentEntity.assignmentUid.eq(operatorUid))
.fetchOne();
if (assignment == null) {
throw new RuntimeException("Assignment not found: " + operatorUid);
}
// 2. Look up the inference result
var mapSheetAnalDataInferenceGeomEntityEntity =
queryFactory
.selectFrom(mapSheetAnalDataInferenceGeomEntity)
.where(
mapSheetAnalDataInferenceGeomEntity.geoUid.eq(
assignment.toDto().getInferenceGeomUid()))
.fetchOne();
if (mapSheetAnalDataInferenceGeomEntityEntity == null) {
throw new RuntimeException(
"Inference geometry not found: " + assignment.toDto().getInferenceGeomUid());
}
// 3. Look up the map sheet
var mapSheetEntity =
queryFactory
.selectFrom(mapInkx5kEntity)
.where(mapInkx5kEntity.mapidcdNo.eq(assignment.toDto().getAssignGroupId()))
.fetchOne();
// 4. Look up COG URLs - imagery only
String beforeCogUrl = "";
String afterCogUrl = "";
try {
var beforeImagery =
queryFactory
.select(
Expressions.stringTemplate(
"{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
.from(imageryEntity)
.where(
imageryEntity.scene5k.eq(assignment.toDto().getAssignGroupId()),
imageryEntity.year.eq(
mapSheetAnalDataInferenceGeomEntityEntity.getCompareYyyy()))
.fetchFirst();
beforeCogUrl = beforeImagery != null ? beforeImagery : "";
var afterImagery =
queryFactory
.select(
Expressions.stringTemplate(
"{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
.from(imageryEntity)
.where(
imageryEntity.scene5k.eq(assignment.toDto().getAssignGroupId()),
imageryEntity.year.eq(
mapSheetAnalDataInferenceGeomEntityEntity.getTargetYyyy()))
.fetchFirst();
afterCogUrl = afterImagery != null ? afterImagery : "";
} catch (Exception e) {
System.err.println("COG URL retrieval error: " + e.getMessage());
// Keep empty strings if the COG URL lookup fails
}
// 5. Build the DTOs
var changeDetectionInfo =
ChangeDetectionInfo.builder()
.mapSheetInfo(mapSheetEntity != null ? mapSheetEntity.getMapidNm() : "")
.detectionYear(
(mapSheetAnalDataInferenceGeomEntityEntity.getCompareYyyy() != null
? mapSheetAnalDataInferenceGeomEntityEntity.getCompareYyyy()
: 0)
+ "-"
+ (mapSheetAnalDataInferenceGeomEntityEntity.getTargetYyyy() != null
? mapSheetAnalDataInferenceGeomEntityEntity.getTargetYyyy()
: 0))
.beforeClass(
ClassificationInfo.builder()
.classification(
mapSheetAnalDataInferenceGeomEntityEntity.getClassBeforeCd() != null
? mapSheetAnalDataInferenceGeomEntityEntity.getClassBeforeCd()
: "")
.probability(
mapSheetAnalDataInferenceGeomEntityEntity.getClassBeforeProb() != null
? mapSheetAnalDataInferenceGeomEntityEntity.getClassBeforeProb()
: 0.0)
.build())
.afterClass(
ClassificationInfo.builder()
.classification(
mapSheetAnalDataInferenceGeomEntityEntity.getClassAfterCd() != null
? mapSheetAnalDataInferenceGeomEntityEntity.getClassAfterCd()
: "")
.probability(
mapSheetAnalDataInferenceGeomEntityEntity.getClassAfterProb() != null
? mapSheetAnalDataInferenceGeomEntityEntity.getClassAfterProb()
: 0.0)
.build())
.area(
mapSheetAnalDataInferenceGeomEntityEntity.getArea() != null
? mapSheetAnalDataInferenceGeomEntityEntity.getArea()
: 0.0)
.detectionAccuracy(
mapSheetAnalDataInferenceGeomEntityEntity.getCdProb() != null
? mapSheetAnalDataInferenceGeomEntityEntity.getCdProb()
: 0.0)
.pnu(
mapSheetAnalDataInferenceGeomEntityEntity.getPnu() != null
? mapSheetAnalDataInferenceGeomEntityEntity.getPnu()
: 0L)
.build();
var inspectionResultInfo =
InspectionResultInfo.builder()
.verificationResult(convertInspectState(assignment.toDto().getInspectState()))
.inappropriateReason("")
.memo("")
.build();
// 6. Convert the Geometry to GeoJSON
InferenceDataGeometry inferData =
queryFactory
.select(
Projections.constructor(
InferenceDataGeometry.class,
Expressions.stringTemplate("{0}", "Feature"),
Expressions.stringTemplate(
"ST_AsGeoJSON({0})", mapSheetAnalDataInferenceGeomEntity.geom),
Projections.constructor(
InferenceProperties.class,
mapSheetAnalDataInferenceGeomEntity.classBeforeCd,
mapSheetAnalDataInferenceGeomEntity.classAfterCd)))
.from(mapSheetAnalDataInferenceGeomEntity)
.where(
mapSheetAnalDataInferenceGeomEntity.geoUid.eq(
mapSheetAnalDataInferenceGeomEntityEntity.getGeoUid()))
.fetchOne();
// Fetch the map sheet bbox as GeoJSON
JsonNode mapBbox = null;
if (mapSheetEntity != null && mapSheetEntity.getGeom() != null) {
try {
String bboxString =
queryFactory
.select(Expressions.stringTemplate("ST_AsGeoJSON({0})", mapInkx5kEntity.geom))
.from(mapInkx5kEntity)
.where(mapInkx5kEntity.mapidcdNo.eq(assignment.toDto().getAssignGroupId()))
.fetchOne();
if (bboxString != null) {
ObjectMapper mapper = new ObjectMapper();
mapBbox = mapper.readTree(bboxString);
}
} catch (Exception e) {
throw new RuntimeException("GeoJSON parsing error: " + e.getMessage());
}
}
// 7. Convert the saved labeling Geometries to GeoJSON (there can be several)
List<LearnDataGeometry> learnDataList =
queryFactory
.select(
Projections.constructor(
LearnDataGeometry.class,
Expressions.stringTemplate("{0}", "Feature"),
Expressions.stringTemplate(
"ST_AsGeoJSON({0})", mapSheetLearnDataGeomEntity.geom),
Projections.constructor(
LearnProperties.class,
mapSheetLearnDataGeomEntity.classBeforeCd,
mapSheetLearnDataGeomEntity.classAfterCd)))
.from(mapSheetLearnDataGeomEntity)
.where(
mapSheetLearnDataGeomEntity.geoUid.eq(
mapSheetAnalDataInferenceGeomEntityEntity.getGeoUid()))
.fetch(); // changed fetchOne() -> fetch()
return DetailRes.builder()
.operatorUid(operatorUid)
.changeDetectionInfo(changeDetectionInfo)
.inspectionResultInfo(inspectionResultInfo)
.geom(inferData)
.beforeCogUrl(beforeCogUrl)
.afterCogUrl(afterCogUrl)
.mapBox(mapBbox)
.learnGeometries(learnDataList) // learnGeometry -> learnGeometries
.build();
} catch (Exception e) {
System.err.println("getDetail Error: " + e.getMessage());
e.printStackTrace();
throw new RuntimeException("Failed to get detail for operatorUid: " + operatorUid, e);
}
}
@Override
public DefaultPaging getDefaultPagingNumber(String userId, Integer size, String operatorUid) {
ZoneId KST = ZoneId.of("Asia/Seoul");
ZonedDateTime todayStart = ZonedDateTime.now(KST).toLocalDate().atStartOfDay(KST);
ZonedDateTime todayEnd = todayStart.plusDays(1);
BooleanExpression doneToday =
labelingAssignmentEntity
.inspectState
.eq(InspectState.COMPLETE.getId())
.and(labelingAssignmentEntity.inspectStatDttm.goe(todayStart))
.and(labelingAssignmentEntity.inspectStatDttm.lt(todayEnd));
BooleanExpression unconfirmOrExcept =
labelingAssignmentEntity.inspectState.in(
InspectState.EXCEPT.getId(), InspectState.UNCONFIRM.getId());
BooleanExpression stateCondition = doneToday.or(unconfirmOrExcept);
Tuple firstAssigned =
queryFactory
.select(
labelingAssignmentEntity.assignmentUid,
labelingAssignmentEntity.createdDate,
labelingAssignmentEntity.inferenceGeomUid)
.from(labelingAssignmentEntity)
.where(
labelingAssignmentEntity.inspectorUid.eq(userId),
labelingAssignmentEntity.workState.eq("DONE"),
stateCondition,
operatorUid == null
? labelingAssignmentEntity.inspectState.eq(InspectState.UNCONFIRM.getId())
: labelingAssignmentEntity.assignmentUid.eq(UUID.fromString(operatorUid)))
.orderBy(
labelingAssignmentEntity.createdDate.asc(),
labelingAssignmentEntity.inferenceGeomUid.asc())
.limit(1)
.fetchOne();
if (firstAssigned == null) {
return DefaultPaging.builder().page(0).operatorUid(null).build();
}
UUID firstAssignedUid = firstAssigned.get(labelingAssignmentEntity.assignmentUid);
ZonedDateTime createdDttm = firstAssigned.get(labelingAssignmentEntity.createdDate);
Long inferenceGeomUid = firstAssigned.get(labelingAssignmentEntity.inferenceGeomUid);
BooleanExpression beforeCondition =
labelingAssignmentEntity
.createdDate
.lt(createdDttm)
.or(
labelingAssignmentEntity
.createdDate
.eq(createdDttm)
.and(labelingAssignmentEntity.inferenceGeomUid.lt(inferenceGeomUid)));
Long beforeCnt =
queryFactory
.select(labelingAssignmentEntity.count())
.from(labelingAssignmentEntity)
.where(
labelingAssignmentEntity.inspectorUid.eq(userId),
labelingAssignmentEntity.workState.eq("DONE"),
beforeCondition.and(stateCondition))
.fetchOne();
int page = (int) (beforeCnt / size); // default page size is 20
return DefaultPaging.builder().page(page).operatorUid(firstAssignedUid).build();
}
@Override
public void saveNewPolygon(TrainingDataReviewDto.NewPolygonRequest request) {
try {
if (request.getFeatures() == null || request.getFeatures().isEmpty()) {
throw new RuntimeException("No polygons to save");
}
System.out.println(
"Saving "
+ request.getFeatures().size()
+ " new polygon(s) for mapSheetNum: "
+ request.getMapSheetNum());
int savedCount = 0;
for (TrainingDataReviewDto.NewPolygonRequest.PolygonFeature feature : request.getFeatures()) {
try {
// 1. Insert the new polygon into the map_sheet_anal_data_inference_geom table
queryFactory
.insert(mapSheetAnalDataInferenceGeomEntity)
.columns(
mapSheetAnalDataInferenceGeomEntity.geom,
mapSheetAnalDataInferenceGeomEntity.compareYyyy,
mapSheetAnalDataInferenceGeomEntity.targetYyyy,
mapSheetAnalDataInferenceGeomEntity.classBeforeCd,
mapSheetAnalDataInferenceGeomEntity.classAfterCd,
mapSheetAnalDataInferenceGeomEntity.createdDttm,
mapSheetAnalDataInferenceGeomEntity.updatedDttm,
mapSheetAnalDataInferenceGeomEntity.labelState)
.values(
feature.getGeometry(),
request.getCompareYyyy(),
request.getTargetYyyy(),
feature.getProperties().getBeforeClass().toLowerCase(),
feature.getProperties().getAfterClass().toLowerCase(),
ZonedDateTime.now(),
ZonedDateTime.now(),
"DONE")
.execute();
// 2. Look up the generated geoUid
Long geoUid =
queryFactory
.select(mapSheetAnalDataInferenceGeomEntity.geoUid)
.from(mapSheetAnalDataInferenceGeomEntity)
.where(
mapSheetAnalDataInferenceGeomEntity.geom.eq(feature.getGeometry()),
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(request.getCompareYyyy()),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(request.getTargetYyyy()))
.orderBy(mapSheetAnalDataInferenceGeomEntity.geoUid.desc())
.fetchFirst();
if (geoUid == null) {
System.err.println("Failed to get geo_uid for polygon #" + (savedCount + 1));
continue;
}
// 3. Insert into the learn_data_geom table as well
queryFactory
.insert(mapSheetLearnDataGeomEntity)
.columns(
mapSheetLearnDataGeomEntity.geoUid,
mapSheetLearnDataGeomEntity.afterYyyy,
mapSheetLearnDataGeomEntity.beforeYyyy,
mapSheetLearnDataGeomEntity.classAfterCd,
mapSheetLearnDataGeomEntity.classBeforeCd,
mapSheetLearnDataGeomEntity.geom,
mapSheetLearnDataGeomEntity.createdDate,
mapSheetLearnDataGeomEntity.modifiedDate)
.values(
geoUid,
request.getTargetYyyy(),
request.getCompareYyyy(),
feature.getProperties().getAfterClass().toLowerCase(),
feature.getProperties().getBeforeClass().toLowerCase(),
feature.getGeometry(),
ZonedDateTime.now(),
ZonedDateTime.now())
.execute();
savedCount++;
System.out.println(
"Successfully saved polygon #" + savedCount + " with geo_uid: " + geoUid);
} catch (Exception e) {
System.err.println("Error saving polygon #" + (savedCount + 1) + ": " + e.getMessage());
// Continue even if saving an individual polygon fails
}
}
if (savedCount == 0) {
throw new RuntimeException("Failed to save any polygons");
}
System.out.println(
"Successfully saved " + savedCount + "/" + request.getFeatures().size() + " polygon(s)");
} catch (Exception e) {
System.err.println("saveNewPolygon Error: " + e.getMessage());
e.printStackTrace();
throw new RuntimeException("Failed to save new polygons", e);
}
}
@Override
public TrainingDataReviewDto.CogImageResponse getCogImageUrl(
String mapSheetNum, Integer beforeYear, Integer afterYear) {
try {
// Look up the beforeYear COG URL
String beforeCogUrl =
queryFactory
.select(
Expressions.stringTemplate(
"{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
.from(imageryEntity)
.where(imageryEntity.scene5k.eq(mapSheetNum), imageryEntity.year.eq(beforeYear))
.fetchFirst();
// Look up the afterYear COG URL
String afterCogUrl =
queryFactory
.select(
Expressions.stringTemplate(
"{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
.from(imageryEntity)
.where(imageryEntity.scene5k.eq(mapSheetNum), imageryEntity.year.eq(afterYear))
.fetchFirst();
if (beforeCogUrl == null && afterCogUrl == null) {
throw new RuntimeException(
"COG images not found for mapSheetNum: "
+ mapSheetNum
+ ", years: "
+ beforeYear
+ ", "
+ afterYear);
}
return TrainingDataReviewDto.CogImageResponse.builder()
.beforeCogUrl(beforeCogUrl != null ? beforeCogUrl : "")
.afterCogUrl(afterCogUrl != null ? afterCogUrl : "")
.beforeYear(beforeYear)
.afterYear(afterYear)
.mapSheetNum(mapSheetNum)
.build();
} catch (Exception e) {
System.err.println("getCogImageUrl Error: " + e.getMessage());
e.printStackTrace();
throw new RuntimeException(
"Failed to get COG image URLs for mapSheetNum: "
+ mapSheetNum
+ ", years: "
+ beforeYear
+ ", "
+ afterYear,
e);
}
}
private StringExpression makeCogUrl(NumberPath<Integer> year) {
return new CaseBuilder()
.when(imageryEntity.year.eq(year))
.then(
Expressions.stringTemplate(
"{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
.otherwise("");
}
private BooleanExpression statusInInspectState(String status) {
if (Objects.isNull(status)) {
return null;
}
String[] arrStatus = status.split(",");
return labelingAssignmentEntity.inspectState.in(arrStatus);
}
private String convertInspectState(String inspectState) {
if (inspectState == null) {
return "미확인"; // unconfirmed
}
switch (inspectState) {
case "UNCONFIRM":
return "미확인"; // unconfirmed
case "EXCEPT":
return "제외"; // excluded
case "COMPLETE":
return "완료"; // completed
default:
return "미확인"; // unconfirmed
}
}
}
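
The default page index in getDefaultPagingNumber is derived from how many assignments sort before the target row: page = beforeCnt / size, using integer division. A quick worked example with a hypothetical count:

// Worked example of the page calculation in getDefaultPagingNumber.
public class PagingExample {
    public static void main(String[] args) {
        long beforeCnt = 45L; // rows that sort before the target assignment (hypothetical value)
        int size = 20;        // default page size
        int page = (int) (beforeCnt / size);
        System.out.println(page); // 2 -> the target row lands on zero-based page 2
    }
}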