{
+
+ private final T enumValue;
+ private final String id;
+ private final String text;
+
+ public EnumDto(T enumValue, String id, String text) {
+ this.enumValue = enumValue;
+ this.id = id;
+ this.text = text;
+ }
+ }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/CodeDto.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/CodeDto.java
new file mode 100644
index 0000000..ae4bc00
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/CodeDto.java
@@ -0,0 +1,20 @@
+package com.kamco.cd.kamcoback.enums;
+
/**
 * Immutable code/name pair used to expose enum constants to clients.
 */
public class CodeDto {

  // final: a CodeDto is a read-only view and must not change after construction
  private final String code;
  private final String name;

  /**
   * @param code machine-readable code identifier
   * @param name human-readable display name
   */
  public CodeDto(String code, String name) {
    this.code = code;
    this.name = name;
  }

  public String getCode() {
    return code;
  }

  public String getName() {
    return name;
  }
}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/CommonUseStatus.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/CommonUseStatus.java
new file mode 100644
index 0000000..bcd0b39
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/CommonUseStatus.java
@@ -0,0 +1,46 @@
+package com.kamco.cd.kamcoback.enums;
+
+import com.kamco.cd.kamcoback.enums.ApiConfigEnum.EnumDto;
+import com.kamco.cd.kamcoback.inferface.EnumType;
+import java.util.Arrays;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+/**
+ * Common usage status used across the system.
+ *
+ * This enum represents whether a resource is active, excluded from processing, or inactive. It
+ * is commonly used for filtering, business rules, and status management.
+ */
+@Getter
+@AllArgsConstructor
+public enum CommonUseStatus implements EnumType {
+
+ // @formatter:off
+ USE("USE", "사용중", 100)
+ /** Actively used and available */
+ ,
+ EXCEPT("EXCEPT", "영구 추론제외", 200)
+ /** Explicitly excluded from use or processing */
+ ,
+ AUTO_EXCEPT("AUTO_EXCEPT", "자동추론 제외", 300),
+ NOT_USE("NOT_USE", "사용안함", 999)
+/** Not used or disabled */
+;
+ // @formatter:on
+
+ private String id;
+ private String text;
+ private int ordering;
+
+ public static CommonUseStatus getEnumById(String id) {
+ return Arrays.stream(CommonUseStatus.values())
+ .filter(x -> x.getId().equals(id))
+ .findFirst()
+ .orElse(CommonUseStatus.NOT_USE);
+ }
+
+ public EnumDto getEnumDto() {
+ return new EnumDto<>(this, this.id, this.text);
+ }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/Enums.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/Enums.java
new file mode 100644
index 0000000..c87856e
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/Enums.java
@@ -0,0 +1,86 @@
+package com.kamco.cd.kamcoback.enums;
+
+import com.kamco.cd.kamcoback.inferface.CodeExpose;
+import com.kamco.cd.kamcoback.inferface.CodeHidden;
+import com.kamco.cd.kamcoback.inferface.EnumType;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.reflections.Reflections;
+
+public class Enums {
+
+ private static final String BASE_PACKAGE = "com.kamco.cd.kamcoback";
+
+ /** 노출 가능한 enum만 모아둔 맵 key: enum simpleName (예: RoleType) value: enum Class */
+ private static final Map>> exposedEnumMap = scanExposedEnumMap();
+
+ // code로 enum 찾기
+ public static & EnumType> E fromId(Class enumClass, String id) {
+ if (id == null) {
+ return null;
+ }
+
+ for (E e : enumClass.getEnumConstants()) {
+ if (id.equalsIgnoreCase(e.getId())) {
+ return e;
+ }
+ }
+ return null;
+ }
+
+ // enum -> CodeDto list
+ public static List toList(Class extends Enum>> enumClass) {
+ Object[] enums = enumClass.getEnumConstants();
+
+ return Arrays.stream(enums)
+ .map(e -> (EnumType) e)
+ .filter(e -> !isHidden(enumClass, (Enum>) e))
+ .map(e -> new CodeDto(e.getId(), e.getText()))
+ .toList();
+ }
+
+ private static boolean isHidden(Class extends Enum>> enumClass, Enum> e) {
+ try {
+ return enumClass.getField(e.name()).isAnnotationPresent(CodeHidden.class);
+ } catch (NoSuchFieldException ex) {
+ return false;
+ }
+ }
+
+ /** 특정 타입(enum)만 조회 /codes/{type} -> type = RoleType 같은 값 */
+ public static List getCodes(String type) {
+ Class extends Enum>> enumClass = exposedEnumMap.get(type);
+ if (enumClass == null) {
+ throw new IllegalArgumentException("지원하지 않는 코드 타입: " + type);
+ }
+ return toList(enumClass);
+ }
+
+ /** 전체 enum 코드 조회 */
+ public static Map> getAllCodes() {
+ Map> result = new HashMap<>();
+ for (Map.Entry>> e : exposedEnumMap.entrySet()) {
+ result.put(e.getKey(), toList(e.getValue()));
+ }
+ return result;
+ }
+
+ /** CodeExpose + EnumType 인 enum만 스캔해서 Map 구성 */
+ private static Map>> scanExposedEnumMap() {
+ Reflections reflections = new Reflections(BASE_PACKAGE);
+
+ Set> types = reflections.getTypesAnnotatedWith(CodeExpose.class);
+
+ Map>> result = new HashMap<>();
+
+ for (Class> clazz : types) {
+ if (clazz.isEnum() && EnumType.class.isAssignableFrom(clazz)) {
+ result.put(clazz.getSimpleName(), (Class extends Enum>>) clazz);
+ }
+ }
+ return result;
+ }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/MngStateType.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/MngStateType.java
new file mode 100644
index 0000000..f5fd245
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/MngStateType.java
@@ -0,0 +1,26 @@
+package com.kamco.cd.kamcoback.enums;
+
+import com.kamco.cd.kamcoback.inferface.EnumType;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+@Getter
+@AllArgsConstructor
+public enum MngStateType implements EnumType {
+ NOTYET("동기화 시작"),
+ PROCESSING("데이터 체크"),
+ DONE("동기화 작업 종료"),
+ TAKINGERROR("오류 데이터 처리중");
+
+ private final String desc;
+
+ @Override
+ public String getId() {
+ return name();
+ }
+
+ @Override
+ public String getText() {
+ return desc;
+ }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/SyncStateType.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/SyncStateType.java
new file mode 100644
index 0000000..adba1cc
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/enums/SyncStateType.java
@@ -0,0 +1,33 @@
+package com.kamco.cd.kamcoback.enums;
+
+import com.kamco.cd.kamcoback.inferface.CodeExpose;
+import com.kamco.cd.kamcoback.inferface.CodeHidden;
+import com.kamco.cd.kamcoback.inferface.EnumType;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+@CodeExpose
+@Getter
+@AllArgsConstructor
+public enum SyncStateType implements EnumType {
+ @CodeHidden
+ NOTYET("미처리"),
+ NOFILE("파일없음"),
+ NOTPAIR("페어파일누락"),
+ DUPLICATE("파일중복"),
+ TYPEERROR("손상파일"),
+ @CodeHidden
+ DONE("완료");
+
+ private final String desc;
+
+ @Override
+ public String getId() {
+ return name();
+ }
+
+ @Override
+ public String getText() {
+ return desc;
+ }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inference/dto/DetectionClassification.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inference/dto/DetectionClassification.java
new file mode 100644
index 0000000..d2a1330
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inference/dto/DetectionClassification.java
@@ -0,0 +1,56 @@
+package com.kamco.cd.kamcoback.inference.dto;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
/**
 * Land-cover classification labels produced by change detection.
 *
 * <p>Each constant carries an external id, a Korean display name, and a sort order.
 */
public enum DetectionClassification {
  ROAD("road", "도로", 10),
  BUILDING("building", "건물", 20),
  GREENHOUSE("greenhouse", "비닐하우스", 30),
  FIELD("field", "논/밭", 40),
  ORCHARD("orchard", "과수원", 50),
  GRASS("grass", "초지", 60),
  FOREST("forest", "숲", 70),
  WATER("water", "물", 80),
  STONE("stone", "모래/자갈", 90),
  WASTE("waste", "적치물", 100),
  CONTAINER("container", "컨테이너", 110),
  LAND("land", "일반토지", 120),
  SOLAR("solar", "태양광", 130),
  TANK("tank", "물탱크", 140),
  NDC("NDC", "미분류", 150),
  ETC("ETC", "기타", 160);

  private final String id;
  private final String desc;
  private final int order;

  DetectionClassification(String id, String desc, int order) {
    this.id = id;
    this.desc = desc;
    this.order = order;
  }

  public String getId() {
    return id;
  }

  public String getDesc() {
    return desc;
  }

  public int getOrder() {
    return order;
  }

  /**
   * Resolves a constant from text by constant name, case-insensitively; returns {@link #ETC}
   * for null, blank, or unknown input.
   */
  public static DetectionClassification fromString(String text) {
    if (text == null || text.trim().isEmpty()) {
      return ETC;
    }
    for (DetectionClassification value : values()) {
      // equalsIgnoreCase is locale-independent — avoids valueOf(text.toUpperCase()),
      // which breaks under locale-sensitive casing (e.g. Turkish dotless i)
      if (value.name().equalsIgnoreCase(text)) {
        return value;
      }
    }
    return ETC;
  }

  /**
   * Korean display name for the given text (falls back to ETC's name when unresolved).
   */
  public static String fromStrDesc(String text) {
    return fromString(text).getDesc();
  }
}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceDetailDto.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceDetailDto.java
new file mode 100644
index 0000000..4cfe330
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceDetailDto.java
@@ -0,0 +1,561 @@
+package com.kamco.cd.kamcoback.inference.dto;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.kamco.cd.kamcoback.common.utils.enums.ImageryFitStatus;
+import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
+import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.DetectOption;
+import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
+import io.swagger.v3.oas.annotations.media.Schema;
+import java.time.Duration;
+import java.time.ZonedDateTime;
+import java.util.List;
+import java.util.UUID;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Sort;
+
+public class InferenceDetailDto {
+
  /** Basic info for a single analysis result: id, counts, timing, and state. */
  @Schema(name = "InferenceResultBasic", description = "분석결과 기본 정보")
  @Getter
  public static class Basic {

    private Long id;
    private String dataName;
    // NOTE(review): "mapSheepNum" looks like a typo for "mapSheetNum" — confirm before renaming
    private Long mapSheepNum;
    private Long detectingCnt;
    @JsonFormatDttm private ZonedDateTime analStrtDttm;
    @JsonFormatDttm private ZonedDateTime analEndDttm;
    private Long analSec;
    private String analState;

    public Basic(
        Long id,
        String dataName,
        Long mapSheepNum,
        Long detectingCnt,
        ZonedDateTime analStrtDttm,
        ZonedDateTime analEndDttm,
        Long analSec,
        String analState) {
      this.id = id;
      this.dataName = dataName;
      this.mapSheepNum = mapSheepNum;
      this.detectingCnt = detectingCnt;
      this.analStrtDttm = analStrtDttm;
      this.analEndDttm = analEndDttm;
      this.analSec = analSec;
      this.analState = analState;
    }
  }

  /** Row for the analysis-result list view. */
  @Schema(name = "AnalysisResultList", description = "분석결과 목록")
  @Getter
  public static class AnalResList {

    private Long id;
    private String analTitle;
    private Long detectingCnt;
    @JsonFormatDttm private ZonedDateTime analStrtDttm;
    @JsonFormatDttm private ZonedDateTime analEndDttm;
    private Long analSec;
    // predicted analysis duration in seconds
    private Long analPredSec;
    private String analState;
    private String analStateNm;
    private String gukyuinUsed;

    public AnalResList(
        Long id,
        String analTitle,
        Long detectingCnt,
        ZonedDateTime analStrtDttm,
        ZonedDateTime analEndDttm,
        Long analSec,
        Long analPredSec,
        String analState,
        String analStateNm,
        String gukyuinUsed) {
      this.id = id;
      this.analTitle = analTitle;
      this.detectingCnt = detectingCnt;
      this.analStrtDttm = analStrtDttm;
      this.analEndDttm = analEndDttm;
      this.analSec = analSec;
      this.analPredSec = analPredSec;
      this.analState = analState;
      this.analStateNm = analStateNm;
      this.gukyuinUsed = gukyuinUsed;
    }
  }

  /** Summary header for the analysis-result detail page. */
  @Schema(name = "AnalysisResultSummary", description = "분석결과 요약정보")
  @Getter
  public static class AnalResSummary {

    private Long id;
    private String analTitle;
    private String modelInfo;
    private Integer targetYyyy;
    private Integer compareYyyy;
    @JsonFormatDttm private ZonedDateTime analStrtDttm;
    @JsonFormatDttm private ZonedDateTime analEndDttm;
    private Long analSec;
    private Long analPredSec;
    private String resultUrl;
    private Long detectingCnt;
    private Double accuracy;
    private String analState;
    private String analStateNm;

    public AnalResSummary(
        Long id,
        String analTitle,
        String modelInfo,
        Integer targetYyyy,
        Integer compareYyyy,
        ZonedDateTime analStrtDttm,
        ZonedDateTime analEndDttm,
        Long analSec,
        Long analPredSec,
        String resultUrl,
        Long detectingCnt,
        Double accuracy,
        String analState,
        String analStateNm) {
      this.id = id;
      this.analTitle = analTitle;
      this.modelInfo = modelInfo;
      this.targetYyyy = targetYyyy;
      this.compareYyyy = compareYyyy;
      this.analStrtDttm = analStrtDttm;
      this.analEndDttm = analEndDttm;
      this.analSec = analSec;
      this.analPredSec = analPredSec;
      this.resultUrl = resultUrl;
      this.detectingCnt = detectingCnt;
      this.accuracy = accuracy;
      this.analState = analState;
      this.analStateNm = analStateNm;
    }
  }

  /** Per-classification count row for the dashboard; display name resolved from the code. */
  @Getter
  public static class Dashboard {

    String classAfterCd;
    String classAfterName;
    Long classAfterCnt;

    public Dashboard(String classAfterCd, Long classAfterCnt) {
      this.classAfterCd = classAfterCd;
      // display name derived once from the detection classification code
      this.classAfterName = DetectionClassification.fromString(classAfterCd).getDesc();
      this.classAfterCnt = classAfterCnt;
    }
  }
+
+ @Getter
+ public static class Detail {
+
+ AnalResSummary summary;
+ List dashboard;
+ Long totalCnt;
+
+ public Detail(AnalResSummary summary, List dashboard, Long totalCnt) {
+ this.summary = summary;
+ this.dashboard = dashboard;
+ this.totalCnt = totalCnt;
+ }
+ }
+
  // One row of the analysis detail listing.
  @Getter
  @AllArgsConstructor
  public static class DetailListEntity {

    private Uid code;
    private Double detectionScore;
    private Clazzes compare;
    private Clazzes target;
    private MapSheet mapSheet;
    private Coordinate center;
    @JsonFormatDttm private ZonedDateTime updatedDttm;

    // Convenience constructor that wraps the raw UUID into a Uid value object.
    public DetailListEntity(
        UUID uuid,
        Double detectionScore,
        Clazzes compare,
        Clazzes target,
        MapSheet mapSheet,
        Coordinate center,
        ZonedDateTime updatedDttm) {
      this.code = new Uid(uuid);
      this.detectionScore = detectionScore;
      this.compare = compare;
      this.target = target;
      this.mapSheet = mapSheet;
      this.center = center;
      this.updatedDttm = updatedDttm;
    }
  }

  // Identifier pair: full UUID string plus its first 8 characters upper-cased.
  @Getter
  @AllArgsConstructor
  public static class Uid {

    private String shortCode;
    private String code;

    public Uid(UUID uuid) {
      // both fields remain null when no uuid is supplied
      if (uuid != null) {
        this.shortCode = uuid.toString().substring(0, 8).toUpperCase();
        this.code = uuid.toString();
      }
    }
  }

  // Map sheet number and name.
  @Getter
  @AllArgsConstructor
  public static class MapSheet {

    private String number;
    private String name;
  }
+
+ // classification info
+ @Getter
+ public static class Clazz {
+
+ private String code;
+ private String name;
+ @JsonIgnore private Double score;
+
+ public Clazz(String code, Double score) {
+ this.code = code;
+ this.score = score;
+ this.name = DetectionClassification.fromString(code).getDesc();
+ }
+
+ public Clazz(String code) {
+ this.code = code;
+ this.name = DetectionClassification.fromString(code).getDesc();
+ }
+ }
+
+ // classification info
+ @Getter
+ public static class Clazzes {
+
+ private DetectionClassification code;
+ private String name;
+
+ @JsonInclude(JsonInclude.Include.NON_NULL)
+ private Double score;
+
+ private Integer order;
+
+ public Clazzes(DetectionClassification classification, Double score) {
+ this.code = classification;
+ this.name = classification.getDesc();
+ this.order = classification.getOrder();
+ this.score = score;
+ }
+
+ public Clazzes(DetectionClassification classification) {
+ this.code = classification;
+ this.name = classification.getDesc();
+ this.order = classification.getOrder();
+ }
+ }
+
  // Point coordinate in WGS84.
  @Getter
  public static class Coordinate {

    private Double lon; // longitude
    private Double lat; // latitude
    private String srid; // Spatial Reference ID identifying the coordinate system; TODO: promote to an enum

    public Coordinate(Double lon, Double lat) {
      this.lon = lon;
      this.lat = lat;
      // fixed to WGS84 for now
      this.srid = "EPSG:4326";
    }
  }
+
+ @Getter
+ public static class Geom {
+
+ UUID uuid;
+ String uid;
+ Integer compareYyyy;
+ Integer targetYyyy;
+ Double cdProb;
+ String classBeforeCd;
+ String classBeforeName;
+ Double classBeforeProb;
+ String classAfterCd;
+ String classAfterName;
+ Double classAfterProb;
+ Long mapSheetNum;
+ String mapSheetName;
+ String subUid;
+ String pnu;
+ String fitState;
+
+ public Geom(
+ UUID uuid,
+ String uid,
+ Integer compareYyyy,
+ Integer targetYyyy,
+ Double cdProb,
+ String classBeforeCd,
+ Double classBeforeProb,
+ String classAfterCd,
+ Double classAfterProb,
+ Long mapSheetNum,
+ String mapSheetName,
+ String subUid,
+ String pnu,
+ String fitState) {
+ this.uuid = uuid;
+ this.uid = uid;
+ this.compareYyyy = compareYyyy;
+ this.targetYyyy = targetYyyy;
+ this.cdProb = cdProb;
+ this.classBeforeCd = classBeforeCd;
+ this.classBeforeName = DetectionClassification.fromString(classBeforeCd).getDesc();
+ this.classBeforeProb = classBeforeProb;
+ this.classAfterCd = classAfterCd;
+ this.classAfterName = DetectionClassification.fromString(classAfterCd).getDesc();
+ this.classAfterProb = classAfterProb;
+ this.mapSheetNum = mapSheetNum;
+ this.mapSheetName = mapSheetName;
+ this.subUid = subUid;
+ this.pnu = pnu;
+ this.fitState = fitState;
+ }
+
+ @JsonProperty("fitState")
+ public String getFitState() {
+ return this.fitState == null ? null : this.fitState;
+ }
+
+ // @JsonIgnore String gemoStr;
+ // @JsonIgnore String geomCenterStr;
+ // JsonNode gemo;
+ // JsonNode geomCenter;
+
+ @JsonProperty("fitStateName")
+ public String fitStateName() {
+ return ImageryFitStatus.getDescByCode(this.fitState);
+ }
+ }
+
  /** Search conditions and paging for the analysis-result list. */
  @Schema(name = "InferenceResultSearchReq", description = "분석결과 목록 요청 정보")
  @Getter
  @Setter
  @NoArgsConstructor
  @AllArgsConstructor
  public static class SearchReq {

    // search conditions
    private String statCode;
    private String title;

    // paging parameters
    private int page = 0;
    private int size = 20;

    // Builds an unsorted Pageable from page/size.
    public Pageable toPageable() {
      return PageRequest.of(page, size);
    }
  }
+
+ @Schema(name = "InferenceResultSearchReq", description = "분석결과 목록 요청 정보")
+ @Getter
+ @Setter
+ @NoArgsConstructor
+ @AllArgsConstructor
+ public static class SearchGeoReq {
+
+ // 기준년도
+ private String targetClass;
+ // 비교년도
+ private String compareClass;
+ // 분석도엽
+ private Long mapSheetNum;
+
+ // 페이징 파라미터
+ private int page = 0;
+ private int size = 20;
+ private String sort;
+
+ public Pageable toPageable() {
+ if (sort != null && !sort.isEmpty()) {
+ String[] sortParams = sort.split(",");
+ String property = sortParams[0];
+ Sort.Direction direction =
+ sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC;
+ return PageRequest.of(page, size, Sort.by(direction, property));
+ }
+ return PageRequest.of(page, size);
+ }
+ }
+
  /** Batch/job linkage for one inference map sheet across the three models (M1–M3). */
  @Getter
  @Setter
  @AllArgsConstructor
  @NoArgsConstructor
  public static class InferenceBatchSheet {

    private Long id;
    private UUID uuid;
    private Long m1BatchId;
    private Long m2BatchId;
    private Long m3BatchId;
    private String status;
    private String runningModelType;
    private UUID m1ModelUuid;
    private UUID m2ModelUuid;
    private UUID m3ModelUuid;
    private String uid;
  }
+
+ @Schema(name = "AnalResultInfo", description = "추론결과 기본정보")
+ @Getter
+ @Setter
+ @AllArgsConstructor
+ @NoArgsConstructor
+ public static class AnalResultInfo {
+
+ private String analTitle;
+ private String modelVer1;
+ private String modelVer2;
+ private String modelVer3;
+ private Integer compareYyyy;
+ private Integer targetYyyy;
+ private String detectOption;
+ private String mapSheetScope;
+ @JsonFormatDttm private ZonedDateTime inferStartDttm;
+ @JsonFormatDttm private ZonedDateTime inferEndDttm;
+ private Integer stage;
+ private String elapsedDuration;
+ private String subUid;
+ private Boolean applyYn;
+ @JsonFormatDttm private ZonedDateTime applyDttm;
+
+ private String bboxGeom;
+ private String bboxCenterPoint;
+ private UUID inferenceUuid;
+
+ public AnalResultInfo(
+ String analTitle,
+ String modelVer1,
+ String modelVer2,
+ String modelVer3,
+ Integer compareYyyy,
+ Integer targetYyyy,
+ String detectOption,
+ String mapSheetScope,
+ ZonedDateTime inferStartDttm,
+ ZonedDateTime inferEndDttm,
+ Integer stage,
+ String subUid,
+ Boolean applyYn,
+ ZonedDateTime applyDttm,
+ UUID inferenceUuid) {
+ this.analTitle = analTitle;
+ this.modelVer1 = modelVer1;
+ this.modelVer2 = modelVer2;
+ this.modelVer3 = modelVer3;
+ this.compareYyyy = compareYyyy;
+ this.targetYyyy = targetYyyy;
+ this.detectOption = DetectOption.getDescByCode(detectOption);
+ this.mapSheetScope = MapSheetScope.getDescByCode(mapSheetScope);
+ this.inferStartDttm = inferStartDttm;
+ this.inferEndDttm = inferEndDttm;
+ this.stage = stage;
+ this.subUid = subUid;
+ this.applyYn = applyYn;
+ this.applyDttm = applyDttm;
+ Duration elapsed =
+ (inferStartDttm != null && inferEndDttm != null)
+ ? Duration.between(inferStartDttm, inferEndDttm)
+ : null;
+
+ if (elapsed != null) {
+ long seconds = elapsed.getSeconds();
+ long abs = Math.abs(seconds);
+
+ long h = abs / 3600;
+ long m = (abs % 3600) / 60;
+ long s = abs % 60;
+
+ this.elapsedDuration = String.format("%02d:%02d:%02d", h, m, s);
+ }
+ this.inferenceUuid = inferenceUuid;
+ }
+
+ @JsonProperty("bboxGeom")
+ public JsonNode getBboxGeom() {
+ ObjectMapper mapper = new ObjectMapper();
+ try {
+ if (this.bboxGeom != null) {
+ return mapper.readTree(this.bboxGeom);
+ } else {
+ return null;
+ }
+ } catch (JsonProcessingException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @JsonProperty("bboxCenterPoint")
+ public JsonNode getBboxCenterPoint() {
+ ObjectMapper mapper = new ObjectMapper();
+ try {
+ if (this.bboxCenterPoint != null) {
+ return mapper.readTree(this.bboxCenterPoint);
+ } else {
+ return null;
+ }
+ } catch (JsonProcessingException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public Boolean getApplyYn() {
+ return this.applyYn != null && this.applyYn;
+ }
+ }
+
  /** Raw bounding-box geometry strings (JSON text) for an inference result. */
  @Getter
  @Setter
  @NoArgsConstructor
  @AllArgsConstructor
  public static class BboxPointDto {

    private String bboxGeom;
    private String bboxCenterPoint;
  }

  /** Scene file reference: storage path and size. */
  @Getter
  @Setter
  @NoArgsConstructor
  @AllArgsConstructor
  public static class Scene {

    private String path;
    private Integer size;
  }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java
new file mode 100644
index 0000000..2ee0526
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java
@@ -0,0 +1,679 @@
+package com.kamco.cd.kamcoback.inference.dto;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
+import com.kamco.cd.kamcoback.common.utils.interfaces.EnumValid;
+import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
+import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.constraints.NotBlank;
+import jakarta.validation.constraints.NotNull;
+import java.time.Duration;
+import java.time.LocalDate;
+import java.time.ZonedDateTime;
+import java.util.Arrays;
+import java.util.List;
+import java.util.UUID;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+
+public class InferenceResultDto {
+
+ /** 분석대상 도엽 enum */
+ @Getter
+ @AllArgsConstructor
+ public enum MapSheetScope implements EnumType {
+ ALL("전체"),
+ PART("부분"),
+ ;
+
+ private final String desc;
+
+ public static MapSheetScope fromCode(String code) {
+ return Arrays.stream(values()).filter(v -> v.name().equals(code)).findFirst().orElse(null);
+ }
+
+ public static String getDescByCode(String code) {
+ return fromCode(code).getDesc();
+ }
+
+ @Override
+ public String getId() {
+ return name();
+ }
+
+ @Override
+ public String getText() {
+ return desc;
+ }
+ }
+
+ /** 탐지 데이터 옵션 dto */
+ @Getter
+ @AllArgsConstructor
+ public enum DetectOption implements EnumType {
+ EXCL("추론제외"),
+ PREV("이전 년도 도엽 사용"),
+ ;
+ private final String desc;
+
+ public static DetectOption fromCode(String code) {
+ return Arrays.stream(values()).filter(v -> v.name().equals(code)).findFirst().orElse(null);
+ }
+
+ public static String getDescByCode(String code) {
+ return fromCode(code).getDesc();
+ }
+
+ @Override
+ public String getId() {
+ return name();
+ }
+
+ @Override
+ public String getText() {
+ return desc;
+ }
+ }
+
+ @Getter
+ @AllArgsConstructor
+ public enum Status implements EnumType {
+ READY("대기"),
+ IN_PROGRESS("진행중"),
+ END("완료"),
+ FORCED_END("강제종료");
+ private final String desc;
+
+ public static Status fromCode(String code) {
+ return Arrays.stream(values()).filter(v -> v.name().equals(code)).findFirst().orElse(null);
+ }
+
+ public static String getDescByCode(String code) {
+ return fromCode(code).getDesc();
+ }
+
+ @Override
+ public String getId() {
+ return name();
+ }
+
+ @Override
+ public String getText() {
+ return desc;
+ }
+ }
+
+ @Getter
+ @AllArgsConstructor
+ public enum ServerStatus implements EnumType {
+ SAFETY("원활"),
+ CAUTION("주의"),
+ FAILUR("장애"),
+ ;
+
+ private final String desc;
+
+ @Override
+ public String getId() {
+ return name();
+ }
+
+ @Override
+ public String getText() {
+ return desc;
+ }
+ }
+
+ /** 목록조회 dto */
+ @Schema(name = "ResultList", description = "추론관리 목록")
+ @Getter
+ @Setter
+ @AllArgsConstructor
+ @NoArgsConstructor
+ public static class ResultList {
+
+ private UUID uuid;
+
+ @Schema(description = "제목")
+ private String title;
+
+ @Schema(description = "회차")
+ private Integer stage;
+
+ @Schema(description = "상태")
+ private String status;
+
+ @Schema(description = "분석 도엽")
+ private String mapSheetCnt;
+
+ @Schema(description = "탐지건수")
+ private Long detectingCnt;
+
+ @Schema(description = "시작일시")
+ @JsonFormatDttm
+ private ZonedDateTime startTime;
+
+ @Schema(description = "종료일시")
+ @JsonFormatDttm
+ private ZonedDateTime endTime;
+
+ @Schema(description = "반영여부")
+ private Boolean applyYn;
+
+ @Schema(description = "반영일시")
+ @JsonFormatDttm
+ private ZonedDateTime applyDttm;
+
+ @Schema(description = "비교년도")
+ private Integer compareYyyy;
+
+ @Schema(description = "기준년도")
+ private Integer targetYyyy;
+
+ @Schema(description = "uid")
+ private String uid;
+
+ @Schema(description = "uid 앞 8자리")
+ @JsonProperty("subUid")
+ public String subUid() {
+ return this.uid.substring(0, 8).toUpperCase();
+ }
+
+ @Schema(description = "상태명")
+ @JsonProperty("statusName")
+ public String statusName() {
+ return Status.getDescByCode(this.status);
+ }
+
+ @Schema(description = "소요시간")
+ @JsonProperty("elapsedTim")
+ public String getElapsedTime() {
+ if (this.startTime == null || this.endTime == null) {
+ return null;
+ }
+ ZonedDateTime start = this.startTime;
+ ZonedDateTime end = this.endTime;
+ Duration d = Duration.between(start, end);
+
+ if (d.isNegative()) {
+ d = d.negated();
+ }
+
+ long s = d.getSeconds();
+ long h = s / 3600;
+ long m = (s % 3600) / 60;
+ long sec = s % 60;
+
+ return String.format("%d시간 %d분 %d초", h, m, sec);
+ }
+ }
+
+ /** 목록조회 검색 조건 dto */
+ @Getter
+ @Setter
+ @NoArgsConstructor
+ @AllArgsConstructor
+ public static class SearchListReq {
+
+ // 검색 조건
+ private String applyYn;
+ private LocalDate strtDttm;
+ private LocalDate endDttm;
+ private String title;
+
+ // 페이징 파라미터
+ private int page = 0;
+ private int size = 20;
+
+ public Pageable toPageable() {
+ return PageRequest.of(page, size);
+ }
+ }
+
+ /** 변화탐지 실행 정보 저장 요청 정보 */
+ @Getter
+ @Setter
+ @NoArgsConstructor
+ @AllArgsConstructor
+ public static class RegReq {
+
+ @Schema(description = "제목", example = "2023-2024 변화탐지 테스트")
+ @NotBlank
+ private String title;
+
+ @Schema(description = "M1", example = "b40e0f68-c1d8-49fc-93f9-a36270093861")
+ @NotNull
+ private UUID model1Uuid;
+
+ @Schema(description = "M2", example = "ec92b7d2-b5a3-4915-9bdf-35fb3ca8ad27")
+ @NotNull
+ private UUID model2Uuid;
+
+ @Schema(description = "M3", example = "37f45782-8ccf-4cf6-911c-a055a1510d39")
+ @NotNull
+ private UUID model3Uuid;
+
+ @Schema(description = "비교년도", example = "2023")
+ @NotNull
+ private Integer compareYyyy;
+
+ @Schema(description = "탐지년도", example = "2024")
+ @NotNull
+ private Integer targetYyyy;
+
+ @Schema(description = "분석대상 도엽 - 전체(ALL), 부분(PART)", example = "PART")
+ @NotBlank
+ @EnumValid(enumClass = MapSheetScope.class, message = "분석대상 도엽 옵션은 '전체', '부분' 만 사용 가능합니다.")
+ private String mapSheetScope;
+
+ @Schema(description = "탐지 데이터 옵션 - 추론제외(EXCL), 이전 년도 도엽 사용(PREV)", example = "EXCL")
+ @NotBlank
+ @EnumValid(
+ enumClass = DetectOption.class,
+ message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.")
+ private String detectOption;
+
+ @Schema(description = "5k 도협 번호 목록", example = "[33605,33606, 33610, 34802, 35603, 35611]")
+ @NotNull
+ private List mapSheetNum;
+ }
+
+ @Getter
+ @Setter
+ public static class MapSheetNumDto {
+
+ private String mapSheetNum;
+ private String mapSheetName;
+ }
+
+ @Getter
+ @Setter
+ @NoArgsConstructor
+ @Schema(name = "InferenceStatusDetailDto", description = "추론(변화탐지) 진행상태")
+ public static class InferenceStatusDetailDto {
+
+ @Schema(description = "탐지대상 도엽수")
+ private Long detectingCnt;
+
+ @Schema(description = "모델1 분석 대기")
+ // Per-model job counters, populated by a query projection (see the
+ // all-args constructor below); values may be null when not yet computed.
+ private Integer m1PendingJobs;
+
+ @Schema(description = "모델2 분석 대기")
+ private Integer m2PendingJobs;
+
+ @Schema(description = "모델3 분석 대기")
+ private Integer m3PendingJobs;
+
+ @Schema(description = "모델1 분석 진행중")
+ private Integer m1RunningJobs;
+
+ @Schema(description = "모델2 분석 진행중")
+ private Integer m2RunningJobs;
+
+ @Schema(description = "모델3 분석 진행중")
+ private Integer m3RunningJobs;
+
+ @Schema(description = "모델1 분석 완료")
+ private Integer m1CompletedJobs;
+
+ @Schema(description = "모델2 분석 완료")
+ private Integer m2CompletedJobs;
+
+ @Schema(description = "모델3 분석 완료")
+ private Integer m3CompletedJobs;
+
+ @Schema(description = "모델1 분석 실패")
+ private Integer m1FailedJobs;
+
+ @Schema(description = "모델2 분석 실패")
+ private Integer m2FailedJobs;
+
+ @Schema(description = "모델3 분석 실패")
+ private Integer m3FailedJobs;
+
+ // NOTE(review): the *ModelStartDttm/*ModelEndDttm fields below are
+ // package-private while every other field is private — presumably an
+ // oversight; confirm no same-package access before tightening.
+ @Schema(description = "모델1 사용시간 시작일시")
+ @JsonFormatDttm
+ ZonedDateTime m1ModelStartDttm;
+
+ @Schema(description = "모델2 사용시간 시작일시")
+ @JsonFormatDttm
+ ZonedDateTime m2ModelStartDttm;
+
+ @Schema(description = "모델3 사용시간 시작일시")
+ @JsonFormatDttm
+ ZonedDateTime m3ModelStartDttm;
+
+ @Schema(description = "모델1 사용시간 종료일시")
+ @JsonFormatDttm
+ ZonedDateTime m1ModelEndDttm;
+
+ @Schema(description = "모델2 사용시간 종료일시")
+ @JsonFormatDttm
+ ZonedDateTime m2ModelEndDttm;
+
+ @Schema(description = "모델3 사용시간 종료일시")
+ @JsonFormatDttm
+ ZonedDateTime m3ModelEndDttm;
+
+ @Schema(description = "변화탐지 제목")
+ private String title;
+
+ @Schema(description = "비교년도")
+ private Integer compareYyyy;
+
+ @Schema(description = "기준년도")
+ private Integer targetYyyy;
+
+ @Schema(description = "회차")
+ private Integer stage;
+
+ @Schema(description = "변화탐지 시작")
+ @JsonFormatDttm
+ private ZonedDateTime inferStartDttm;
+
+ @Schema(description = "변화탐지 종료")
+ @JsonFormatDttm
+ private ZonedDateTime inferEndDttm;
+
+ @Schema(description = "변화탐지 옵션")
+ private String detectOption;
+
+ @Schema(description = "분석도엽")
+ private String mapSheetScope;
+
+ @Schema(description = "모델1 버전")
+ private String modelVer1;
+
+ @Schema(description = "모델2 버전")
+ private String modelVer2;
+
+ @Schema(description = "모델3 버전")
+ private String modelVer3;
+
+ // Total map-sheet count; excluded from JSON, used only by getProgress().
+ // NOTE(review): "탑지" in the description below looks like a typo of
+ // "탐지" — confirm with the API owners before changing the Swagger text.
+ @Schema(description = "탑지 도엽 수")
+ @JsonIgnore
+ private Long totalJobs;
+
+ /**
+  * All-args constructor consumed by a query projection (e.g. QueryDSL
+  * Projections.constructor): the parameter order must match the select
+  * column order exactly — do not reorder.
+  *
+  * NOTE(review): 31 positional parameters is fragile; a builder or an
+  * interface-based projection would be safer. TODO confirm call sites.
+  */
+ public InferenceStatusDetailDto(
+ Long detectingCnt,
+ Integer m1PendingJobs,
+ Integer m2PendingJobs,
+ Integer m3PendingJobs,
+ Integer m1RunningJobs,
+ Integer m2RunningJobs,
+ Integer m3RunningJobs,
+ Integer m1CompletedJobs,
+ Integer m2CompletedJobs,
+ Integer m3CompletedJobs,
+ Integer m1FailedJobs,
+ Integer m2FailedJobs,
+ Integer m3FailedJobs,
+ ZonedDateTime m1ModelStartDttm,
+ ZonedDateTime m2ModelStartDttm,
+ ZonedDateTime m3ModelStartDttm,
+ ZonedDateTime m1ModelEndDttm,
+ ZonedDateTime m2ModelEndDttm,
+ ZonedDateTime m3ModelEndDttm,
+ String title,
+ Integer compareYyyy,
+ Integer targetYyyy,
+ Integer stage,
+ ZonedDateTime inferStartDttm,
+ ZonedDateTime inferEndDttm,
+ String detectOption,
+ String mapSheetScope,
+ String modelVer1,
+ String modelVer2,
+ String modelVer3,
+ Long totalJobs) {
+ this.detectingCnt = detectingCnt;
+ this.m1PendingJobs = m1PendingJobs;
+ this.m2PendingJobs = m2PendingJobs;
+ this.m3PendingJobs = m3PendingJobs;
+ this.m1RunningJobs = m1RunningJobs;
+ this.m2RunningJobs = m2RunningJobs;
+ this.m3RunningJobs = m3RunningJobs;
+ this.m1CompletedJobs = m1CompletedJobs;
+ this.m2CompletedJobs = m2CompletedJobs;
+ this.m3CompletedJobs = m3CompletedJobs;
+ this.m1FailedJobs = m1FailedJobs;
+ this.m2FailedJobs = m2FailedJobs;
+ this.m3FailedJobs = m3FailedJobs;
+ this.m1ModelStartDttm = m1ModelStartDttm;
+ this.m2ModelStartDttm = m2ModelStartDttm;
+ this.m3ModelStartDttm = m3ModelStartDttm;
+ this.m1ModelEndDttm = m1ModelEndDttm;
+ this.m2ModelEndDttm = m2ModelEndDttm;
+ this.m3ModelEndDttm = m3ModelEndDttm;
+ this.title = title;
+ this.compareYyyy = compareYyyy;
+ this.targetYyyy = targetYyyy;
+ this.stage = stage;
+ this.inferStartDttm = inferStartDttm;
+ this.inferEndDttm = inferEndDttm;
+ this.detectOption = detectOption;
+ this.mapSheetScope = mapSheetScope;
+ this.modelVer1 = modelVer1;
+ this.modelVer2 = modelVer2;
+ this.modelVer3 = modelVer3;
+ this.totalJobs = totalJobs;
+ }
+
+ /**
+  * Overall progress (0–100): finished work units (completed + failed per
+  * model) over total work units (map sheets x 3 models).
+  */
+ @Schema(description = "진행률")
+ @JsonProperty("progress")
+ private int getProgress() {
+     long tiles = this.totalJobs == null ? 0L : this.totalJobs; // 도엽수 (map sheets)
+     int models = 3; // 모델 개수 (models per sheet)
+     // The counters are boxed Integers populated by a projection; treat a
+     // null counter as 0 instead of throwing an NPE while unboxing.
+     long completed =
+         zeroIfNull(this.m1CompletedJobs)
+             + zeroIfNull(this.m2CompletedJobs)
+             + zeroIfNull(this.m3CompletedJobs)
+             + zeroIfNull(this.m1FailedJobs)
+             + zeroIfNull(this.m2FailedJobs)
+             + zeroIfNull(this.m3FailedJobs); // 완료수 (finished units)
+
+     long total = tiles * models; // 전체 작업량 (total units)
+     if (completed >= total) {
+         // Also covers total == 0, so the division below never sees zero.
+         return 100;
+     }
+     return (int) ((completed * 100L) / total);
+ }
+
+ /** Null-safe unboxing helper: a missing counter contributes 0. */
+ private static long zeroIfNull(Integer value) {
+     return value == null ? 0L : value;
+ }
+
+ /** Display name of the raw detection-option code, resolved via DetectOption. */
+ @Schema(description = "변화탐지 옵션명")
+ @JsonProperty("detectOptionName")
+ private String getDetectOptionName() {
+     String optionCode = this.detectOption;
+     return DetectOption.getDescByCode(optionCode);
+ }
+
+ /** Display name of the raw map-sheet-scope code, resolved via MapSheetScope. */
+ @Schema(description = "분석도엽 명")
+ @JsonProperty("mapSheetScopeName")
+ private String getMapSheetScopeName() {
+     String scopeCode = this.mapSheetScope;
+     return MapSheetScope.getDescByCode(scopeCode);
+ }
+
+ @Schema(description = "M1 사용시간")
+ @JsonProperty("m1ElapsedTim")
+ public String getM1ElapsedTime() {
+ return formatElapsedTime(this.m1ModelStartDttm, this.m1ModelEndDttm);
+ }
+
+ @Schema(description = "M2 사용시간")
+ @JsonProperty("m2ElapsedTim")
+ public String getM2ElapsedTime() {
+ return formatElapsedTime(this.m2ModelStartDttm, this.m2ModelEndDttm);
+ }
+
+ @Schema(description = "M3 사용시간")
+ @JsonProperty("m3ElapsedTim")
+ public String getM3ElapsedTime() {
+ return formatElapsedTime(this.m3ModelStartDttm, this.m3ModelEndDttm);
+ }
+
+ /**
+  * Formats the absolute elapsed time between two timestamps as
+  * "H시간 M분 S초"; returns null when either endpoint is missing.
+  */
+ private String formatElapsedTime(ZonedDateTime start, ZonedDateTime end) {
+     if (start == null || end == null) {
+         return null;
+     }
+     // abs() covers the case where end precedes start, exactly like the
+     // negate-if-negative idiom it replaces.
+     Duration elapsed = Duration.between(start, end).abs();
+     return String.format(
+         "%d시간 %d분 %d초",
+         elapsed.toHours(), elapsed.toMinutesPart(), elapsed.toSecondsPart());
+ }
+ }
+
+ /**
+  * Inference server resource snapshot plus derived health statuses.
+  *
+  * Raw CPU/memory samples are hidden from JSON (@JsonIgnore); the derived
+  * status/rate accessors are what gets serialized. Status ids mirror the
+  * ServerStatus enum constants (SAFETY / CAUTION / FAILUR), and the
+  * *StatusName accessors return their localized display text.
+  */
+ @Getter
+ @Setter
+ @NoArgsConstructor
+ @AllArgsConstructor
+ public static class InferenceServerStatusDto {
+
+     /** Utilisation (%) at or above which a resource is flagged CAUTION. */
+     private static final float CAUTION_THRESHOLD = 80f;
+
+     private String serverName;
+     @JsonIgnore private float cpu_user;
+     @JsonIgnore private float cpu_system;
+     @JsonIgnore private float memused;
+     private Long kbmemused;
+     private float gpuUtil;
+
+     /** Combined CPU utilisation (user + system), in percent. */
+     public float getCpuUseRate() {
+         return this.cpu_user + this.cpu_system;
+     }
+
+     /** kbmemused is boxed; null (no sample) is treated as 0 to avoid an NPE. */
+     private long kbMemUsed() {
+         return this.kbmemused == null ? 0L : this.kbmemused;
+     }
+
+     /** True when every metric reads zero, i.e. the server reported nothing. */
+     private boolean allMetricsZero() {
+         return getCpuUseRate() + this.gpuUtil + kbMemUsed() == 0;
+     }
+
+     public String getServerStatus() {
+         return allMetricsZero() ? "FAILUR" : "SAFETY";
+     }
+
+     public String getServerStatusName() {
+         return toStatusName(getServerStatus());
+     }
+
+     public String getCpuStatus() {
+         if (getCpuUseRate() >= CAUTION_THRESHOLD) {
+             return "CAUTION";
+         }
+         if (getCpuUseRate() + this.memused == 0) {
+             return "FAILUR";
+         }
+         return "SAFETY";
+     }
+
+     public String getGpuStatus() {
+         if (this.gpuUtil >= CAUTION_THRESHOLD) {
+             return "CAUTION";
+         }
+         // NOTE(review): GPU failure is detected via CPU metrics reading
+         // zero (server unreachable), not via gpuUtil — confirm intent.
+         if (getCpuUseRate() == 0) {
+             return "FAILUR";
+         }
+         return "SAFETY";
+     }
+
+     public String getMemStatus() {
+         if (this.memused >= CAUTION_THRESHOLD) {
+             return "CAUTION";
+         }
+         if (getCpuUseRate() + this.memused == 0) {
+             return "FAILUR";
+         }
+         return "SAFETY";
+     }
+
+     // The *StatusName accessors previously duplicated the threshold logic
+     // above, and for GPU even contradicted it (they tested cpu+mem == 0
+     // while getGpuStatus tested cpu == 0). They now derive the display
+     // text from the single source of truth.
+     public String getCpuStatusName() {
+         return toStatusName(getCpuStatus());
+     }
+
+     public String getGpuStatusName() {
+         return toStatusName(getGpuStatus());
+     }
+
+     public String getMemStatusName() {
+         return toStatusName(getMemStatus());
+     }
+
+     /** Maps a status id ("SAFETY", "CAUTION", "FAILUR") to its display text. */
+     private static String toStatusName(String statusId) {
+         return ServerStatus.valueOf(statusId).getText();
+     }
+ }
+
+ /** Wrapper DTO exposing a single inference server status snapshot. */
+ @Getter
+ @Setter
+ @NoArgsConstructor
+ @AllArgsConstructor
+ public static class InferenceStatusDetailDto2 {
+
+ InferenceServerStatusDto serverStatus;
+ }
+
+ /**
+  * Mutable DTO carrying inference batch state to be persisted: batch
+  * identity, lifecycle timestamps, model file paths, and per-state job
+  * counters for the model identified by runningModelType.
+  */
+ @Getter
+ @Setter
+ @AllArgsConstructor
+ @NoArgsConstructor
+ public static class SaveInferenceAiDto {
+
+ private UUID uuid;
+ private Long batchId;
+ private String status;
+ private String type;
+ private ZonedDateTime inferStartDttm;
+ private ZonedDateTime inferEndDttm;
+ private Long detectEndCnt;
+ private String modelComparePath;
+ private String modelTargetPath;
+ private String modelModelPath;
+ private ZonedDateTime modelStartDttm;
+ private ZonedDateTime modelEndDttm;
+ private Long updateUid;
+ // Which model (e.g. M1/M2/M3) the counters below refer to —
+ // presumably one of the model type codes; TODO confirm against callers.
+ private String runningModelType;
+ private Integer pendingJobs;
+ private Integer runningJobs;
+ private Integer completedJobs;
+ private Integer failedJobs;
+ }
+
+ /** DTO pairing an identifier with the batch ids of the three model runs. */
+ @Getter
+ @Setter
+ public static class InferenceLearnDto {
+
+ private String uid;
+ private Long m1ModelBatchId;
+ private Long m2ModelBatchId;
+ private Long m3ModelBatchId;
+ }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inferface/CodeExpose.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inferface/CodeExpose.java
new file mode 100644
index 0000000..aa50c0e
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inferface/CodeExpose.java
@@ -0,0 +1,10 @@
+package com.kamco.cd.kamcoback.inferface;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Marker annotation for types whose code values should be exposed
+ * (counterpart of {@link CodeHidden}, which targets fields).
+ * Retained at runtime so it can be read via reflection.
+ */
+@Target(ElementType.TYPE)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface CodeExpose {}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inferface/CodeHidden.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inferface/CodeHidden.java
new file mode 100644
index 0000000..7516dfe
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inferface/CodeHidden.java
@@ -0,0 +1,10 @@
+package com.kamco.cd.kamcoback.inferface;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Marker annotation for fields whose code values should be hidden
+ * (counterpart of {@link CodeExpose}, which targets types).
+ * Retained at runtime so it can be read via reflection.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface CodeHidden {}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inferface/EnumType.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inferface/EnumType.java
new file mode 100644
index 0000000..2fa5f21
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inferface/EnumType.java
@@ -0,0 +1,8 @@
+package com.kamco.cd.kamcoback.inferface;
+
+/**
+ * Contract for enums exposed through the API: a stable string id plus a
+ * human-readable display text.
+ */
+public interface EnumType {
+
+ /** Stable identifier of the enum constant (used as the code value). */
+ String getId();
+
+ /** Human-readable display text for the constant. */
+ String getText();
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inferface/JsonFormatDttm.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inferface/JsonFormatDttm.java
new file mode 100644
index 0000000..aabb352
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/inferface/JsonFormatDttm.java
@@ -0,0 +1,19 @@
+package com.kamco.cd.kamcoback.inferface;
+
+import com.fasterxml.jackson.annotation.JacksonAnnotationsInside;
+import com.fasterxml.jackson.annotation.JsonFormat;
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Composite Jackson annotation (via @JacksonAnnotationsInside) that
+ * serializes date-time values as ISO-like strings with a zone offset,
+ * rendered in the Asia/Seoul time zone.
+ */
+@Target({ElementType.FIELD, ElementType.METHOD})
+@Retention(RetentionPolicy.RUNTIME)
+@Documented
+@JacksonAnnotationsInside
+@JsonFormat(
+ shape = JsonFormat.Shape.STRING,
+ pattern = "yyyy-MM-dd'T'HH:mm:ssXXX",
+ timezone = "Asia/Seoul")
+public @interface JsonFormatDttm {}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/core/TrainingDataLabelJobCoreService.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/core/TrainingDataLabelJobCoreService.java
new file mode 100644
index 0000000..640783b
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/core/TrainingDataLabelJobCoreService.java
@@ -0,0 +1,36 @@
+package com.kamco.cd.kamcoback.postgres.core;
+
+import com.kamco.cd.kamcoback.postgres.repository.scheduler.TrainingDataLabelJobRepository;
+import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.InspectorPendingDto;
+import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.Tasks;
+import java.util.List;
+import java.util.UUID;
+import lombok.RequiredArgsConstructor;
+import org.springframework.stereotype.Service;
+
+/**
+ * Thin delegation layer over {@link TrainingDataLabelJobRepository} so that
+ * scheduler services do not depend on the repository directly.
+ *
+ * Raw {@code List} signatures have been replaced with the element types the
+ * repository actually produces/consumes.
+ */
+@Service
+@RequiredArgsConstructor
+public class TrainingDataLabelJobCoreService {
+
+    private final TrainingDataLabelJobRepository trainingDataLabelJobRepository;
+
+    /** Labeling tasks completed yesterday that have no reviewer assigned yet. */
+    public List<Tasks> findCompletedYesterdayUnassigned() {
+        return trainingDataLabelJobRepository.findCompletedYesterdayUnassigned();
+    }
+
+    /** Assigns the given reviewer to every listed assignment in one bulk update. */
+    public void assignReviewerBatch(List<UUID> assignmentUids, String reviewerId) {
+        trainingDataLabelJobRepository.assignReviewerBatch(assignmentUids, reviewerId);
+    }
+
+    /** Per-inspector finished-review counts for a round, least-loaded first. */
+    public List<InspectorPendingDto> findInspectorPendingByRound(Long analUid) {
+        return trainingDataLabelJobRepository.findInspectorPendingByRound(analUid);
+    }
+
+    /** Takes pessimistic write locks on the inspector rows of the given round. */
+    public void lockInspectors(Long analUid, List<String> reviewerIds) {
+        trainingDataLabelJobRepository.lockInspectors(analUid, reviewerIds);
+    }
+
+    /** Marks the given inference-geometry rows as awaiting review. */
+    public void updateGeomUidTestState(List<Long> geomUids) {
+        trainingDataLabelJobRepository.updateGeomUidTestState(geomUids);
+    }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/CommonDateEntity.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/CommonDateEntity.java
new file mode 100644
index 0000000..cb9abe9
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/CommonDateEntity.java
@@ -0,0 +1,34 @@
+package com.kamco.cd.kamcoback.postgres.entity;
+
+import jakarta.persistence.Column;
+import jakarta.persistence.MappedSuperclass;
+import jakarta.persistence.PrePersist;
+import jakarta.persistence.PreUpdate;
+import java.time.ZonedDateTime;
+import lombok.Getter;
+import org.springframework.data.annotation.CreatedDate;
+import org.springframework.data.annotation.LastModifiedDate;
+
+@Getter
+@MappedSuperclass
+public class CommonDateEntity {
+
+ @CreatedDate
+ @Column(name = "created_dttm", updatable = false, nullable = false)
+ private ZonedDateTime createdDate;
+
+ @LastModifiedDate
+ @Column(name = "updated_dttm", nullable = false)
+ private ZonedDateTime modifiedDate;
+
+ @PrePersist
+ protected void onPersist() {
+ this.createdDate = ZonedDateTime.now();
+ this.modifiedDate = ZonedDateTime.now();
+ }
+
+ @PreUpdate
+ protected void onUpdate() {
+ this.modifiedDate = ZonedDateTime.now();
+ }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/LabelingAssignmentEntity.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/LabelingAssignmentEntity.java
new file mode 100644
index 0000000..0e0abea
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/LabelingAssignmentEntity.java
@@ -0,0 +1,69 @@
+package com.kamco.cd.kamcoback.postgres.entity;
+
+import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
+import java.time.ZonedDateTime;
+import java.util.UUID;
+
+/**
+ * Labeling assignment row (tb_labeling_assignment): links an inference
+ * geometry to a labeling worker and (optionally) an inspector, with the
+ * work/inspection state of each.
+ */
+@Entity
+@Table(name = "tb_labeling_assignment")
+public class LabelingAssignmentEntity extends CommonDateEntity {
+
+ @Id
+ @Column(name = "assignment_uid")
+ private UUID assignmentUid;
+
+ @Column(name = "inference_geom_uid")
+ private Long inferenceGeomUid;
+
+ @Column(name = "worker_uid")
+ private String workerUid;
+
+ // Null until a reviewer is assigned by the scheduler job.
+ @Column(name = "inspector_uid")
+ private String inspectorUid;
+
+ @Column(name = "work_state")
+ private String workState;
+
+ // Presumably a 'Y'/'N' flag — TODO confirm the value domain.
+ @Column(name = "stagnation_yn")
+ private Character stagnationYn;
+
+ @Column(name = "assign_group_id")
+ private String assignGroupId;
+
+ @Column(name = "learn_geom_uid")
+ private Long learnGeomUid;
+
+ // Analysis round identifier; assignments are distributed per round.
+ @Column(name = "anal_uid")
+ private Long analUid;
+
+ @Column(name = "inspect_state")
+ private String inspectState;
+
+ @Column(name = "work_stat_dttm")
+ private ZonedDateTime workStatDttm;
+
+ @Column(name = "inspect_stat_dttm")
+ private ZonedDateTime inspectStatDttm;
+
+ /** Maps this row to its transport DTO; audit dates come from the superclass. */
+ public LabelAllocateDto.Basic toDto() {
+ return new LabelAllocateDto.Basic(
+ this.assignmentUid,
+ this.inferenceGeomUid,
+ this.workerUid,
+ this.inspectorUid,
+ this.workState,
+ this.stagnationYn,
+ this.assignGroupId,
+ this.learnGeomUid,
+ this.analUid,
+ super.getCreatedDate(),
+ super.getModifiedDate(),
+ this.inspectState,
+ this.workStatDttm,
+ this.inspectStatDttm);
+ }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/LabelingInspectorEntity.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/LabelingInspectorEntity.java
new file mode 100644
index 0000000..c378179
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/LabelingInspectorEntity.java
@@ -0,0 +1,32 @@
+package com.kamco.cd.kamcoback.postgres.entity;
+
+import com.kamco.cd.kamcoback.label.dto.LabelInspectorDto;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
+import java.util.UUID;
+
+/**
+ * Inspector registration row (tb_labeling_inspector): which inspector is
+ * available for which analysis round.
+ */
+@Entity
+@Table(name = "tb_labeling_inspector")
+public class LabelingInspectorEntity extends CommonDateEntity {
+
+ @Id
+ @Column(name = "operator_uid")
+ private UUID operatorUid;
+
+ // Analysis round identifier.
+ @Column(name = "anal_uid")
+ private Long analUid;
+
+ @Column(name = "inspector_uid")
+ private String inspectorUid;
+
+ /** Maps this row to its transport DTO; audit dates come from the superclass. */
+ public LabelInspectorDto.Basic toDto() {
+ return new LabelInspectorDto.Basic(
+ this.operatorUid,
+ this.analUid,
+ this.inspectorUid,
+ super.getCreatedDate(),
+ super.getModifiedDate());
+ }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapInkx50kEntity.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapInkx50kEntity.java
new file mode 100644
index 0000000..c483e5c
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapInkx50kEntity.java
@@ -0,0 +1,48 @@
+package com.kamco.cd.kamcoback.postgres.entity;
+
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.SequenceGenerator;
+import jakarta.persistence.Table;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+import org.locationtech.jts.geom.Geometry;
+
+/**
+ * 1:50,000-scale map sheet index row (tb_map_inkx_50k) with its boundary
+ * geometry; parent of the 1:5,000 sheets (see MapInkx5kEntity).
+ */
+@Getter
+@Setter
+@Table(name = "tb_map_inkx_50k")
+@Entity
+@NoArgsConstructor
+public class MapInkx50kEntity extends CommonDateEntity {
+
+ @Id
+ @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_inkx_50k_fid_seq_gen")
+ @SequenceGenerator(
+ name = "tb_map_inkx_50k_fid_seq_gen",
+ sequenceName = "tb_map_inkx_50k_fid_seq",
+ allocationSize = 1)
+ private Integer fid;
+
+ @Column(name = "mapidcd_no")
+ private String mapidcdNo;
+
+ @Column(name = "mapid_nm")
+ private String mapidNm;
+
+ @Column(name = "mapid_no")
+ private String mapidNo;
+
+ // Sheet boundary polygon (JTS geometry).
+ @Column(name = "geom")
+ private Geometry geom;
+
+ /** Convenience constructor; fid is assigned from the sequence on persist. */
+ public MapInkx50kEntity(String mapidcdNo, String mapidNm, String mapidNo, Geometry geom) {
+ this.mapidcdNo = mapidcdNo;
+ this.mapidNm = mapidNm;
+ this.mapidNo = mapidNo;
+ this.geom = geom;
+ }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapInkx5kEntity.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapInkx5kEntity.java
new file mode 100644
index 0000000..3246924
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapInkx5kEntity.java
@@ -0,0 +1,75 @@
+package com.kamco.cd.kamcoback.postgres.entity;
+
+import com.kamco.cd.kamcoback.enums.CommonUseStatus;
+import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
+import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.SequenceGenerator;
+import jakarta.persistence.Table;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+import org.locationtech.jts.geom.Geometry;
+
+/**
+ * 1:5,000-scale map sheet index row (tb_map_inkx_5k) with its boundary
+ * geometry, a reference to its parent 1:50,000 sheet, and a flag saying
+ * whether the sheet participates in inference.
+ */
+@Getter
+@Setter
+@Table(name = "tb_map_inkx_5k")
+@Entity
+@NoArgsConstructor
+public class MapInkx5kEntity extends CommonDateEntity {
+
+ @Id
+ @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_inkx_5k_fid_seq_gen")
+ @SequenceGenerator(
+ name = "tb_map_inkx_5k_fid_seq_gen",
+ sequenceName = "tb_map_inkx_5k_fid_seq",
+ allocationSize = 1)
+ private Integer fid;
+
+ @Column(name = "mapidcd_no")
+ private String mapidcdNo;
+
+ @Column(name = "mapid_nm")
+ private String mapidNm;
+
+ // Sheet boundary polygon (JTS geometry).
+ @Column(name = "geom")
+ private Geometry geom;
+
+ // Parent 1:50,000 sheet.
+ @ManyToOne(fetch = FetchType.LAZY)
+ @JoinColumn(name = "fid_k50", referencedColumnName = "fid")
+ private MapInkx50kEntity mapInkx50k;
+
+ // Usage status for inference (USE / EXCEPT / NOT_USE ...), stored as text.
+ @Column(name = "use_inference")
+ @Enumerated(EnumType.STRING)
+ private CommonUseStatus useInference;
+
+ /**
+  * Maps this entity to the MapSheet DTO.
+  * NOTE(review): the name toEntity is misleading — it produces a DTO, not
+  * an entity; consider renaming to toDto (callers must be updated).
+  */
+ public InferenceDetailDto.MapSheet toEntity() {
+ return new MapSheet(mapidcdNo, mapidNm);
+ }
+
+ /** Convenience constructor; new sheets default to inference status USE. */
+ public MapInkx5kEntity(
+ String mapidcdNo, String mapidNm, Geometry geom, MapInkx50kEntity mapInkx50k) {
+ this.mapidcdNo = mapidcdNo;
+ this.mapidNm = mapidNm;
+ this.geom = geom;
+ this.mapInkx50k = mapInkx50k;
+ // Default on creation: USE (of USE / EXCEPT / NOT_USE).
+ this.useInference = CommonUseStatus.USE;
+ }
+
+ /** Updates the inference-usage status of this sheet. */
+ public void updateUseInference(CommonUseStatus useInference) {
+ this.useInference = useInference;
+ }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceGeomEntity.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceGeomEntity.java
new file mode 100644
index 0000000..c0487a1
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataInferenceGeomEntity.java
@@ -0,0 +1,181 @@
+package com.kamco.cd.kamcoback.postgres.entity;
+
+import com.kamco.cd.kamcoback.inference.dto.DetectionClassification;
+import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
+import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Clazzes;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
+import jakarta.validation.constraints.Size;
+import java.time.ZonedDateTime;
+import java.util.UUID;
+import lombok.Getter;
+import lombok.Setter;
+import org.hibernate.annotations.ColumnDefault;
+import org.locationtech.jts.geom.Geometry;
+
+/**
+ * Inference result geometry row (tb_map_sheet_anal_data_inference_geom):
+ * one detected change polygon with its before/after classification codes
+ * and probabilities, plus labeling / inspection state columns.
+ *
+ * Note: this entity manages its own created/updated columns and does not
+ * extend CommonDateEntity.
+ */
+@Getter
+@Setter
+@Entity
+@Table(name = "tb_map_sheet_anal_data_inference_geom")
+public class MapSheetAnalDataInferenceGeomEntity {
+
+ @Id
+ @GeneratedValue(strategy = GenerationType.IDENTITY)
+ @Column(name = "geo_uid")
+ private Long geoUid;
+
+ // Change-detection confidence for this polygon.
+ @Column(name = "cd_prob")
+ private Double cdProb;
+
+ @Size(max = 40)
+ @Column(name = "class_before_cd", length = 40)
+ private String classBeforeCd;
+
+ @Column(name = "class_before_prob")
+ private Double classBeforeProb;
+
+ @Size(max = 40)
+ @Column(name = "class_after_cd", length = 40)
+ private String classAfterCd;
+
+ @Column(name = "class_after_prob")
+ private Double classAfterProb;
+
+ @Column(name = "map_sheet_num")
+ private Long mapSheetNum;
+
+ @Column(name = "compare_yyyy")
+ private Integer compareYyyy;
+
+ @Column(name = "target_yyyy")
+ private Integer targetYyyy;
+
+ @Column(name = "area")
+ private Double area;
+
+ @Size(max = 100)
+ @Column(name = "geo_type", length = 100)
+ private String geoType;
+
+ @Column(name = "data_uid")
+ private Long dataUid;
+
+ @ColumnDefault("now()")
+ @Column(name = "created_dttm")
+ private ZonedDateTime createdDttm;
+
+ @Column(name = "created_uid")
+ private Long createdUid;
+
+ @ColumnDefault("now()")
+ @Column(name = "updated_dttm")
+ private ZonedDateTime updatedDttm;
+
+ @Column(name = "updated_uid")
+ private Long updatedUid;
+
+ @ColumnDefault("0")
+ @Column(name = "geom_cnt")
+ private Long geomCnt;
+
+ // Parcel number (PNU); defaults to 0 when unknown.
+ @ColumnDefault("0")
+ @Column(name = "pnu")
+ private Long pnu;
+
+ // fit / label / test state triplets: a state code plus its change time.
+ @Size(max = 20)
+ @Column(name = "fit_state", length = 20)
+ private String fitState;
+
+ @ColumnDefault("now()")
+ @Column(name = "fit_state_dttm")
+ private ZonedDateTime fitStateDttm;
+
+ @Column(name = "labeler_uid")
+ private Long labelerUid;
+
+ @Size(max = 20)
+ @ColumnDefault("'0'")
+ @Column(name = "label_state", length = 20)
+ private String labelState;
+
+ @ColumnDefault("now()")
+ @Column(name = "label_state_dttm")
+ private ZonedDateTime labelStateDttm;
+
+ @Column(name = "tester_uid")
+ private Long testerUid;
+
+ @Size(max = 20)
+ @ColumnDefault("'0'")
+ @Column(name = "test_state", length = 20)
+ private String testState;
+
+ @ColumnDefault("now()")
+ @Column(name = "test_state_dttm")
+ private ZonedDateTime testStateDttm;
+
+ @Column(name = "fit_state_cmmnt", length = Integer.MAX_VALUE)
+ private String fitStateCmmnt;
+
+ @Column(name = "ref_map_sheet_num")
+ private Long refMapSheetNum;
+
+ @ColumnDefault("uuid_generate_v4()")
+ @Column(name = "uuid")
+ private UUID uuid;
+
+ @Column(name = "stage")
+ private Integer stage;
+
+ @Column(name = "file_created_yn")
+ private Boolean fileCreatedYn;
+
+ @Column(name = "geom", columnDefinition = "geometry")
+ private Geometry geom;
+
+ // Centroid of geom; expected to be a Point (see cast in toEntity below).
+ @Column(name = "geom_center", columnDefinition = "geometry")
+ private Geometry geomCenter;
+
+ @Column(name = "before_geom", columnDefinition = "geometry")
+ private Geometry beforeGeom;
+
+ @Column(name = "file_created_dttm")
+ private ZonedDateTime fileCreatedDttm;
+
+ @Column(name = "result_uid")
+ private String resultUid;
+
+ // 1:5,000 map sheet this geometry belongs to.
+ @ManyToOne(fetch = FetchType.EAGER)
+ @JoinColumn(name = "map_5k_id", referencedColumnName = "fid")
+ private MapInkx5kEntity map5k;
+
+ @Column(name = "label_send_dttm")
+ private ZonedDateTime labelSendDttm;
+
+ // Presumably a 'Y'/'N' lock flag — TODO confirm the value domain.
+ @Column(name = "lock_yn")
+ private String lockYn;
+
+ /**
+  * Maps this row to the detail-list DTO, resolving the raw before/after
+  * class codes to DetectionClassification values.
+  * NOTE(review): despite the name this produces a DTO, not an entity.
+  */
+ public InferenceDetailDto.DetailListEntity toEntity() {
+ DetectionClassification classification = DetectionClassification.fromString(classBeforeCd);
+ Clazzes comparedClazz = new Clazzes(classification, classBeforeProb);
+ DetectionClassification classification1 = DetectionClassification.fromString(classAfterCd);
+ Clazzes targetClazz = new Clazzes(classification1, classAfterProb);
+ InferenceDetailDto.MapSheet mapSheet = map5k != null ? map5k.toEntity() : null;
+
+ InferenceDetailDto.Coordinate coordinate = null;
+ if (geomCenter != null) {
+ // NOTE(review): assumes geom_center is always a JTS Point — any other
+ // geometry type would throw ClassCastException; confirm the writer.
+ org.locationtech.jts.geom.Point point = (org.locationtech.jts.geom.Point) geomCenter;
+ coordinate = new InferenceDetailDto.Coordinate(point.getX(), point.getY());
+ }
+
+ return new InferenceDetailDto.DetailListEntity(
+ uuid, cdProb, comparedClazz, targetClazz, mapSheet, coordinate, createdDttm);
+ }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/repository/TrainingDataLabelJobRepository.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/repository/TrainingDataLabelJobRepository.java
new file mode 100644
index 0000000..b6f83e5
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/repository/TrainingDataLabelJobRepository.java
@@ -0,0 +1,23 @@
+package com.kamco.cd.kamcoback.postgres.repository.scheduler;
+
+import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
+import com.kamco.cd.kamcoback.postgres.entity.LabelingInspectorEntity;
+import jakarta.persistence.LockModeType;
+import java.util.List;
+import java.util.UUID;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Lock;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+
+/**
+ * Repository for labeling assignments and inspector rows used by the
+ * reviewer-assignment scheduler. Entity/id generics restored to match the
+ * custom Impl (LabelingAssignmentEntity with a UUID primary key).
+ *
+ * NOTE(review): the package is ...repository.scheduler but the file path is
+ * .../postgres/repository/ — confirm the directory matches the package.
+ */
+public interface TrainingDataLabelJobRepository
+    extends JpaRepository<LabelingAssignmentEntity, UUID>, TrainingDataLabelJobRepositoryCustom {
+
+  /**
+   * Locks the inspector rows of the given round (PESSIMISTIC_WRITE) so that
+   * concurrent scheduler runs cannot assign work to the same inspectors.
+   * @Param added so the named query parameters bind without -parameters.
+   */
+  @Lock(LockModeType.PESSIMISTIC_WRITE)
+  @Query(
+      """
+      select r
+      from LabelingInspectorEntity r
+      where r.analUid = :analUid
+      and r.inspectorUid in :inspectorUids
+      """)
+  List<LabelingInspectorEntity> lockInspectors(
+      @Param("analUid") Long analUid, @Param("inspectorUids") List<String> inspectorUids);
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/repository/TrainingDataLabelJobRepositoryCustom.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/repository/TrainingDataLabelJobRepositoryCustom.java
new file mode 100644
index 0000000..cb824f9
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/repository/TrainingDataLabelJobRepositoryCustom.java
@@ -0,0 +1,17 @@
+package com.kamco.cd.kamcoback.postgres.repository.scheduler;
+
+import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.InspectorPendingDto;
+import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.Tasks;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Custom (QueryDSL) operations for the reviewer-assignment scheduler.
+ * Raw List signatures replaced with the element types produced/consumed by
+ * the Impl.
+ */
+public interface TrainingDataLabelJobRepositoryCustom {
+
+    /** Labeling tasks completed yesterday with no inspector assigned. */
+    List<Tasks> findCompletedYesterdayUnassigned();
+
+    /** Per-inspector finished-review counts for a round, least-loaded first. */
+    List<InspectorPendingDto> findInspectorPendingByRound(Long analUid);
+
+    /** Bulk-assigns one reviewer to every listed assignment. */
+    void assignReviewerBatch(List<UUID> assignmentUids, String reviewerId);
+
+    /** Bulk-marks the given geometry rows as awaiting review. */
+    void updateGeomUidTestState(List<Long> geomUids);
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/repository/TrainingDataLabelJobRepositoryImpl.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/repository/TrainingDataLabelJobRepositoryImpl.java
new file mode 100644
index 0000000..f1bfe27
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/postgres/repository/TrainingDataLabelJobRepositoryImpl.java
@@ -0,0 +1,119 @@
+package com.kamco.cd.kamcoback.postgres.repository.scheduler;
+
+import static com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity.labelingAssignmentEntity;
+import static com.kamco.cd.kamcoback.postgres.entity.QLabelingInspectorEntity.labelingInspectorEntity;
+import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
+
+import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InspectState;
+import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelState;
+import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
+import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.InspectorPendingDto;
+import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.Tasks;
+import com.querydsl.core.types.Projections;
+import com.querydsl.core.types.dsl.BooleanExpression;
+import com.querydsl.jpa.impl.JPAQueryFactory;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.util.List;
+import java.util.UUID;
+import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
+import org.springframework.stereotype.Repository;
+
+/**
+ * QueryDSL implementation of the scheduler's custom repository operations.
+ * Changes from the original: raw List signatures replaced with typed ones,
+ * and the unused NULL_STRING field (plus its Expressions/StringExpression
+ * imports) removed as dead code.
+ */
+@Repository
+public class TrainingDataLabelJobRepositoryImpl extends QuerydslRepositorySupport
+    implements TrainingDataLabelJobRepositoryCustom {
+
+    private final JPAQueryFactory queryFactory;
+
+    public TrainingDataLabelJobRepositoryImpl(JPAQueryFactory queryFactory) {
+        super(LabelingAssignmentEntity.class);
+        this.queryFactory = queryFactory;
+    }
+
+    /**
+     * Tasks whose labeling finished yesterday (Asia/Seoul calendar day) and
+     * which have no inspector assigned yet, ordered for stable distribution.
+     */
+    @Override
+    public List<Tasks> findCompletedYesterdayUnassigned() {
+        ZoneId zone = ZoneId.of("Asia/Seoul");
+        ZonedDateTime todayStart = ZonedDateTime.now(zone).toLocalDate().atStartOfDay(zone);
+        ZonedDateTime yesterdayStart = todayStart.minusDays(1);
+
+        // Half-open interval [yesterdayStart, todayStart).
+        BooleanExpression isYesterday =
+            labelingAssignmentEntity
+                .workStatDttm
+                .goe(yesterdayStart)
+                .and(labelingAssignmentEntity.workStatDttm.lt(todayStart));
+
+        return queryFactory
+            .select(
+                Projections.constructor(
+                    Tasks.class,
+                    labelingAssignmentEntity.assignmentUid,
+                    labelingAssignmentEntity.inferenceGeomUid,
+                    labelingAssignmentEntity.analUid))
+            .from(labelingAssignmentEntity)
+            .where(
+                labelingAssignmentEntity.workState.in(LabelState.SKIP.getId(), LabelState.DONE.getId()),
+                labelingAssignmentEntity.inspectorUid.isNull(),
+                isYesterday)
+            .orderBy(
+                labelingAssignmentEntity.analUid.asc(),
+                labelingAssignmentEntity.assignGroupId.asc(),
+                labelingAssignmentEntity.inferenceGeomUid.asc())
+            .fetch();
+    }
+
+    /**
+     * Per-inspector count of finished reviews for the round, ascending, so
+     * work is handed to the least-loaded inspector first and does not pile
+     * up on one person. Inspectors with no reviews are kept via left join.
+     *
+     * @param analUid analysis round identifier
+     * @return inspector ids with their finished-review counts
+     */
+    @Override
+    public List<InspectorPendingDto> findInspectorPendingByRound(Long analUid) {
+        return queryFactory
+            .select(
+                Projections.constructor(
+                    InspectorPendingDto.class,
+                    labelingInspectorEntity.inspectorUid,
+                    labelingAssignmentEntity.assignmentUid.count()))
+            .from(labelingInspectorEntity)
+            .leftJoin(labelingAssignmentEntity)
+            .on(
+                labelingInspectorEntity.inspectorUid.eq(labelingAssignmentEntity.inspectorUid),
+                labelingAssignmentEntity.inspectState.in(
+                    InspectState.EXCEPT.getId(), InspectState.COMPLETE.getId()))
+            .where(labelingInspectorEntity.analUid.eq(analUid))
+            .groupBy(labelingInspectorEntity.inspectorUid)
+            .orderBy(labelingAssignmentEntity.assignmentUid.count().asc())
+            .fetch();
+    }
+
+    /**
+     * Bulk update assigning one reviewer to many assignments.
+     * NOTE(review): bulk DML bypasses the persistence context — callers must
+     * not rely on already-loaded LabelingAssignmentEntity instances.
+     *
+     * @param assignmentUids assignments to update
+     * @param reviewerId inspector to assign
+     */
+    @Override
+    public void assignReviewerBatch(List<UUID> assignmentUids, String reviewerId) {
+        queryFactory
+            .update(labelingAssignmentEntity)
+            .set(labelingAssignmentEntity.inspectorUid, reviewerId)
+            .set(labelingAssignmentEntity.inspectState, InspectState.UNCONFIRM.getId())
+            .set(labelingAssignmentEntity.modifiedDate, ZonedDateTime.now())
+            .where(labelingAssignmentEntity.assignmentUid.in(assignmentUids))
+            .execute();
+    }
+
+    /** Bulk-marks the given geometry rows as awaiting review (UNCONFIRM). */
+    @Override
+    public void updateGeomUidTestState(List<Long> geomUids) {
+        queryFactory
+            .update(mapSheetAnalDataInferenceGeomEntity)
+            .set(mapSheetAnalDataInferenceGeomEntity.testState, InspectState.UNCONFIRM.getId())
+            .set(mapSheetAnalDataInferenceGeomEntity.updatedDttm, ZonedDateTime.now())
+            .where(mapSheetAnalDataInferenceGeomEntity.geoUid.in(geomUids))
+            .execute();
+    }
+}
diff --git a/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/service/TrainingDataLabelJobService.java b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/service/TrainingDataLabelJobService.java
new file mode 100644
index 0000000..5f9a98a
--- /dev/null
+++ b/label/label-to-review/src/main/java/com/kamco/cd/kamcoback/service/TrainingDataLabelJobService.java
@@ -0,0 +1,112 @@
+package com.kamco.cd.kamcoback.scheduler.service;
+
+import com.kamco.cd.kamcoback.postgres.core.TrainingDataLabelJobCoreService;
+import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.InspectorPendingDto;
+import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.Tasks;
+import jakarta.transaction.Transactional;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.stream.Collectors;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.log4j.Log4j2;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Service;
+
+@Log4j2
+@Service
+@RequiredArgsConstructor
+public class TrainingDataLabelJobService {
+
+ private final TrainingDataLabelJobCoreService trainingDataLabelJobCoreService;
+
+ @Value("${spring.profiles.active}")
+ private String profile;
+
+ private boolean isLocalProfile() {
+ return "local".equalsIgnoreCase(profile);
+ }
+
+ @Transactional
+ @Scheduled(cron = "0 0 0 * * *")
+ public void assignReviewerYesterdayLabelComplete() {
+
+ if (isLocalProfile()) {
+ return;
+ }
+
+ try {
+ List tasks = trainingDataLabelJobCoreService.findCompletedYesterdayUnassigned();
+
+ if (tasks.isEmpty()) {
+ return;
+ }
+
+ // 회차별로 그룹핑
+ Map> taskByRound =
+ tasks.stream().collect(Collectors.groupingBy(Tasks::getAnalUid));
+
+ // 회차별 분배
+ for (Map.Entry> entry : taskByRound.entrySet()) {
+ Long analUid = entry.getKey();
+ List analTasks = entry.getValue();
+
+ // pending 계산
+ List pendings =
+ trainingDataLabelJobCoreService.findInspectorPendingByRound(analUid);
+
+ if (pendings.isEmpty()) {
+ continue;
+ }
+
+ List reviewerIds =
+ pendings.stream().map(InspectorPendingDto::getInspectorUid).toList();
+
+ // Lock 걸릴 수 있기 때문에 엔티티 조회하는 Repository 에서 구현
+ trainingDataLabelJobCoreService.lockInspectors(analUid, reviewerIds);
+
+ // 균등 분배
+ Map> assignMap = distributeByLeastPending(analTasks, reviewerIds);
+
+ // reviewer별 batch update
+ assignMap.forEach(
+ (reviewerId, assignedTasks) -> {
+ if (assignedTasks.isEmpty()) {
+ return;
+ }
+
+ List assignmentUids =
+ assignedTasks.stream().map(Tasks::getAssignmentUid).toList();
+ trainingDataLabelJobCoreService.assignReviewerBatch(assignmentUids, reviewerId);
+
+ List geomUids = assignedTasks.stream().map(Tasks::getInferenceUid).toList();
+ trainingDataLabelJobCoreService.updateGeomUidTestState(geomUids);
+ });
+ }
+ } catch (Exception e) {
+ log.error("배치 처리 중 예외", e);
+ }
+ }
+
+ private Map> distributeByLeastPending(
+ List tasks, List reviewerIds) {
+ Map> result = new LinkedHashMap<>();
+
+ // 순서 유지 중요 (ASC 정렬된 상태)
+ for (String reviewerId : reviewerIds) {
+ result.put(reviewerId, new ArrayList<>());
+ }
+
+ int reviewerCount = reviewerIds.size();
+
+ for (int i = 0; i < tasks.size(); i++) {
+ String reviewerId = reviewerIds.get(i % reviewerCount);
+ result.get(reviewerId).add(tasks.get(i));
+ }
+
+ return result;
+ }
+}
diff --git a/label/label-to-review/src/main/resources/application.yml b/label/label-to-review/src/main/resources/application.yml
new file mode 100644
index 0000000..5d1aadc
--- /dev/null
+++ b/label/label-to-review/src/main/resources/application.yml
@@ -0,0 +1,4 @@
+server:
+ port: 9080
+
+
diff --git a/label/label-to-review/src/main/resources/application_dev.yml b/label/label-to-review/src/main/resources/application_dev.yml
new file mode 100644
index 0000000..a5bc4d9
--- /dev/null
+++ b/label/label-to-review/src/main/resources/application_dev.yml
@@ -0,0 +1,67 @@
+server:
+ port: 9080
+
+spring:
+ application:
+ name: label-to-review
+ profiles:
+ active: dev # 사용할 프로파일 지정 (ex. dev, prod, test)
+
+ datasource:
+ url: jdbc:postgresql://192.168.2.127:15432/kamco_cds
+ #url: jdbc:postgresql://localhost:5432/kamco_cds
+ username: kamco_cds
+ password: kamco_cds_Q!W@E#R$ # FIXME(review): plaintext credential committed to VCS — externalize via env var or secrets manager
+ hikari:
+ minimum-idle: 1
+ maximum-pool-size: 5
+
+ jpa:
+ hibernate:
+ ddl-auto: update # 테이블이 없으면 생성, 있으면 업데이트
+ properties:
+ hibernate:
+ jdbc:
+ batch_size: 50
+ default_batch_fetch_size: 100
+logging:
+ level:
+ root: INFO
+ org.springframework.web: DEBUG
+ org.springframework.security: DEBUG
+
+ # 헬스체크 노이즈 핵심만 다운
+ org.springframework.security.web.FilterChainProxy: INFO
+ org.springframework.security.web.authentication.AnonymousAuthenticationFilter: INFO
+ org.springframework.security.web.authentication.Http403ForbiddenEntryPoint: INFO
+ org.springframework.web.servlet.DispatcherServlet: INFO
+# actuator
+management:
+ health:
+ readinessstate:
+ enabled: true
+ livenessstate:
+ enabled: true
+ endpoint:
+ health:
+ probes:
+ enabled: true
+ show-details: always
+ endpoints:
+ jmx:
+ exposure:
+ exclude: "*"
+ web:
+ base-path: /monitor
+ exposure:
+ include:
+ - "health"
+
+file:
+ #sync-root-dir: D:/kamco-nfs/images/
+ sync-root-dir: /kamco-nfs/images/
+ sync-tmp-dir: ${file.sync-root-dir}/tmp
+ sync-file-extention: tfw,tif
+ sync-auto-exception-start-year: 2025
+ sync-auto-exception-before-year-cnt: 3
+
diff --git a/label/label-to-review/src/main/resources/application_local.yml b/label/label-to-review/src/main/resources/application_local.yml
new file mode 100644
index 0000000..328045e
--- /dev/null
+++ b/label/label-to-review/src/main/resources/application_local.yml
@@ -0,0 +1,67 @@
+server:
+ port: 9080
+
+spring:
+ application:
+ name: label-to-review # fixed: was "imagery-make-dataset", a copy-paste from another module (dev profile uses label-to-review)
+ profiles:
+ active: local # 사용할 프로파일 지정 (ex. dev, prod, test)
+
+ datasource:
+ url: jdbc:postgresql://192.168.2.127:15432/kamco_cds
+ #url: jdbc:postgresql://localhost:5432/kamco_cds
+ username: kamco_cds
+ password: kamco_cds_Q!W@E#R$
+ hikari:
+ minimum-idle: 1
+ maximum-pool-size: 5
+
+ jpa:
+ hibernate:
+ ddl-auto: update # 테이블이 없으면 생성, 있으면 업데이트
+ properties:
+ hibernate:
+ jdbc:
+ batch_size: 50
+ default_batch_fetch_size: 100
+logging:
+ level:
+ root: INFO
+ org.springframework.web: DEBUG
+ org.springframework.security: DEBUG
+
+ # 헬스체크 노이즈 핵심만 다운
+ org.springframework.security.web.FilterChainProxy: INFO
+ org.springframework.security.web.authentication.AnonymousAuthenticationFilter: INFO
+ org.springframework.security.web.authentication.Http403ForbiddenEntryPoint: INFO
+ org.springframework.web.servlet.DispatcherServlet: INFO
+# actuator
+management:
+ health:
+ readinessstate:
+ enabled: true
+ livenessstate:
+ enabled: true
+ endpoint:
+ health:
+ probes:
+ enabled: true
+ show-details: always
+ endpoints:
+ jmx:
+ exposure:
+ exclude: "*"
+ web:
+ base-path: /monitor
+ exposure:
+ include:
+ - "health"
+
+file:
+ #sync-root-dir: D:/kamco-nfs/images/
+ sync-root-dir: /kamco-nfs/images/
+ sync-tmp-dir: ${file.sync-root-dir}/tmp
+ sync-file-extention: tfw,tif
+ sync-auto-exception-start-year: 2025
+ sync-auto-exception-before-year-cnt: 3
+
diff --git a/label/label-to-review/src/main/resources/application_prod.yml b/label/label-to-review/src/main/resources/application_prod.yml
new file mode 100644
index 0000000..1282d26
--- /dev/null
+++ b/label/label-to-review/src/main/resources/application_prod.yml
@@ -0,0 +1,67 @@
+server:
+ port: 9080
+
+spring:
+ application:
+ name: label-to-review # fixed: was "imagery-make-dataset", a copy-paste from another module (dev profile uses label-to-review)
+ profiles:
+ active: prod # 사용할 프로파일 지정 (ex. dev, prod, test)
+
+ datasource:
+ url: jdbc:postgresql://192.168.2.127:15432/kamco_cds
+ #url: jdbc:postgresql://localhost:5432/kamco_cds
+ username: kamco_cds
+ password: kamco_cds_Q!W@E#R$
+ hikari:
+ minimum-idle: 1
+ maximum-pool-size: 5
+
+ jpa:
+ hibernate:
+ ddl-auto: update # 테이블이 없으면 생성, 있으면 업데이트
+ properties:
+ hibernate:
+ jdbc:
+ batch_size: 50
+ default_batch_fetch_size: 100
+logging:
+ level:
+ root: INFO
+ org.springframework.web: DEBUG
+ org.springframework.security: DEBUG
+
+ # 헬스체크 노이즈 핵심만 다운
+ org.springframework.security.web.FilterChainProxy: INFO
+ org.springframework.security.web.authentication.AnonymousAuthenticationFilter: INFO
+ org.springframework.security.web.authentication.Http403ForbiddenEntryPoint: INFO
+ org.springframework.web.servlet.DispatcherServlet: INFO
+# actuator
+management:
+ health:
+ readinessstate:
+ enabled: true
+ livenessstate:
+ enabled: true
+ endpoint:
+ health:
+ probes:
+ enabled: true
+ show-details: always
+ endpoints:
+ jmx:
+ exposure:
+ exclude: "*"
+ web:
+ base-path: /monitor
+ exposure:
+ include:
+ - "health"
+
+file:
+ #sync-root-dir: D:/kamco-nfs/images/
+ sync-root-dir: /kamco-nfs/images/
+ sync-tmp-dir: ${file.sync-root-dir}/tmp
+ sync-file-extention: tfw,tif
+ sync-auto-exception-start-year: 2025
+ sync-auto-exception-before-year-cnt: 3
+
diff --git a/label/label-to-review/src/main/resources/static/chunk_upload_test.html b/label/label-to-review/src/main/resources/static/chunk_upload_test.html
new file mode 100644
index 0000000..2c331d4
--- /dev/null
+++ b/label/label-to-review/src/main/resources/static/chunk_upload_test.html
@@ -0,0 +1,137 @@
+
+
+
+
+ Chunk Upload Test
+
+
+대용량 파일 청크 업로드 테스트
+
+* Chunk 테스트 사이즈 10M (10 * 1024 * 1024) - 성능에 따라 변경가능
+
+* 업로드 API선택
+
+
+* 파일첨부
+
+
+
+* 업로드시 업로드 이력을 추적하기 위해 UUID생성해서 전달(파일병합시 사용)(script 예제참고)
+UUID :
+
+* API 호출시 파일정보 추출해서 자동 할당해야 함.(script 예제참고)
+chunkIndex :
+chunkTotalIndex :
+
+* API 호출시 파일정보 추출해서 자동 할당해야 함.(script 예제참고)
+fileSize :
+
+
+
+
+* 진행율(%)
+
+
+* 결과메세지
+
+
+
+
+
diff --git a/label/label-to-review/unpack_and_offline_build_airgap_macos.sh b/label/label-to-review/unpack_and_offline_build_airgap_macos.sh
new file mode 100644
index 0000000..4ab128e
--- /dev/null
+++ b/label/label-to-review/unpack_and_offline_build_airgap_macos.sh
@@ -0,0 +1,359 @@
+#!/bin/bash
+# unpack_and_offline_build_airgap_macos.sh
+# ============================================================================
+# Execution Environment: OFFLINE (Air-gapped, No Internet)
+# Purpose: Extract bundle and run offline build
+# ============================================================================
+# macOS Bash Script
+# Version: 3.1
+#
+# IMPORTANT: This script automatically:
+# 1. Extracts the archive
+# 2. Sets GRADLE_USER_HOME to project local cache
+# 3. Configures settings.gradle for offline resolution
+# 4. Runs build with --offline flag
+# ============================================================================
+
+set -e
+
+# ============================================================================
+# Configuration
+# ============================================================================
+WRAPPER_SEED_PATH="wrapper_jar_seed"
+OFFLINE_HOME_NAME="_offline_gradle_home"
+
+# Color codes
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+CYAN='\033[0;36m'
+GRAY='\033[0;90m'
+WHITE='\033[1;37m'
+NC='\033[0m' # No Color
+
+echo ""
+echo -e "${CYAN}============================================================${NC}"
+echo -e "${CYAN} Gradle Offline Build Runner (macOS)${NC}"
+echo -e "${CYAN} Environment: AIR-GAPPED (No Internet)${NC}"
+echo -e "${CYAN} Mode: Fully Offline (--offline enforced)${NC}"
+echo -e "${CYAN}============================================================${NC}"
+echo ""
+
+# ============================================================================
+# [1/16] Check Current Directory
+# ============================================================================
+echo -e "${YELLOW}==[1/16] Check Current Directory ==${NC}"
+START_DIR="$(pwd)"
+echo "PWD: $START_DIR"
+echo ""
+
+# ============================================================================
+# [2/16] Select Archive
+# ============================================================================
+echo -e "${YELLOW}==[2/16] Select Archive ==${NC}"
+
+ARCHIVE=""
+if [ $# -ge 1 ]; then
+ ARCHIVE="$1"
+fi
+
+if [ -z "$ARCHIVE" ]; then
+ # Auto-detect most recent .tar.gz file (macOS compatible)
+ ARCHIVE=$(find "$START_DIR" -maxdepth 1 -type f \( -name "*.tar.gz" -o -name "*.tgz" \) -exec stat -f "%m %N" {} \; 2>/dev/null | sort -rn | head -1 | cut -d' ' -f2-)
+
+ if [ -z "$ARCHIVE" ]; then
+ echo -e "${RED}[ERROR] No archive found${NC}"
+ ls -lh "$START_DIR"
+ exit 1
+ fi
+
+ echo -e "${CYAN}[AUTO] $(basename "$ARCHIVE")${NC}"
+else
+ if [ ! -f "$ARCHIVE" ]; then
+ ARCHIVE="$START_DIR/$ARCHIVE"
+ fi
+ echo -e "${CYAN}[USER] $(basename "$ARCHIVE")${NC}"
+fi
+
+if [ ! -f "$ARCHIVE" ]; then
+ echo -e "${RED}ERROR: Archive not found: $ARCHIVE${NC}"
+ exit 1
+fi
+
+# macOS stat command
+ARCHIVE_SIZE=$(stat -f%z "$ARCHIVE" 2>/dev/null)
+ARCHIVE_SIZE_MB=$(awk "BEGIN {printf \"%.2f\", $ARCHIVE_SIZE / 1048576}")
+echo "Size: ${ARCHIVE_SIZE_MB} MB"
+echo ""
+
+# ============================================================================
+# [3/16] Check tar
+# ============================================================================
+echo -e "${YELLOW}==[3/16] Check tar ==${NC}"
+
+if ! command -v tar &>/dev/null; then
+ echo -e "${RED}ERROR: tar not found${NC}"
+ exit 1
+fi
+echo -e "${GREEN}[OK] tar found${NC}"
+echo ""
+
+# ============================================================================
+# [4/16] Extract Archive
+# ============================================================================
+echo -e "${YELLOW}==[4/16] Extract Archive ==${NC}"
+echo -e "${GRAY}[INFO] Extracting...${NC}"
+
+# Test the command directly: under `set -e` a failing tar exits the script
+# before a separate `$? -ne 0` check could run, making that branch dead code.
+if ! tar -xzf "$ARCHIVE" -C "$START_DIR"; then
+ echo -e "${RED}ERROR: Extraction failed${NC}"
+ exit 1
+fi
+echo -e "${GREEN}[OK] Extracted${NC}"
+echo ""
+
+# ============================================================================
+# [5/16] Set Permissions
+# ============================================================================
+echo -e "${YELLOW}==[5/16] Set Permissions ==${NC}"
+
+chmod -R u+rw "$START_DIR" 2>/dev/null || true
+# Remove extended attributes that macOS may add
+xattr -cr "$START_DIR" 2>/dev/null || true
+echo -e "${GREEN}[OK] Permissions set${NC}"
+echo ""
+
+# ============================================================================
+# [6/16] Find Project Root
+# ============================================================================
+echo -e "${YELLOW}==[6/16] Find Project Root ==${NC}"
+
+GRADLEW=$(find "$START_DIR" -name "gradlew" -type f 2>/dev/null | sort | head -1)
+if [ -z "$GRADLEW" ]; then
+ echo -e "${RED}ERROR: gradlew not found${NC}"
+ exit 1
+fi
+
+PROJECT_DIR=$(dirname "$GRADLEW")
+echo -e "${CYAN}Project: $PROJECT_DIR${NC}"
+cd "$PROJECT_DIR"
+echo ""
+
+# ============================================================================
+# [7/16] Fix Permissions
+# ============================================================================
+echo -e "${YELLOW}==[7/16] Fix Permissions ==${NC}"
+
+chmod +x ./gradlew
+find . -name "*.sh" -type f -exec chmod +x {} \; 2>/dev/null || true
+# Remove quarantine attributes that macOS adds to downloaded files
+xattr -d com.apple.quarantine ./gradlew 2>/dev/null || true
+find . -name "*.jar" -exec xattr -d com.apple.quarantine {} \; 2>/dev/null || true
+echo -e "${GREEN}[OK] Permissions fixed${NC}"
+echo ""
+
+# ============================================================================
+# [8/16] Verify Wrapper
+# ============================================================================
+echo -e "${YELLOW}==[8/16] Verify Wrapper ==${NC}"
+
+WRAPPER_DIR="$PROJECT_DIR/gradle/wrapper"
+WRAPPER_JAR="$WRAPPER_DIR/gradle-wrapper.jar"
+WRAPPER_PROP="$WRAPPER_DIR/gradle-wrapper.properties"
+
+if [ ! -f "$WRAPPER_PROP" ]; then
+ echo -e "${RED}ERROR: gradle-wrapper.properties missing${NC}"
+ exit 1
+fi
+
+if [ ! -f "$WRAPPER_JAR" ]; then
+ SEED_JAR="$PROJECT_DIR/$WRAPPER_SEED_PATH/gradle-wrapper.jar"
+ if [ -f "$SEED_JAR" ]; then
+ mkdir -p "$WRAPPER_DIR"
+ cp "$SEED_JAR" "$WRAPPER_JAR"
+ echo -e "${GREEN}[OK] Injected from seed${NC}"
+ else
+ echo -e "${RED}ERROR: wrapper jar missing${NC}"
+ exit 1
+ fi
+else
+ echo -e "${GREEN}[OK] Wrapper verified${NC}"
+fi
+echo ""
+
+# ============================================================================
+# [9/16] Set GRADLE_USER_HOME
+# ============================================================================
+echo -e "${YELLOW}==[9/16] Set GRADLE_USER_HOME ==${NC}"
+
+OFFLINE_HOME="$PROJECT_DIR/$OFFLINE_HOME_NAME"
+if [ ! -d "$OFFLINE_HOME" ]; then
+ echo -e "${RED}ERROR: _offline_gradle_home not found in archive${NC}"
+ exit 1
+fi
+
+export GRADLE_USER_HOME="$OFFLINE_HOME"
+echo -e "${CYAN}GRADLE_USER_HOME = $GRADLE_USER_HOME${NC}"
+
+# Check cache
+CACHES_DIR="$OFFLINE_HOME/caches"
+if [ -d "$CACHES_DIR" ]; then
+ # macOS du command
+ if du -k "$CACHES_DIR" &>/dev/null; then
+ CACHE_SIZE=$(du -sk "$CACHES_DIR" 2>/dev/null | cut -f1)
+ CACHE_SIZE=$((CACHE_SIZE * 1024))
+ else
+ CACHE_SIZE=0
+ fi
+ CACHE_SIZE_MB=$(awk "BEGIN {printf \"%.2f\", $CACHE_SIZE / 1048576}")
+ echo -e "${CYAN}[INFO] Cache size: ${CACHE_SIZE_MB} MB${NC}"
+else
+ echo -e "${YELLOW}[WARN] No cache folder found${NC}"
+fi
+echo ""
+
+# ============================================================================
+# [10/16] Verify settings.gradle
+# ============================================================================
+echo -e "${YELLOW}==[10/16] Verify settings.gradle ==${NC}"
+
+SETTINGS_FILE=""
+if [ -f "./settings.gradle" ]; then
+ SETTINGS_FILE="settings.gradle"
+elif [ -f "./settings.gradle.kts" ]; then
+ SETTINGS_FILE="settings.gradle.kts"
+fi
+
+if [ -n "$SETTINGS_FILE" ]; then
+ if grep -q "mavenLocal()" "$SETTINGS_FILE" && grep -q "pluginManagement" "$SETTINGS_FILE"; then
+ echo -e "${GREEN}[OK] settings.gradle configured for offline${NC}"
+ else
+ echo -e "${YELLOW}[WARN] settings.gradle may not be configured for offline${NC}"
+ echo -e "${GRAY}[INFO] Build may fail if plugins not cached${NC}"
+ fi
+fi
+echo ""
+
+# ============================================================================
+# [11/16] Test Gradle
+# ============================================================================
+echo -e "${YELLOW}==[11/16] Test Gradle ==${NC}"
+
+GRADLE_WORKS=false
+if ./gradlew --offline --version &>/dev/null; then
+ GRADLE_WORKS=true
+ echo -e "${GREEN}[OK] Gradle working in offline mode${NC}"
+else
+ echo -e "${YELLOW}[WARN] Gradle --version failed${NC}"
+fi
+echo ""
+
+# ============================================================================
+# [12/16] Stop Daemon
+# ============================================================================
+echo -e "${YELLOW}==[12/16] Stop Daemon ==${NC}"
+
+./gradlew --stop &>/dev/null || true
+sleep 2
+echo -e "${GREEN}[OK] Daemon stopped${NC}"
+echo ""
+
+# ============================================================================
+# [13/16] Run Offline Build
+# ============================================================================
+echo -e "${YELLOW}==[13/16] Run Offline Build ==${NC}"
+echo ""
+echo -e "${CYAN}============================================================${NC}"
+echo -e "${CYAN} Building with --offline flag${NC}"
+echo -e "${CYAN} All dependencies from local cache${NC}"
+echo -e "${CYAN}============================================================${NC}"
+echo ""
+
+BUILD_SUCCESS=false
+BUILD_TASK=""
+
+# Try bootJar
+echo -e "${GRAY}[TRY] --offline bootJar...${NC}"
+if ./gradlew --offline clean bootJar --no-daemon; then
+ BUILD_SUCCESS=true
+ BUILD_TASK="bootJar"
+fi
+
+# Try jar
+if [ "$BUILD_SUCCESS" = false ]; then
+ echo -e "${GRAY}[TRY] --offline jar...${NC}"
+ if ./gradlew --offline clean jar --no-daemon; then
+ BUILD_SUCCESS=true
+ BUILD_TASK="jar"
+ fi
+fi
+
+# Try build
+if [ "$BUILD_SUCCESS" = false ]; then
+ echo -e "${GRAY}[TRY] --offline build...${NC}"
+ if ./gradlew --offline build --no-daemon; then
+ BUILD_SUCCESS=true
+ BUILD_TASK="build"
+ fi
+fi
+
+echo ""
+if [ "$BUILD_SUCCESS" = true ]; then
+ echo -e "${GREEN}============================================================${NC}"
+ echo -e "${GREEN} BUILD SUCCESS! (task: $BUILD_TASK)${NC}"
+ echo -e "${GREEN}============================================================${NC}"
+else
+ echo -e "${RED}============================================================${NC}"
+ echo -e "${RED} BUILD FAILED!${NC}"
+ echo -e "${RED}============================================================${NC}"
+ echo ""
+ echo -e "${YELLOW}Possible causes:${NC}"
+ echo -e "${WHITE} - Dependencies not in cache${NC}"
+ echo -e "${WHITE} - Plugin resolution failed${NC}"
+ echo -e "${WHITE} - Need complete build in online env first${NC}"
+ exit 1
+fi
+echo ""
+
+# ============================================================================
+# [14/16] Show Build Output
+# ============================================================================
+echo -e "${YELLOW}==[14/16] Build Output ==${NC}"
+
+LIBS_DIR="$PROJECT_DIR/build/libs"
+if [ -d "$LIBS_DIR" ]; then
+ echo -e "${CYAN}build/libs contents:${NC}"
+ ls -lh "$LIBS_DIR"/*.jar 2>/dev/null | awk '{printf " %-40s %10s\n", $9, $5}'
+
+ MAIN_JAR=$(find "$LIBS_DIR" -name "*.jar" -type f ! -name "*-plain.jar" ! -name "*-sources.jar" ! -name "*-javadoc.jar" 2>/dev/null | head -1)
+else
+ echo -e "${YELLOW}[WARN] build/libs not found${NC}"
+fi
+echo ""
+
+# ============================================================================
+# [15/16] Run Instructions
+# ============================================================================
+echo -e "${YELLOW}==[15/16] Run Instructions ==${NC}"
+echo ""
+
+if [ -n "$MAIN_JAR" ]; then
+ echo -e "${CYAN}To run the application:${NC}"
+ echo -e "${WHITE} java -jar $(basename "$MAIN_JAR")${NC}"
+ echo ""
+fi
+
+echo -e "${CYAN}To rebuild:${NC}"
+echo -e "${WHITE} export GRADLE_USER_HOME=\"./_offline_gradle_home\"${NC}"
+echo -e "${WHITE} ./gradlew --offline bootJar --no-daemon${NC}"
+echo ""
+
+# ============================================================================
+# [16/16] Complete
+# ============================================================================
+echo -e "${GREEN}============================================================${NC}"
+echo -e "${GREEN} Offline Build Complete!${NC}"
+echo -e "${GREEN}============================================================${NC}"
+echo ""
+echo -e "${CYAN}Project: $PROJECT_DIR${NC}"
+echo ""