Merge branch 'develop' of https://kamco.gitea.gs.dabeeo.com/dabeeo/kamco-dabeeo-backoffice into feat/dean/test-scene

This commit is contained in:
2026-01-05 16:50:12 +09:00
54 changed files with 4563 additions and 97 deletions

View File

@@ -0,0 +1,125 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.AllocateInfoDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.searchReq;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerStatistics;
import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
import com.kamco.cd.kamcoback.postgres.repository.label.LabelAllocateRepository;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
public class LabelAllocateCoreService {

  private final LabelAllocateRepository labelAllocateRepository;

  /**
   * Fetches the next batch of allocation candidates after the keyset cursor {@code lastId}.
   * Pure delegation to {@link LabelAllocateRepository#fetchNextIds}.
   */
  public List<AllocateInfoDto> fetchNextIds(
      Long lastId, Long batchSize, Integer compareYyyy, Integer targetYyyy, Integer stage) {
    return labelAllocateRepository.fetchNextIds(lastId, batchSize, compareYyyy, targetYyyy, stage);
  }

  /** Assigns the given geometry rows to a labeler under the analysis {@code analUid}. */
  public void assignOwner(List<AllocateInfoDto> ids, String userId, Long analUid) {
    labelAllocateRepository.assignOwner(ids, userId, analUid);
  }

  /** Returns assignments for the analysis mapped to {@link LabelAllocateDto.Basic} DTOs. */
  public List<LabelAllocateDto.Basic> findAssignedLabelerList(Long analUid) {
    return labelAllocateRepository.findAssignedLabelerList(analUid).stream()
        .map(LabelingAssignmentEntity::toDto)
        .toList();
  }

  /** Counts geometries not yet assigned to any labeler for the given stage/year pair. */
  public Long findLabelUnAssignedCnt(Integer stage, Integer compareYyyy, Integer targetYyyy) {
    return labelAllocateRepository.findLabelUnAssignedCnt(stage, compareYyyy, targetYyyy);
  }

  /** Sets the inspector on a single assignment. */
  public void assignInspector(UUID assignmentUid, String inspectorUid) {
    labelAllocateRepository.assignInspector(assignmentUid, inspectorUid);
  }

  /** Lists members available for the given role (delegated filtering). */
  public List<UserList> availUserList(String role) {
    return labelAllocateRepository.availUserList(role);
  }

  /** Returns project information for a specific analysis. */
  public ProjectInfo findProjectInfo(Long analUid) {
    return labelAllocateRepository.findProjectInfo(analUid);
  }

  /** Returns the most recent project information (no analUid filter). */
  public ProjectInfo findLatestProjectInfo() {
    return labelAllocateRepository.findLatestProjectInfo();
  }

  /** Returns per-worker statistics, optionally filtered/sorted by the given criteria. */
  public List<WorkerStatistics> findWorkerStatistics(
      Long analUid, String workerType, String search, String sortType) {
    return labelAllocateRepository.findWorkerStatistics(analUid, workerType, search, sortType);
  }

  /** Returns overall work-progress figures for the analysis. */
  public WorkProgressInfo findWorkProgressInfo(Long analUid) {
    return labelAllocateRepository.findWorkProgressInfo(analUid);
  }

  /** Returns the number of items a worker processed on {@code date}. */
  public Long findDailyProcessedCount(
      String workerId, String workerType, LocalDate date, Long analUid) {
    return labelAllocateRepository.findDailyProcessedCount(workerId, workerType, date, analUid);
  }

  /** Sets the inspector on many assignments in one operation. */
  public void assignInspectorBulk(List<UUID> assignmentUids, String inspectorUid) {
    labelAllocateRepository.assignInspectorBulk(assignmentUids, inspectorUid);
  }

  /** Returns inference detail for a geometry identified by its UUID string. */
  public InferenceDetail findInferenceDetail(String uuid) {
    return labelAllocateRepository.findInferenceDetail(uuid);
  }

  /** Fetches the next batch of ids eligible to be moved to another worker. */
  public List<Long> fetchNextMoveIds(
      Long lastId,
      Long batchSize,
      Integer compareYyyy,
      Integer targetYyyy,
      Integer stage,
      String userId) {
    return labelAllocateRepository.fetchNextMoveIds(
        lastId, batchSize, compareYyyy, targetYyyy, stage, userId);
  }

  /** Re-assigns the given ids to {@code userId}. */
  public void assignOwnerMove(List<Long> sub, String userId) {
    labelAllocateRepository.assignOwnerMove(sub, userId);
  }

  /** Returns a labeler's detail for the given geometry UUID. */
  public LabelerDetail findLabelerDetail(String userId, String uuid) {
    return labelAllocateRepository.findLabelerDetail(userId, uuid);
  }

  /** Resolves the analysis UID for the given year pair and stage. */
  public Long findMapSheetAnalInferenceUid(Integer compareYyyy, Integer targetYyyy, Integer stage) {
    return labelAllocateRepository.findMapSheetAnalInferenceUid(compareYyyy, targetYyyy, stage);
  }

  /** Registers an inspector on the analysis. */
  public void insertInspector(Long analUid, String inspector) {
    labelAllocateRepository.insertInspector(analUid, inspector);
  }

  /** Returns a page of daily labeling statistics for a labeler. */
  public Page<LabelingStatDto> findLabelerDailyStat(
      searchReq searchReq, String uuid, String userId) {
    return labelAllocateRepository.findLabelerDailyStat(searchReq, uuid, userId);
  }

  /** Returns a page of daily inspection statistics for an inspector. */
  public Page<LabelingStatDto> findInspectorDailyStat(
      searchReq searchReq, String uuid, String userId) {
    return labelAllocateRepository.findInspectorDailyStat(searchReq, uuid, userId);
  }

  /** Returns an inspector's detail for the given geometry UUID. */
  public LabelerDetail findInspectorDetail(String userId, String uuid) {
    return labelAllocateRepository.findInspectorDetail(userId, uuid);
  }
}

View File

@@ -0,0 +1,60 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.ChangeDetectYear;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMng;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMngDetail;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.WorkerState;
import com.kamco.cd.kamcoback.postgres.repository.label.LabelWorkRepository;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
public class LabelWorkCoreService {

  private final LabelWorkRepository labelWorkRepository;

  /**
   * Returns change-detection year options for a select box.
   *
   * <p>Each option's id and label are the same "compareYyyy-targetYyyy" string, built once per
   * row instead of twice.
   *
   * @return year-pair options (empty list when no rows exist)
   */
  public List<ChangeDetectYear> getChangeDetectYear() {
    return labelWorkRepository.findChangeDetectYearList().stream()
        .map(
            e -> {
              String yearPair = e.getCompareYyyy() + "-" + e.getTargetYyyy();
              return new ChangeDetectYear(yearPair, yearPair);
            })
        .toList();
  }

  /**
   * Returns a page of labeling-work management rows matching the search request.
   *
   * @param searchReq filter/paging criteria
   * @return matching page (delegated to the repository)
   */
  public Page<LabelWorkMng> labelWorkMngList(LabelWorkDto.LabelWorkMngSearchReq searchReq) {
    return labelWorkRepository.labelWorkMngList(searchReq);
  }

  /**
   * Returns a page of worker-state rows matching the search request.
   *
   * <p>NOTE(review): method name keeps the repository's existing "findlabelWorkStateList"
   * casing because callers depend on it; renaming would be an interface change.
   */
  public Page<WorkerState> findlabelWorkStateList(LabelWorkDto.WorkerStateSearchReq searchReq) {
    return labelWorkRepository.findlabelWorkStateList(searchReq);
  }

  /**
   * Returns work-assignment detail for the given assignment UUID.
   *
   * @param uuid assignment identifier
   * @return detail DTO (delegated to the repository)
   */
  public LabelWorkMngDetail findLabelWorkMngDetail(UUID uuid) {
    return labelWorkRepository.findLabelWorkMngDetail(uuid);
  }
}

View File

@@ -9,6 +9,7 @@ import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx50kRepository;
import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx5kRepository;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto.MapListEntity;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto.UseInferReq;
import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.Valid;
@@ -101,4 +102,11 @@ public class MapInkxMngCoreService {
return getScene5k.toEntity();
}
public Page<MapListEntity> getSceneListByPage(
CommonUseStatus useInference, String searchVal, MapInkxMngDto.searchReq searchReq) {
return mapInkx5kRepository
.getSceneListByPage(useInference, searchVal, searchReq)
.map(MapInkx5kEntity::toDto);
}
}

View File

@@ -0,0 +1,70 @@
package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
import com.kamco.cd.kamcoback.postgres.CommonDateEntity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import java.time.ZonedDateTime;
import java.util.UUID;
/**
 * JPA entity for {@code tb_labeling_assignment}: one labeling assignment linking an inference
 * geometry to a worker and (optionally) an inspector. Audit timestamps come from
 * {@link CommonDateEntity}; fields are exposed externally only through {@link #toDto()}.
 */
@Entity
@Table(name = "tb_labeling_assignment")
public class LabelingAssignmentEntity extends CommonDateEntity {

  @Id
  @Column(name = "assignment_uid")
  private UUID assignmentUid;

  @Column(name = "inference_geom_uid")
  private Long inferenceGeomUid;

  @Column(name = "worker_uid")
  private String workerUid;

  @Column(name = "inspector_uid")
  private String inspectorUid;

  @Column(name = "work_state")
  private String workState;

  // Y/N single-character flag for a stalled assignment — TODO confirm legal values.
  @Column(name = "stagnation_yn")
  private Character stagnationYn;

  @Column(name = "assign_group_id")
  private String assignGroupId;

  @Column(name = "learn_geom_uid")
  private Long learnGeomUid;

  @Column(name = "anal_uid")
  private Long analUid;

  @Column(name = "inspect_state")
  private String inspectState;

  @Column(name = "work_stat_dttm")
  private ZonedDateTime workStatDttm;

  @Column(name = "inspect_stat_dttm")
  private ZonedDateTime inspectStatDttm;

  /** Maps this row to {@link LabelAllocateDto.Basic}, including inherited audit dates. */
  public LabelAllocateDto.Basic toDto() {
    return new LabelAllocateDto.Basic(
        this.assignmentUid,
        this.inferenceGeomUid,
        this.workerUid,
        this.inspectorUid,
        this.workState,
        this.stagnationYn,
        this.assignGroupId,
        this.learnGeomUid,
        this.analUid,
        super.getCreatedDate(),
        super.getModifiedDate(),
        this.inspectState,
        this.workStatDttm,
        this.inspectStatDttm);
  }
}

View File

@@ -0,0 +1,33 @@
package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.label.dto.LabelInspectorDto;
import com.kamco.cd.kamcoback.postgres.CommonDateEntity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import java.util.UUID;
/**
 * JPA entity for {@code tb_labeling_inspector}: associates an inspector with an analysis.
 * Audit timestamps are inherited from {@link CommonDateEntity}.
 */
@Entity
@Table(name = "tb_labeling_inspector")
public class LabelingInspectorEntity extends CommonDateEntity {

  @Id
  @Column(name = "operator_uid")
  private UUID operatorUid;

  @Column(name = "anal_uid")
  private Long analUid;

  @Column(name = "inspector_uid")
  private String inspectorUid;

  /** Maps this row to {@link LabelInspectorDto.Basic}, including inherited audit dates. */
  public LabelInspectorDto.Basic toDto() {
    return new LabelInspectorDto.Basic(
        this.operatorUid,
        this.analUid,
        this.inspectorUid,
        super.getCreatedDate(),
        super.getModifiedDate());
  }
}

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
import com.kamco.cd.kamcoback.postgres.CommonDateEntity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
@@ -46,4 +47,8 @@ public class MapInkx50kEntity extends CommonDateEntity {
this.mapidNo = mapidNo;
this.geom = geom;
}
  /**
   * Maps this row to an {@link MapSheet} DTO (code + name only).
   *
   * <p>NOTE(review): despite the name "toEntity" this returns a DTO, not a JPA entity;
   * callers already depend on the name, so it is kept as-is.
   */
  public MapSheet toEntity() {
    return new MapSheet(mapidcdNo, mapidNm);
  }
}

View File

@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.common.enums.CommonUseStatus;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
import com.kamco.cd.kamcoback.postgres.CommonDateEntity;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto.MapListEntity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.EnumType;
@@ -73,4 +74,14 @@ public class MapInkx5kEntity extends CommonDateEntity {
public InferenceResultDto.MapSheet toEntity() {
return new MapSheet(mapidcdNo, mapidNm);
}
  /**
   * Maps this 1:5k scene row to a {@link MapListEntity} list DTO, combining the 1:5k sheet,
   * its parent 1:50k sheet, the inference-use flag, and inherited audit dates.
   *
   * <p>NOTE(review): assumes {@code mapInkx50k} is non-null (NPE otherwise) — confirm the
   * association is mandatory.
   */
  public MapListEntity toDto() {
    return MapListEntity.builder()
        .scene5k(this.toEntity())
        .scene50k(this.mapInkx50k.toEntity())
        .useInference(useInference)
        .createdDttm(super.getCreatedDate())
        .updatedDttm(super.getModifiedDate())
        .build();
  }
}

View File

@@ -103,4 +103,7 @@ public class MapSheetAnalEntity {
@ColumnDefault("now()")
@Column(name = "updated_dttm")
private ZonedDateTime updatedDttm;
@Column(name = "gukyuin_apply_dttm")
private ZonedDateTime gukyuinApplyDttm;
}

View File

@@ -0,0 +1,152 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
/**
 * JPA entity for {@code tb_map_sheet_anal_inference}: one change-detection inference run,
 * keyed by a sequence-generated {@code anal_uid}, covering a compare/target year pair and stage.
 */
@Getter
@Setter
@Entity
@Table(name = "tb_map_sheet_anal_inference")
public class MapSheetAnalInferenceEntity {

  @Id
  @GeneratedValue(
      strategy = GenerationType.SEQUENCE,
      generator = "tb_map_sheet_anal_inference_id_gen")
  @SequenceGenerator(
      name = "tb_map_sheet_anal_inference_id_gen",
      sequenceName = "tb_map_sheet_anal_inference_uid",
      allocationSize = 1)
  @Column(name = "anal_uid", nullable = false)
  private Long id;

  @Column(name = "compare_yyyy")
  private Integer compareYyyy;

  @Column(name = "target_yyyy")
  private Integer targetYyyy;

  @Column(name = "model_uid")
  private Long modelUid;

  @Size(max = 100)
  @Column(name = "server_ids", length = 100)
  private String serverIds;

  // Analysis start/end timestamps and elapsed seconds.
  @Column(name = "anal_strt_dttm")
  private ZonedDateTime analStrtDttm;

  @Column(name = "anal_end_dttm")
  private ZonedDateTime analEndDttm;

  @Column(name = "anal_sec")
  private Long analSec;

  @Size(max = 20)
  @Column(name = "anal_state", length = 20)
  private String analState;

  @Size(max = 20)
  @Column(name = "gukyuin_used", length = 20)
  private String gukyuinUsed;

  @Column(name = "accuracy")
  private Double accuracy;

  @Size(max = 255)
  @Column(name = "result_url")
  private String resultUrl;

  @ColumnDefault("now()")
  @Column(name = "created_dttm")
  private ZonedDateTime createdDttm;

  @Column(name = "created_uid")
  private Long createdUid;

  @ColumnDefault("now()")
  @Column(name = "updated_dttm")
  private ZonedDateTime updatedDttm;

  @Column(name = "updated_uid")
  private Long updatedUid;

  @Size(max = 255)
  @Column(name = "anal_title")
  private String analTitle;

  @Column(name = "detecting_cnt")
  private Long detectingCnt;

  @Column(name = "anal_pred_sec")
  private Long analPredSec;

  @Column(name = "model_ver_uid")
  private Long modelVerUid;

  // JSON column mapped to a generic map of hyper-parameters.
  @Column(name = "hyper_params")
  @JdbcTypeCode(SqlTypes.JSON)
  private Map<String, Object> hyperParams;

  // NOTE(review): "tranning_rate" looks like a typo of "training_rate" but must match the DB
  // column. Also, List<Double> has no explicit @JdbcTypeCode/array mapping here — confirm
  // Hibernate maps these columns correctly.
  @Column(name = "tranning_rate")
  private List<Double> tranningRate;

  @Column(name = "validation_rate")
  private List<Double> validationRate;

  @Column(name = "test_rate", length = Integer.MAX_VALUE)
  private String testRate;

  @Size(max = 128)
  @Column(name = "detecting_description", length = 128)
  private String detectingDescription;

  @Size(max = 12)
  @Column(name = "base_map_sheet_num", length = 12)
  private String baseMapSheetNum;

  @ColumnDefault("gen_random_uuid()")
  @Column(name = "uuid")
  private UUID uuid;

  @Size(max = 50)
  @Column(name = "model_m1_ver", length = 50)
  private String modelM1Ver;

  @Size(max = 50)
  @Column(name = "model_m2_ver", length = 50)
  private String modelM2Ver;

  @Size(max = 50)
  @Column(name = "model_m3_ver", length = 50)
  private String modelM3Ver;

  @Size(max = 20)
  @Column(name = "anal_target_type", length = 20)
  private String analTargetType;

  @Column(name = "gukyuin_apply_dttm")
  private ZonedDateTime gukyuinApplyDttm;

  @Size(max = 20)
  @Column(name = "detection_data_option", length = 20)
  private String detectionDataOption;

  @Column(name = "stage")
  private Integer stage;
}

View File

@@ -3,12 +3,9 @@ package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.postgres.CommonDateEntity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
@@ -69,10 +66,11 @@ public class MapSheetMngHstEntity extends CommonDateEntity {
private Integer mngYyyy; // 년도
// JPA 연관관계: MapInkx5k 참조 (PK 기반) 소속도엽번호 1:5k
/*
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "map_sheet_code", referencedColumnName = "fid")
private MapInkx5kEntity mapInkx5kByCode;
*/
// TODO 1:5k 관련 정보 추후 제거 필요
@Column(name = "map_sheet_num")
private String mapSheetNum; // 도엽번호

View File

@@ -5,7 +5,6 @@ import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import java.time.ZonedDateTime;
import java.util.List;
import lombok.RequiredArgsConstructor;
@@ -16,8 +15,7 @@ import org.springframework.stereotype.Repository;
public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {
private final JPAQueryFactory queryFactory;
@PersistenceContext private final EntityManager em;
private final EntityManager em;
/** tb_map_sheet_anal_data_inference */
private final QMapSheetAnalDataInferenceEntity inferenceEntity =
@@ -36,22 +34,28 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
String sql =
"""
INSERT INTO tb_map_sheet_anal_inference (
stage,
compare_yyyy,
target_yyyy,
anal_map_sheet,
stage,
anal_title
anal_title,
detecting_cnt,
created_dttm,
updated_dttm
)
SELECT
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS anal_map_sheet,
r.stage,
CONCAT(r.stage ,'_', r.input1 ,'_', r.input2 ,'_', r.map_id) as anal_title
FROM inference_results r
GROUP BY r.stage, r.input1, r.input2, r.map_id
ON CONFLICT (compare_yyyy, target_yyyy, anal_map_sheet, stage)
r.stage,
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
CONCAT(r.stage, '_', r.input1, '_', r.input2) AS anal_title,
COUNT(*) AS detecting_cnt,
now(),
now()
FROM inference_results r
GROUP BY r.stage, r.input1, r.input2
ON CONFLICT (stage, compare_yyyy, target_yyyy)
DO UPDATE SET
detecting_cnt = EXCLUDED.detecting_cnt,
anal_title = EXCLUDED.anal_title,
updated_dttm = now()
""";
@@ -72,30 +76,42 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
String sql =
"""
INSERT INTO tb_map_sheet_anal_data_inference (
anal_uid,
stage,
compare_yyyy,
target_yyyy,
map_sheet_num,
created_dttm,
updated_dttm,
detecting_cnt,
file_created_yn,
detecting_cnt
created_dttm,
updated_dttm
)
SELECT
ai.id AS anal_uid,
r.stage,
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS map_sheet_num,
now() AS created_dttm,
now() AS updated_dttm,
false AS file_created_yn,
count(*) AS detecting_cnt
COUNT(*) AS detecting_cnt,
false AS file_created_yn,
now(),
now()
FROM inference_results r
GROUP BY r.stage, r.input1, r.input2, r.map_id
JOIN tb_map_sheet_anal_inference ai
ON ai.stage = r.stage
AND ai.compare_yyyy = r.input1
AND ai.target_yyyy = r.input2
GROUP BY
ai.id,
r.stage,
r.input1,
r.input2,
r.map_id
ON CONFLICT (stage, compare_yyyy, target_yyyy, map_sheet_num)
DO UPDATE SET
updated_dttm = now(),
detecting_cnt = EXCLUDED.detecting_cnt
anal_uid = EXCLUDED.anal_uid,
detecting_cnt = EXCLUDED.detecting_cnt,
updated_dttm = now()
""";
return em.createNativeQuery(sql).executeUpdate();
@@ -114,46 +130,70 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
String sql =
"""
INSERT INTO tb_map_sheet_anal_data_inference_geom (
uuid, stage, cd_prob, compare_yyyy, target_yyyy, map_sheet_num,
class_before_cd, class_before_prob, class_after_cd, class_after_prob,
geom, area, data_uid, created_dttm, updated_dttm,
file_created_yn
INSERT INTO tb_map_sheet_anal_data_inference_geom (
uuid,
stage,
cd_prob,
compare_yyyy,
target_yyyy,
map_sheet_num,
class_before_cd,
class_before_prob,
class_after_cd,
class_after_prob,
geom,
area,
data_uid,
file_created_yn,
created_dttm,
updated_dttm
)
SELECT
x.uuid, x.stage, x.cd_prob, x.compare_yyyy, x.target_yyyy, x.map_sheet_num,
x.class_before_cd, x.class_before_prob, x.class_after_cd, x.class_after_prob,
x.geom, x.area, x.data_uid, x.created_dttm, x.updated_dttm,
false AS file_created_yn
x.uuid,
x.stage,
x.cd_prob,
x.compare_yyyy,
x.target_yyyy,
x.map_sheet_num,
x.class_before_cd,
x.class_before_prob,
x.class_after_cd,
x.class_after_prob,
x.geom,
x.area,
x.data_uid,
false,
x.created_dttm,
x.updated_dttm
FROM (
SELECT DISTINCT ON (r.uuid)
r.uuid,
r.stage,
r.cd_prob,
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS map_sheet_num,
r.before_class AS class_before_cd,
r.before_probability AS class_before_prob,
r.after_class AS class_after_cd,
r.after_probability AS class_after_prob,
CASE
WHEN r.geometry IS NULL THEN NULL
WHEN left(r.geometry, 2) = '01'
THEN ST_SetSRID(ST_GeomFromWKB(decode(r.geometry, 'hex')), 5186)
ELSE ST_GeomFromText(r.geometry, 5186)
END AS geom,
r.area,
di.data_uid,
r.created_dttm,
r.updated_dttm
FROM inference_results r
JOIN tb_map_sheet_anal_data_inference di
ON di.stage = r.stage
AND di.compare_yyyy = r.input1
AND di.target_yyyy = r.input2
AND di.map_sheet_num = r.map_id
ORDER BY r.uuid, r.updated_dttm DESC NULLS LAST, r.uid DESC
SELECT DISTINCT ON (r.uuid)
r.uuid,
r.stage,
r.cd_prob,
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS map_sheet_num,
r.before_class AS class_before_cd,
r.before_probability AS class_before_prob,
r.after_class AS class_after_cd,
r.after_probability AS class_after_prob,
CASE
WHEN r.geometry IS NULL THEN NULL
WHEN LEFT(r.geometry, 2) = '01'
THEN ST_SetSRID(ST_GeomFromWKB(decode(r.geometry, 'hex')), 5186)
ELSE ST_GeomFromText(r.geometry, 5186)
END AS geom,
r.area,
di.data_uid,
r.created_dttm,
r.updated_dttm
FROM inference_results r
JOIN tb_map_sheet_anal_data_inference di
ON di.stage = r.stage
AND di.compare_yyyy = r.input1
AND di.target_yyyy = r.input2
AND di.map_sheet_num = r.map_id
ORDER BY r.uuid, r.updated_dttm DESC NULLS LAST, r.uid DESC
) x
ON CONFLICT (uuid)
DO UPDATE SET

View File

@@ -0,0 +1,8 @@
package com.kamco.cd.kamcoback.postgres.repository.label;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import org.springframework.data.jpa.repository.JpaRepository;
/**
 * Spring Data repository for label allocation, rooted at the inference-geometry entity.
 * CRUD comes from {@link JpaRepository}; custom QueryDSL/native operations come from
 * {@link LabelAllocateRepositoryCustom}.
 */
public interface LabelAllocateRepository
    extends JpaRepository<MapSheetAnalDataInferenceGeomEntity, Long>,
        LabelAllocateRepositoryCustom {}

View File

@@ -0,0 +1,76 @@
package com.kamco.cd.kamcoback.postgres.repository.label;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.AllocateInfoDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerStatistics;
import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;
import org.springframework.data.domain.Page;
/**
 * Custom repository operations for label allocation: batch assignment, inspector management,
 * worker statistics, and daily-stat pages.
 */
public interface LabelAllocateRepositoryCustom {

  /** Fetches the next batch of allocation candidates after the keyset cursor {@code lastId}. */
  List<AllocateInfoDto> fetchNextIds(
      Long lastId, Long batchSize, Integer compareYyyy, Integer targetYyyy, Integer stage);

  /** Assigns the given geometry rows to labeler {@code userId} under analysis {@code analUid}. */
  void assignOwner(List<AllocateInfoDto> ids, String userId, Long analUid);

  /** Returns assignments for the analysis that still lack an inspector. */
  List<LabelingAssignmentEntity> findAssignedLabelerList(Long analUid);

  /** Counts geometries not yet assigned for the given stage/year pair. */
  Long findLabelUnAssignedCnt(Integer stage, Integer compareYyyy, Integer targetYyyy);

  // Renamed param from "userId" to "inspectorUid" to match the implementation and callers.
  /** Sets the inspector on a single assignment. */
  void assignInspector(UUID assignmentUid, String inspectorUid);

  /** Lists members available for the given role. */
  List<UserList> availUserList(String role);

  /** Returns project information for a specific analysis. */
  ProjectInfo findProjectInfo(Long analUid);

  /** Returns the latest project information (no analUid filter). */
  ProjectInfo findLatestProjectInfo();

  /** Returns per-worker statistics, optionally filtered/sorted. */
  List<WorkerStatistics> findWorkerStatistics(
      Long analUid, String workerType, String search, String sortType);

  /** Returns overall work-progress figures for the analysis. */
  WorkProgressInfo findWorkProgressInfo(Long analUid);

  /** Returns the number of items a worker processed on {@code date}. */
  Long findDailyProcessedCount(String workerId, String workerType, LocalDate date, Long analUid);

  /** Sets the inspector on many assignments in one operation. */
  void assignInspectorBulk(List<UUID> assignmentUids, String inspectorUid);

  /** Returns inference detail for a geometry identified by its UUID string. */
  InferenceDetail findInferenceDetail(String uuid);

  /** Fetches the next batch of ids eligible to be moved to another worker. */
  List<Long> fetchNextMoveIds(
      Long lastId,
      Long batchSize,
      Integer compareYyyy,
      Integer targetYyyy,
      Integer stage,
      String userId);

  /** Re-assigns the given ids to {@code userId}. */
  void assignOwnerMove(List<Long> sub, String userId);

  /** Returns a labeler's detail for the given geometry UUID. */
  LabelerDetail findLabelerDetail(String userId, String uuid);

  /** Resolves the analysis UID for the given year pair and stage. */
  Long findMapSheetAnalInferenceUid(Integer compareYyyy, Integer targetYyyy, Integer stage);

  /** Registers an inspector on the analysis. */
  void insertInspector(Long analUid, String inspector);

  /** Returns a page of daily labeling statistics for a labeler. */
  Page<LabelingStatDto> findLabelerDailyStat(
      LabelAllocateDto.searchReq searchReq, String uuid, String userId);

  /** Returns a page of daily inspection statistics for an inspector. */
  Page<LabelingStatDto> findInspectorDailyStat(
      LabelAllocateDto.searchReq searchReq, String uuid, String userId);

  /** Returns an inspector's detail for the given geometry UUID. */
  LabelerDetail findInspectorDetail(String userId, String uuid);
}

View File

@@ -0,0 +1,973 @@
package com.kamco.cd.kamcoback.postgres.repository.label;
import static com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity.labelingAssignmentEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QLabelingInspectorEntity.labelingInspectorEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity.mapSheetAnalEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMemberEntity.memberEntity;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.AllocateInfoDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InspectState;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelState;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerStatistics;
import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMemberEntity;
import com.querydsl.core.types.Expression;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberExpression;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityNotFoundException;
import jakarta.persistence.PersistenceContext;
import jakarta.transaction.Transactional;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Repository;
@Slf4j
@Repository
@RequiredArgsConstructor
public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCustom {
private final JPAQueryFactory queryFactory;
@PersistenceContext private EntityManager em;
@Override
public List<AllocateInfoDto> fetchNextIds(
Long lastId, Long batchSize, Integer compareYyyy, Integer targetYyyy, Integer stage) {
return queryFactory
.select(
Projections.constructor(
AllocateInfoDto.class,
mapSheetAnalDataInferenceGeomEntity.geoUid,
mapSheetAnalDataInferenceGeomEntity.mapSheetNum))
.from(mapSheetAnalDataInferenceGeomEntity)
.where(
lastId == null ? null : mapSheetAnalDataInferenceGeomEntity.geoUid.gt(lastId),
mapSheetAnalDataInferenceGeomEntity.pnu.isNotNull(),
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(compareYyyy),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(targetYyyy),
mapSheetAnalDataInferenceGeomEntity.stage.eq(stage),
mapSheetAnalDataInferenceGeomEntity.labelState.isNull())
.orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc())
.limit(batchSize)
.fetch();
}
@Override
public void assignOwner(List<AllocateInfoDto> ids, String userId, Long analUid) {
// analUid로 분석 정보 조회
MapSheetAnalInferenceEntity analEntity =
queryFactory
.selectFrom(mapSheetAnalInferenceEntity)
.where(mapSheetAnalInferenceEntity.id.eq(analUid))
.fetchOne();
if (Objects.isNull(analEntity)) {
throw new EntityNotFoundException("MapSheetAnalInferenceEntity not found for analUid: ");
}
// data_geom 테이블에 label state 를 ASSIGNED 로 update
List<Long> geoUidList =
ids.stream().map(AllocateInfoDto::getGeoUid).filter(Objects::nonNull).toList();
queryFactory
.update(mapSheetAnalDataInferenceGeomEntity)
.set(mapSheetAnalDataInferenceGeomEntity.labelState, LabelState.ASSIGNED.getId())
.set(mapSheetAnalDataInferenceGeomEntity.labelStateDttm, ZonedDateTime.now())
.where(mapSheetAnalDataInferenceGeomEntity.geoUid.in(geoUidList))
.execute();
// 라벨러 할당 테이블에 insert
String sql =
"""
insert into tb_labeling_assignment
(assignment_uid, inference_geom_uid, worker_uid,
work_state, assign_group_id, anal_uid)
values (?, ?, ?, ?, ?, ?)
""";
for (AllocateInfoDto info : ids) {
em.createNativeQuery(sql)
.setParameter(1, UUID.randomUUID())
.setParameter(2, info.getGeoUid())
.setParameter(3, userId)
.setParameter(4, LabelState.ASSIGNED.getId())
.setParameter(5, info.getMapSheetNum())
.setParameter(6, analEntity.getId())
.executeUpdate();
}
em.flush();
em.clear();
}
@Override
public List<LabelingAssignmentEntity> findAssignedLabelerList(Long analUid) {
// analUid로 분석 정보 조회
MapSheetAnalInferenceEntity analEntity =
queryFactory
.selectFrom(mapSheetAnalInferenceEntity)
.where(mapSheetAnalInferenceEntity.id.eq(analUid))
.fetchOne();
if (Objects.isNull(analEntity)) {
throw new EntityNotFoundException("mapSheetAnalInferenceEntity not found for analUid: ");
}
return queryFactory
.selectFrom(labelingAssignmentEntity)
.where(
labelingAssignmentEntity.analUid.eq(analEntity.getId()),
labelingAssignmentEntity.workState.eq(LabelState.ASSIGNED.getId()),
labelingAssignmentEntity.inspectorUid.isNull())
.orderBy(labelingAssignmentEntity.workerUid.asc())
.fetch();
}
@Override
public Long findLabelUnAssignedCnt(Integer stage, Integer compareYyyy, Integer targetYyyy) {
return queryFactory
.select(mapSheetAnalDataInferenceGeomEntity.geoUid.count())
.from(mapSheetAnalDataInferenceGeomEntity)
.where(
mapSheetAnalDataInferenceGeomEntity.pnu.isNotNull(),
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(compareYyyy),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(targetYyyy),
mapSheetAnalDataInferenceGeomEntity.stage.eq(stage),
mapSheetAnalDataInferenceGeomEntity.labelState.isNull())
.fetchOne();
}
  /** Sets {@code inspectorUid} on the assignment identified by {@code assignmentUid}. */
  @Override
  public void assignInspector(UUID assignmentUid, String inspectorUid) {
    queryFactory
        .update(labelingAssignmentEntity)
        .set(labelingAssignmentEntity.inspectorUid, inspectorUid)
        .where(labelingAssignmentEntity.assignmentUid.eq(assignmentUid))
        .execute();
  }
@Override
public List<UserList> availUserList(String role) {
return queryFactory
.select(
Projections.constructor(
LabelAllocateDto.UserList.class,
memberEntity.userRole,
memberEntity.employeeNo,
memberEntity.name))
.from(memberEntity)
.where(
memberEntity.userRole.eq(role),
memberEntity.status.eq(com.kamco.cd.kamcoback.common.enums.StatusType.ACTIVE.getId()))
.orderBy(memberEntity.name.asc())
.fetch();
}
/**
 * Per-worker assignment statistics (total / completed / skipped / remaining) for either
 * labelers or reviewers, with optional name/employee-number search and sorting.
 *
 * @param analUid analysis run id; null means all runs
 * @param workerType "REVIEWER" selects inspector columns, anything else selects worker columns
 * @param search optional substring matched against member name or employee number
 * @param sortType one of REMAINING_/COMPLETED_/NAME_ ASC|DESC; null or unknown falls back to name asc
 * @return one row per worker; {@code history} is left null and filled by the service layer
 */
@Override
public List<WorkerStatistics> findWorkerStatistics(
    Long analUid, String workerType, String search, String sortType) {
  // Pick the id column matching the requested worker type.
  StringExpression workerIdField =
      "REVIEWER".equals(workerType)
          ? labelingAssignmentEntity.inspectorUid
          : labelingAssignmentEntity.workerUid;
  BooleanExpression workerCondition =
      "REVIEWER".equals(workerType)
          ? labelingAssignmentEntity.inspectorUid.isNotNull()
          : labelingAssignmentEntity.workerUid.isNotNull();
  // Search condition (by member name or employee number).
  BooleanExpression searchCondition = null;
  if (search != null && !search.isEmpty()) {
    searchCondition =
        memberEntity.name.contains(search).or(memberEntity.employeeNo.contains(search));
  }
  // Completed / skipped / remaining counters as conditional sums.
  NumberExpression<Long> completedSum =
      new CaseBuilder()
          .when(labelingAssignmentEntity.workState.eq("DONE"))
          .then(1L)
          .otherwise(0L)
          .sum();
  NumberExpression<Long> skippedSum =
      new CaseBuilder()
          .when(labelingAssignmentEntity.workState.eq("SKIP"))
          .then(1L)
          .otherwise(0L)
          .sum();
  // Remaining = any non-null state other than DONE/SKIP.
  NumberExpression<Long> remainingSum =
      new CaseBuilder()
          .when(
              labelingAssignmentEntity
                  .workState
                  .notIn("DONE", "SKIP")
                  .and(labelingAssignmentEntity.workState.isNotNull()))
          .then(1L)
          .otherwise(0L)
          .sum();
  // Base statistics query; analUid filter is optional.
  BooleanExpression analUidCondition =
      analUid != null ? labelingAssignmentEntity.analUid.eq(analUid) : null;
  var baseQuery =
      queryFactory
          .select(
              workerIdField,
              memberEntity.name,
              workerIdField.count(),
              completedSum,
              skippedSum,
              remainingSum,
              labelingAssignmentEntity.stagnationYn.max())
          .from(labelingAssignmentEntity)
          .leftJoin(memberEntity)
          .on(
              "REVIEWER".equals(workerType)
                  ? memberEntity.employeeNo.eq(labelingAssignmentEntity.inspectorUid)
                  : memberEntity.employeeNo.eq(labelingAssignmentEntity.workerUid))
          .where(analUidCondition, workerCondition, searchCondition)
          .groupBy(workerIdField, memberEntity.name);
  // Apply the requested ordering; default is name ascending.
  if (sortType != null) {
    switch (sortType) {
      case "REMAINING_DESC":
        baseQuery.orderBy(remainingSum.desc());
        break;
      case "REMAINING_ASC":
        baseQuery.orderBy(remainingSum.asc());
        break;
      case "COMPLETED_DESC":
        baseQuery.orderBy(completedSum.desc());
        break;
      case "COMPLETED_ASC":
        baseQuery.orderBy(completedSum.asc());
        break;
      case "NAME_ASC":
        baseQuery.orderBy(memberEntity.name.asc());
        break;
      case "NAME_DESC":
        baseQuery.orderBy(memberEntity.name.desc());
        break;
      default:
        baseQuery.orderBy(memberEntity.name.asc());
    }
  } else {
    baseQuery.orderBy(memberEntity.name.asc());
  }
  // Map tuples to DTOs.
  return baseQuery.fetch().stream()
      .map(
          tuple -> {
            // MAX(stagnationYn): 'Y' wins over 'N', flagging the worker as stagnated.
            Character maxStagnationYn = tuple.get(labelingAssignmentEntity.stagnationYn.max());
            return WorkerStatistics.builder()
                .workerId(tuple.get(workerIdField))
                .workerName(tuple.get(memberEntity.name))
                .workerType(workerType)
                .totalAssigned(tuple.get(workerIdField.count()))
                .completed(tuple.get(completedSum))
                .skipped(tuple.get(skippedSum))
                .remaining(tuple.get(remainingSum))
                .history(null) // 3-day history is filled by the service layer
                .isStagnated(maxStagnationYn != null && maxStagnationYn == 'Y')
                .build();
          })
      .toList();
}
/**
 * Aggregates labeling and inspection progress for one run (or all runs when analUid is null):
 * totals, completion/skip counts, distinct worker/inspector head counts, progress rates and a
 * status label, plus deprecated legacy fields kept for backward compatibility.
 *
 * @param analUid analysis run id; null aggregates across every run
 * @return populated progress DTO (never null)
 */
@Override
public WorkProgressInfo findWorkProgressInfo(Long analUid) {
  // Optional run filter; null is ignored by QueryDSL's where().
  BooleanExpression analUidCondition =
      analUid != null ? labelingAssignmentEntity.analUid.eq(analUid) : null;
  // Total number of assignments.
  Long totalAssigned =
      queryFactory
          .select(labelingAssignmentEntity.count())
          .from(labelingAssignmentEntity)
          .where(analUidCondition)
          .fetchOne();
  // === Labeling statistics ===
  // Labeling complete: LABEL_FIN, TEST_ING, DONE (inspection included).
  Long labelingCompleted =
      queryFactory
          .select(labelingAssignmentEntity.count())
          .from(labelingAssignmentEntity)
          .where(
              analUidCondition,
              labelingAssignmentEntity.workState.in("LABEL_FIN", "TEST_ING", "DONE"))
          .fetchOne();
  // Skipped assignments.
  Long skipCount =
      queryFactory
          .select(labelingAssignmentEntity.count())
          .from(labelingAssignmentEntity)
          .where(analUidCondition, labelingAssignmentEntity.workState.eq("SKIP"))
          .fetchOne();
  // Distinct labelers involved.
  Long labelerCount =
      queryFactory
          .select(labelingAssignmentEntity.workerUid.countDistinct())
          .from(labelingAssignmentEntity)
          .where(analUidCondition, labelingAssignmentEntity.workerUid.isNotNull())
          .fetchOne();
  // === Inspection statistics ===
  // Inspection complete: DONE only.
  Long inspectionCompleted =
      queryFactory
          .select(labelingAssignmentEntity.count())
          .from(labelingAssignmentEntity)
          .where(analUidCondition, labelingAssignmentEntity.workState.eq("DONE"))
          .fetchOne();
  // Distinct inspectors involved.
  Long inspectorCount =
      queryFactory
          .select(labelingAssignmentEntity.inspectorUid.countDistinct())
          .from(labelingAssignmentEntity)
          .where(analUidCondition, labelingAssignmentEntity.inspectorUid.isNotNull())
          .fetchOne();
  // Remaining work counts (null-safe unboxing first).
  long total = totalAssigned != null ? totalAssigned : 0L;
  long labelCompleted = labelingCompleted != null ? labelingCompleted : 0L;
  long inspectCompleted = inspectionCompleted != null ? inspectionCompleted : 0L;
  long skipped = skipCount != null ? skipCount : 0L;
  long labelingRemaining = total - labelCompleted - skipped;
  long inspectionRemaining = total - inspectCompleted - skipped;
  // Progress rates in percent; guarded against division by zero.
  double labelingRate = total > 0 ? (double) labelCompleted / total * 100 : 0.0;
  double inspectionRate = total > 0 ? (double) inspectCompleted / total * 100 : 0.0;
  // Status labels (runtime strings are user-facing Korean: in-progress / complete).
  String labelingStatus = labelingRemaining > 0 ? "진행중" : "완료";
  String inspectionStatus = inspectionRemaining > 0 ? "진행중" : "완료";
  return WorkProgressInfo.builder()
      // Labeling
      .labelingProgressRate(labelingRate)
      .labelingStatus(labelingStatus)
      .labelingTotalCount(total)
      .labelingCompletedCount(labelCompleted)
      .labelingSkipCount(skipped)
      .labelingRemainingCount(labelingRemaining)
      .labelerCount(labelerCount != null ? labelerCount : 0L)
      // Inspection
      .inspectionProgressRate(inspectionRate)
      .inspectionStatus(inspectionStatus)
      .inspectionTotalCount(total)
      .inspectionCompletedCount(inspectCompleted)
      .inspectionSkipCount(skipped)
      .inspectionRemainingCount(inspectionRemaining)
      .inspectorCount(inspectorCount != null ? inspectorCount : 0L)
      // Legacy compatibility fields (deprecated)
      .progressRate(labelingRate)
      .totalAssignedCount(total)
      .completedCount(labelCompleted)
      .remainingLabelCount(labelingRemaining)
      .remainingInspectCount(inspectionRemaining)
      .workStatus(labelingStatus)
      .build();
}
/**
 * Counts assignments a worker finished (DONE or SKIP) on one calendar day.
 *
 * @param workerId employee number of the labeler or reviewer
 * @param workerType "REVIEWER" matches the inspector column, anything else the worker column
 * @param date calendar day, interpreted in the system default time zone
 * @param analUid optional run filter; null means all runs
 * @return processed count for that day, 0 when none
 */
@Override
public Long findDailyProcessedCount(
    String workerId, String workerType, LocalDate date, Long analUid) {
  // Day boundaries in the system default time zone.
  ZoneId zone = ZoneId.systemDefault();
  ZonedDateTime dayStart = date.atStartOfDay(zone);
  ZonedDateTime dayEnd = date.atTime(LocalTime.MAX).atZone(zone);
  BooleanExpression byWorker =
      "REVIEWER".equals(workerType)
          ? labelingAssignmentEntity.inspectorUid.eq(workerId)
          : labelingAssignmentEntity.workerUid.eq(workerId);
  BooleanExpression byRun =
      analUid != null ? labelingAssignmentEntity.analUid.eq(analUid) : null;
  Long processed =
      queryFactory
          .select(labelingAssignmentEntity.count())
          .from(labelingAssignmentEntity)
          .where(
              byRun,
              byWorker,
              labelingAssignmentEntity.workState.in(
                  LabelState.DONE.getId(), LabelState.SKIP.getId()),
              labelingAssignmentEntity.modifiedDate.between(dayStart, dayEnd))
          .fetchOne();
  return processed == null ? 0L : processed;
}
/**
 * Assigns one inspector to many assignments in a single bulk update.
 *
 * @param assignmentUids assignments to update; an empty or null list is a no-op
 * @param inspectorUid employee number of the inspector
 */
@Override
public void assignInspectorBulk(List<UUID> assignmentUids, String inspectorUid) {
  // Guard: an empty IN () clause is invalid SQL on most databases, and there
  // is nothing to update anyway.
  if (assignmentUids == null || assignmentUids.isEmpty()) {
    return;
  }
  queryFactory
      .update(labelingAssignmentEntity)
      .set(labelingAssignmentEntity.inspectorUid, inspectorUid)
      .where(labelingAssignmentEntity.assignmentUid.in(assignmentUids))
      .execute();
  // Bulk JPQL bypasses the persistence context; clear to avoid stale reads.
  em.clear();
}
/**
 * Detail summary of an inference run: title, apply timestamp, detection count and distinct
 * labeler/inspector head counts.
 *
 * @param uuid public UUID of the inference run (string form)
 * @return detail DTO, or null when the joined aggregate has no rows
 * @throws EntityNotFoundException when no run exists for the given uuid
 */
@Override
public InferenceDetail findInferenceDetail(String uuid) {
  // Resolve the run by its public UUID first.
  MapSheetAnalInferenceEntity analEntity =
      queryFactory
          .selectFrom(mapSheetAnalInferenceEntity)
          .where(mapSheetAnalInferenceEntity.uuid.eq(UUID.fromString(uuid)))
          .fetchOne();
  if (Objects.isNull(analEntity)) {
    // Include the identifier so the failure can be traced (was previously truncated).
    throw new EntityNotFoundException("MapSheetAnalInferenceEntity not found for uuid: " + uuid);
  }
  return queryFactory
      .select(
          Projections.constructor(
              InferenceDetail.class,
              mapSheetAnalEntity.analTitle,
              // NOTE(review): hard-coded constant 4 — presumably a fixed stage/round
              // value for this DTO slot; confirm against InferenceDetail's constructor.
              Expressions.numberTemplate(Integer.class, "{0}", 4),
              mapSheetAnalEntity.gukyuinApplyDttm,
              mapSheetAnalEntity.detectingCnt,
              labelingAssignmentEntity.workerUid.countDistinct(),
              labelingAssignmentEntity.inspectorUid.countDistinct()))
      .from(mapSheetAnalEntity)
      .innerJoin(labelingAssignmentEntity)
      .on(mapSheetAnalEntity.id.eq(labelingAssignmentEntity.analUid))
      .where(mapSheetAnalEntity.id.eq(analEntity.getId()))
      .groupBy(
          mapSheetAnalEntity.analTitle,
          mapSheetAnalEntity.gukyuinApplyDttm,
          mapSheetAnalEntity.detectingCnt)
      .fetchOne();
}
/**
 * Fetches the next batch of inference-geometry ids still ASSIGNED to a user, for keyset
 * pagination (ids strictly greater than {@code lastId}, ascending).
 *
 * @param lastId exclusive lower bound of the previous batch; null starts from the beginning
 * @param batchSize maximum number of ids to return
 * @param compareYyyy comparison year identifying the run
 * @param targetYyyy target year identifying the run
 * @param stage stage identifying the run
 * @param userId worker whose assignments are fetched
 * @return ascending list of geometry ids, at most {@code batchSize} long
 * @throws EntityNotFoundException when no run matches (compareYyyy, targetYyyy, stage)
 */
@Override
public List<Long> fetchNextMoveIds(
    Long lastId,
    Long batchSize,
    Integer compareYyyy,
    Integer targetYyyy,
    Integer stage,
    String userId) {
  MapSheetAnalInferenceEntity analEntity =
      queryFactory
          .selectFrom(mapSheetAnalInferenceEntity)
          .where(
              mapSheetAnalInferenceEntity.compareYyyy.eq(compareYyyy),
              mapSheetAnalInferenceEntity.targetYyyy.eq(targetYyyy),
              mapSheetAnalInferenceEntity.stage.eq(stage))
          .fetchOne();
  if (Objects.isNull(analEntity)) {
    // Include the lookup keys so the failure can be traced (was previously truncated).
    throw new EntityNotFoundException(
        "MapSheetAnalInferenceEntity not found for compareYyyy="
            + compareYyyy
            + ", targetYyyy="
            + targetYyyy
            + ", stage="
            + stage);
  }
  return queryFactory
      .select(labelingAssignmentEntity.inferenceGeomUid)
      .from(labelingAssignmentEntity)
      .where(
          labelingAssignmentEntity.workerUid.eq(userId),
          labelingAssignmentEntity.workState.eq(LabelState.ASSIGNED.getId()),
          labelingAssignmentEntity.analUid.eq(analEntity.getId()),
          // Keyset predicate; null on the first page.
          lastId == null ? null : labelingAssignmentEntity.inferenceGeomUid.gt(lastId))
      .orderBy(labelingAssignmentEntity.inferenceGeomUid.asc())
      .limit(batchSize)
      .fetch();
}
/**
 * Reassigns a batch of geometry assignments to a new worker in one bulk update.
 *
 * @param sub inference-geometry ids to move; an empty or null list is a no-op
 * @param userId new owner's employee number
 */
@Transactional
@Override
public void assignOwnerMove(List<Long> sub, String userId) {
  // Guard: an empty IN () clause is invalid SQL on most databases.
  if (sub == null || sub.isEmpty()) {
    return;
  }
  queryFactory
      .update(labelingAssignmentEntity)
      .set(labelingAssignmentEntity.workerUid, userId)
      .where(labelingAssignmentEntity.inferenceGeomUid.in(sub))
      .execute();
  // Bulk JPQL bypasses the persistence context; clear to avoid stale reads.
  em.clear();
}
/**
 * Detail card for one labeler on one run: role, name, employee number, per-state counts,
 * a completion ratio, first work timestamp and an inspector name.
 *
 * @param userId labeler's employee number
 * @param uuid public UUID of the inference run (string form)
 * @return populated DTO, or null when the labeler has no assignments on the run
 * @throws EntityNotFoundException when no run exists for the given uuid
 */
@Override
public LabelerDetail findLabelerDetail(String userId, String uuid) {
  // CASE ... otherwise(null) + count(): count() only counts non-null case results,
  // so each expression counts rows in exactly that state.
  NumberExpression<Long> assignedCnt =
      new CaseBuilder()
          .when(labelingAssignmentEntity.workState.eq(LabelState.ASSIGNED.getId()))
          .then(1L)
          .otherwise((Long) null)
          .count();
  NumberExpression<Long> skipCnt =
      new CaseBuilder()
          .when(labelingAssignmentEntity.workState.eq(LabelState.SKIP.getId()))
          .then(1L)
          .otherwise((Long) null)
          .count();
  NumberExpression<Long> completeCnt =
      new CaseBuilder()
          .when(labelingAssignmentEntity.workState.eq(LabelState.COMPLETE.getId()))
          .then(1L)
          .otherwise((Long) null)
          .count();
  // NOTE(review): this computes round(totalCount / completeCnt, 2) — total over
  // complete, which looks inverted for a "percent complete" value, and both
  // operands are integers so the DB may perform integer division. Confirm the
  // intended ratio (completeCnt / total * 100?) with the DTO's consumer.
  NumberExpression<Double> percent =
      new CaseBuilder()
          .when(completeCnt.eq(0L))
          .then(0.0)
          .otherwise(
              Expressions.numberTemplate(
                  Double.class,
                  "round({0} / {1}, 2)",
                  labelingAssignmentEntity.count(),
                  completeCnt));
  // Resolve the run by its public UUID.
  MapSheetAnalInferenceEntity analEntity =
      queryFactory
          .selectFrom(mapSheetAnalInferenceEntity)
          .where(mapSheetAnalInferenceEntity.uuid.eq(UUID.fromString(uuid)))
          .fetchOne();
  if (Objects.isNull(analEntity)) {
    throw new EntityNotFoundException("MapSheetAnalInferenceEntity not found for analUid: ");
  }
  // Two aliases of the member table: the worker themselves and their inspector.
  QMemberEntity worker = QMemberEntity.memberEntity;
  QMemberEntity inspector = new QMemberEntity("inspector");
  return queryFactory
      .select(
          Projections.constructor(
              LabelerDetail.class,
              worker.userRole,
              worker.name,
              worker.employeeNo,
              assignedCnt,
              skipCnt,
              completeCnt,
              percent,
              Expressions.constant(0), // TODO: rank — decide whether it is needed at all
              labelingAssignmentEntity.workStatDttm.min(),
              // min() picks an arbitrary-but-deterministic inspector name when several exist.
              inspector.name.min()))
      .from(worker)
      .innerJoin(labelingAssignmentEntity)
      .on(
          worker.employeeNo.eq(labelingAssignmentEntity.workerUid),
          labelingAssignmentEntity.analUid.eq(analEntity.getId()))
      .leftJoin(inspector)
      .on(labelingAssignmentEntity.inspectorUid.eq(inspector.employeeNo))
      .where(worker.employeeNo.eq(userId))
      .groupBy(worker.userRole, worker.name, worker.employeeNo)
      .fetchOne();
}
/**
 * Resolves the surrogate key of the inference run identified by (compareYyyy, targetYyyy,
 * stage).
 *
 * @return the run's id, or null when no such run exists
 */
@Override
public Long findMapSheetAnalInferenceUid(Integer compareYyyy, Integer targetYyyy, Integer stage) {
  BooleanExpression sameRun =
      mapSheetAnalInferenceEntity
          .compareYyyy
          .eq(compareYyyy)
          .and(mapSheetAnalInferenceEntity.targetYyyy.eq(targetYyyy))
          .and(mapSheetAnalInferenceEntity.stage.eq(stage));
  return queryFactory
      .select(mapSheetAnalInferenceEntity.id)
      .from(mapSheetAnalInferenceEntity)
      .where(sameRun)
      .fetchOne();
}
/**
 * Registers an inspector for a run by inserting a mapping row with a freshly generated
 * operator UUID.
 *
 * @param analUid analysis run id
 * @param inspector inspector's employee number
 */
@Override
public void insertInspector(Long analUid, String inspector) {
  queryFactory
      .insert(labelingInspectorEntity)
      .columns(
          labelingInspectorEntity.operatorUid,
          labelingInspectorEntity.analUid,
          labelingInspectorEntity.inspectorUid)
      .values(UUID.randomUUID(), analUid, inspector)
      .execute();
  // DML through queryFactory bypasses the persistence context: flush pending
  // managed changes first, then clear so stale state is not read afterwards.
  em.flush();
  em.clear();
}
/**
 * Project header info for one run: detection-year label ("compare-target"), round (stage),
 * apply/start timestamps and public UUID.
 *
 * @param analUid analysis run id; null short-circuits to null
 * @return populated DTO, or null when the run does not exist
 */
@Override
public ProjectInfo findProjectInfo(Long analUid) {
  if (analUid == null) {
    return null;
  }
  var result =
      queryFactory
          .select(
              mapSheetAnalInferenceEntity.compareYyyy,
              mapSheetAnalInferenceEntity.targetYyyy,
              mapSheetAnalInferenceEntity.stage,
              mapSheetAnalInferenceEntity.gukyuinApplyDttm,
              mapSheetAnalInferenceEntity.createdDttm,
              mapSheetAnalInferenceEntity.uuid)
          .from(mapSheetAnalInferenceEntity)
          .where(mapSheetAnalInferenceEntity.id.eq(analUid))
          .fetchOne();
  if (result == null) {
    return null;
  }
  Integer compareYyyy = result.get(mapSheetAnalInferenceEntity.compareYyyy);
  Integer targetYyyy = result.get(mapSheetAnalInferenceEntity.targetYyyy);
  Integer stage = result.get(mapSheetAnalInferenceEntity.stage);
  ZonedDateTime gukyuinApplyDttm = result.get(mapSheetAnalInferenceEntity.gukyuinApplyDttm);
  ZonedDateTime createdDttm = result.get(mapSheetAnalInferenceEntity.createdDttm);
  UUID uuid = result.get(mapSheetAnalInferenceEntity.uuid);
  // Build the change-detection year label, e.g. "2023-2024".
  String detectionYear =
      (compareYyyy != null && targetYyyy != null) ? compareYyyy + "-" + targetYyyy : null;
  // The round comes from the stage column.
  String round = stage != null ? String.valueOf(stage) : null;
  return ProjectInfo.builder()
      .detectionYear(detectionYear)
      .stage(round)
      .gukyuinApplyDttm(gukyuinApplyDttm)
      .startDttm(createdDttm)
      .uuid(uuid != null ? uuid.toString() : null)
      .build();
}
/**
 * Project header info for the most recent run, picked by target year, then comparison year,
 * then creation time, all descending. Field mapping mirrors {@code findProjectInfo}.
 *
 * @return populated DTO, or null when no runs exist at all
 */
@Override
public ProjectInfo findLatestProjectInfo() {
  // Latest run by (targetYyyy desc, compareYyyy desc, createdDttm desc).
  var result =
      queryFactory
          .select(
              mapSheetAnalInferenceEntity.compareYyyy,
              mapSheetAnalInferenceEntity.targetYyyy,
              mapSheetAnalInferenceEntity.stage,
              mapSheetAnalInferenceEntity.gukyuinApplyDttm,
              mapSheetAnalInferenceEntity.createdDttm,
              mapSheetAnalInferenceEntity.uuid)
          .from(mapSheetAnalInferenceEntity)
          .orderBy(
              mapSheetAnalInferenceEntity.targetYyyy.desc(),
              mapSheetAnalInferenceEntity.compareYyyy.desc(),
              mapSheetAnalInferenceEntity.createdDttm.desc())
          .limit(1)
          .fetchOne();
  if (result == null) {
    return null;
  }
  Integer compareYyyy = result.get(mapSheetAnalInferenceEntity.compareYyyy);
  Integer targetYyyy = result.get(mapSheetAnalInferenceEntity.targetYyyy);
  Integer stage = result.get(mapSheetAnalInferenceEntity.stage);
  ZonedDateTime gukyuinApplyDttm = result.get(mapSheetAnalInferenceEntity.gukyuinApplyDttm);
  ZonedDateTime createdDttm = result.get(mapSheetAnalInferenceEntity.createdDttm);
  UUID uuid = result.get(mapSheetAnalInferenceEntity.uuid);
  // Build the change-detection year label, e.g. "2023-2024".
  String detectionYear =
      (compareYyyy != null && targetYyyy != null) ? compareYyyy + "-" + targetYyyy : null;
  // The round comes from the stage column.
  String round = stage != null ? String.valueOf(stage) : null;
  return ProjectInfo.builder()
      .detectionYear(detectionYear)
      .stage(round)
      .gukyuinApplyDttm(gukyuinApplyDttm)
      .startDttm(createdDttm)
      .uuid(uuid != null ? uuid.toString() : null)
      .build();
}
/**
 * Daily labeling statistics for one labeler on one run, grouped by work-state date,
 * paged by the search request.
 *
 * @param searchReq paging parameters
 * @param uuid public UUID of the inference run (string form)
 * @param userId labeler's employee number
 * @return a page of per-day stat rows; total element count is the number of distinct days
 * @throws EntityNotFoundException when no run exists for the given uuid
 */
@Override
public Page<LabelingStatDto> findLabelerDailyStat(
    LabelAllocateDto.searchReq searchReq, String uuid, String userId) {
  // Grouping key: the work-state date formatted by Postgres.
  Expression<String> workDate =
      Expressions.stringTemplate(
          "TO_CHAR({0}, 'YYYY-MM-DD')", labelingAssignmentEntity.workStatDttm);
  // Per-day row count.
  Expression<Long> dailyTotalCnt = Expressions.numberTemplate(Long.class, "COUNT(*)");
  // Grand total over the whole period (window function over the grouped rows).
  Expression<Long> totalCnt = Expressions.numberTemplate(Long.class, "SUM(COUNT(*)) OVER ()");
  // Per-state counts (Postgres FILTER clause).
  Expression<Long> assignedCnt =
      Expressions.numberTemplate(
          Long.class,
          "COUNT(*) FILTER (WHERE {0} = 'ASSIGNED')",
          labelingAssignmentEntity.workState);
  Expression<Long> skipCnt =
      Expressions.numberTemplate(
          Long.class, "COUNT(*) FILTER (WHERE {0} = 'SKIP')", labelingAssignmentEntity.workState);
  Expression<Long> completeCnt =
      Expressions.numberTemplate(
          Long.class,
          "COUNT(*) FILTER (WHERE {0} = 'COMPLETE')",
          labelingAssignmentEntity.workState);
  // NOTE(review): remainder subtracts per-day skip/complete from the GRAND total,
  // not from the daily total — confirm this is the intended running-remainder.
  Expression<Long> remainCnt =
      Expressions.numberTemplate(Long.class, "({0} - {1} - {2})", totalCnt, skipCnt, completeCnt);
  // Resolve the run by its public UUID.
  MapSheetAnalInferenceEntity analEntity =
      queryFactory
          .selectFrom(mapSheetAnalInferenceEntity)
          .where(mapSheetAnalInferenceEntity.uuid.eq(UUID.fromString(uuid)))
          .fetchOne();
  if (Objects.isNull(analEntity)) {
    // Include the identifier so the failure can be traced (was previously truncated).
    throw new EntityNotFoundException("MapSheetAnalInferenceEntity not found for uuid: " + uuid);
  }
  Pageable pageable = searchReq.toPageable();
  List<LabelingStatDto> foundContent =
      queryFactory
          .select(
              Projections.constructor(
                  LabelingStatDto.class,
                  workDate,
                  dailyTotalCnt,
                  totalCnt, // total assigned over all days
                  assignedCnt,
                  skipCnt,
                  completeCnt,
                  remainCnt))
          .from(labelingAssignmentEntity)
          .where(
              labelingAssignmentEntity.workerUid.eq(userId),
              labelingAssignmentEntity.analUid.eq(analEntity.getId()))
          .groupBy(workDate)
          .orderBy(labelingAssignmentEntity.workStatDttm.min().asc())
          .offset(pageable.getOffset())
          .limit(pageable.getPageSize())
          .fetch();
  // Page total = number of distinct work dates (list size; no need for a Stream).
  long totalDays =
      queryFactory
          .select(workDate)
          .from(labelingAssignmentEntity)
          .where(
              labelingAssignmentEntity.workerUid.eq(userId),
              labelingAssignmentEntity.analUid.eq(analEntity.getId()))
          .distinct()
          .fetch()
          .size();
  return new PageImpl<>(foundContent, pageable, totalDays);
}
/**
 * Daily inspection statistics for one inspector on one run, grouped by inspect-state date,
 * paged by the search request. Mirrors {@code findLabelerDailyStat} but over the
 * inspection columns and states (UNCONFIRM / EXCEPT / COMPLETE).
 *
 * @param searchReq paging parameters
 * @param uuid public UUID of the inference run (string form)
 * @param userId inspector's employee number
 * @return a page of per-day stat rows; total element count is the number of distinct days
 * @throws EntityNotFoundException when no run exists for the given uuid
 */
@Override
public Page<LabelingStatDto> findInspectorDailyStat(
    LabelAllocateDto.searchReq searchReq, String uuid, String userId) {
  // Grouping key: the inspect-state date formatted by Postgres.
  Expression<String> workDate =
      Expressions.stringTemplate(
          "TO_CHAR({0}, 'YYYY-MM-DD')", labelingAssignmentEntity.inspectStatDttm);
  // Per-day row count.
  Expression<Long> dailyTotalCnt = Expressions.numberTemplate(Long.class, "COUNT(*)");
  // Grand total over the whole period (window function over the grouped rows).
  Expression<Long> totalCnt = Expressions.numberTemplate(Long.class, "SUM(COUNT(*)) OVER ()");
  // Per-state counts (Postgres FILTER clause).
  Expression<Long> assignedCnt =
      Expressions.numberTemplate(
          Long.class,
          "COUNT(*) FILTER (WHERE {0} = 'UNCONFIRM')",
          labelingAssignmentEntity.inspectState);
  Expression<Long> skipCnt =
      Expressions.numberTemplate(
          Long.class,
          "COUNT(*) FILTER (WHERE {0} = 'EXCEPT')",
          labelingAssignmentEntity.inspectState);
  Expression<Long> completeCnt =
      Expressions.numberTemplate(
          Long.class,
          "COUNT(*) FILTER (WHERE {0} = 'COMPLETE')",
          labelingAssignmentEntity.inspectState);
  // NOTE(review): remainder subtracts per-day skip/complete from the GRAND total,
  // not from the daily total — confirm this is the intended running-remainder.
  Expression<Long> remainCnt =
      Expressions.numberTemplate(Long.class, "({0} - {1} - {2})", totalCnt, skipCnt, completeCnt);
  // Resolve the run by its public UUID.
  MapSheetAnalInferenceEntity analEntity =
      queryFactory
          .selectFrom(mapSheetAnalInferenceEntity)
          .where(mapSheetAnalInferenceEntity.uuid.eq(UUID.fromString(uuid)))
          .fetchOne();
  if (Objects.isNull(analEntity)) {
    // Include the identifier so the failure can be traced (was previously truncated).
    throw new EntityNotFoundException("MapSheetAnalInferenceEntity not found for uuid: " + uuid);
  }
  Pageable pageable = searchReq.toPageable();
  List<LabelingStatDto> foundContent =
      queryFactory
          .select(
              Projections.constructor(
                  LabelingStatDto.class,
                  workDate,
                  dailyTotalCnt,
                  totalCnt, // total assigned over all days
                  assignedCnt,
                  skipCnt,
                  completeCnt,
                  remainCnt))
          .from(labelingAssignmentEntity)
          .where(
              labelingAssignmentEntity.inspectorUid.eq(userId),
              labelingAssignmentEntity.analUid.eq(analEntity.getId()))
          .groupBy(workDate)
          .orderBy(labelingAssignmentEntity.inspectStatDttm.min().asc())
          .offset(pageable.getOffset())
          .limit(pageable.getPageSize())
          .fetch();
  // Page total = number of distinct inspect dates (list size; no need for a Stream).
  long totalDays =
      queryFactory
          .select(workDate)
          .from(labelingAssignmentEntity)
          .where(
              labelingAssignmentEntity.inspectorUid.eq(userId),
              labelingAssignmentEntity.analUid.eq(analEntity.getId()))
          .distinct()
          .fetch()
          .size();
  return new PageImpl<>(foundContent, pageable, totalDays);
}
/**
 * Detail card for one inspector on one run: role, name, employee number, per-inspect-state
 * counts, a completion ratio, first inspect timestamp and a worker name. Mirrors
 * {@code findLabelerDetail} over the inspection columns.
 *
 * @param userId inspector's employee number
 * @param uuid public UUID of the inference run (string form)
 * @return populated DTO, or null when the inspector has no assignments on the run
 * @throws EntityNotFoundException when no run exists for the given uuid
 */
@Override
public LabelerDetail findInspectorDetail(String userId, String uuid) {
  // CASE ... otherwise(null) + count(): count() only counts non-null case results.
  NumberExpression<Long> assignedCnt =
      new CaseBuilder()
          .when(labelingAssignmentEntity.inspectState.eq(InspectState.UNCONFIRM.getId()))
          .then(1L)
          .otherwise((Long) null)
          .count();
  NumberExpression<Long> skipCnt =
      new CaseBuilder()
          .when(labelingAssignmentEntity.inspectState.eq(InspectState.EXCEPT.getId()))
          .then(1L)
          .otherwise((Long) null)
          .count();
  NumberExpression<Long> completeCnt =
      new CaseBuilder()
          .when(labelingAssignmentEntity.inspectState.eq(InspectState.COMPLETE.getId()))
          .then(1L)
          .otherwise((Long) null)
          .count();
  // NOTE(review): computes round(totalCount / completeCnt, 2) — total over complete,
  // which looks inverted for a "percent complete" value, and both operands are
  // integers so the DB may perform integer division. Confirm the intended ratio.
  NumberExpression<Double> percent =
      new CaseBuilder()
          .when(completeCnt.eq(0L))
          .then(0.0)
          .otherwise(
              Expressions.numberTemplate(
                  Double.class,
                  "round({0} / {1}, 2)",
                  labelingAssignmentEntity.count(),
                  completeCnt));
  // Resolve the run by its public UUID.
  MapSheetAnalInferenceEntity analEntity =
      queryFactory
          .selectFrom(mapSheetAnalInferenceEntity)
          .where(mapSheetAnalInferenceEntity.uuid.eq(UUID.fromString(uuid)))
          .fetchOne();
  if (Objects.isNull(analEntity)) {
    throw new EntityNotFoundException("MapSheetAnalInferenceEntity not found for analUid: ");
  }
  // Two aliases of the member table: the inspector themselves and the worker they inspect.
  QMemberEntity inspector = QMemberEntity.memberEntity;
  QMemberEntity worker = new QMemberEntity("worker");
  return queryFactory
      .select(
          Projections.constructor(
              LabelerDetail.class,
              inspector.userRole,
              inspector.name,
              inspector.employeeNo,
              assignedCnt,
              skipCnt,
              completeCnt,
              percent,
              Expressions.constant(0), // TODO: rank — decide whether it is needed at all
              labelingAssignmentEntity.inspectStatDttm.min(),
              // min() picks an arbitrary-but-deterministic worker name when several exist.
              worker.name.min()))
      .from(inspector)
      .innerJoin(labelingAssignmentEntity)
      .on(
          inspector.employeeNo.eq(labelingAssignmentEntity.inspectorUid),
          labelingAssignmentEntity.analUid.eq(analEntity.getId()))
      .leftJoin(worker)
      .on(labelingAssignmentEntity.workerUid.eq(worker.employeeNo))
      .where(inspector.employeeNo.eq(userId))
      .groupBy(inspector.userRole, inspector.name, inspector.employeeNo)
      .fetchOne();
}
}

View File

@@ -0,0 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.label;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import org.springframework.data.jpa.repository.JpaRepository;
/** Spring Data repository over inference geometry rows, plus the custom label-work queries. */
public interface LabelWorkRepository
    extends JpaRepository<MapSheetAnalDataInferenceGeomEntity, Long>, LabelWorkRepositoryCustom {}

View File

@@ -0,0 +1,21 @@
package com.kamco.cd.kamcoback.postgres.repository.label;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMng;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMngDetail;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.WorkerState;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
import java.util.List;
import java.util.UUID;
import org.springframework.data.domain.Page;
/** Custom QueryDSL queries backing the label work-management screens. */
public interface LabelWorkRepositoryCustom {

  /** Distinct change-detection year pairs for the year select box. */
  List<MapSheetAnalInferenceEntity> findChangeDetectYearList();

  /** Paged label work-management list filtered by the search request. */
  // Redundant "public" modifier removed; interface members are implicitly public.
  Page<LabelWorkMng> labelWorkMngList(LabelWorkDto.LabelWorkMngSearchReq searchReq);

  /** Detail summary of one work assignment identified by its public UUID. */
  LabelWorkMngDetail findLabelWorkMngDetail(UUID uuid);

  /** Paged per-worker progress state list. */
  Page<WorkerState> findlabelWorkStateList(LabelWorkDto.WorkerStateSearchReq searchReq);
}

View File

@@ -0,0 +1,370 @@
package com.kamco.cd.kamcoback.postgres.repository.label;
import static com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity.labelingAssignmentEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMemberEntity.memberEntity;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMng;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMngDetail;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.WorkerState;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberExpression;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.UUID;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
import org.springframework.stereotype.Repository;
@Slf4j
@Repository
public class LabelWorkRepositoryImpl extends QuerydslRepositorySupport
    implements LabelWorkRepositoryCustom {

  private final JPAQueryFactory queryFactory;

  @PersistenceContext private EntityManager em;

  public LabelWorkRepositoryImpl(JPAQueryFactory queryFactory) {
    super(MapSheetAnalDataGeomEntity.class);
    this.queryFactory = queryFactory;
  }

  /**
   * Change-detection year select-box entries.
   *
   * @return one representative run (smallest id) per (compareYyyy, targetYyyy) pair, ordered
   *     by year pair ascending
   */
  @Override
  public List<MapSheetAnalInferenceEntity> findChangeDetectYearList() {
    return queryFactory
        .selectFrom(mapSheetAnalInferenceEntity)
        .where(
            mapSheetAnalInferenceEntity.id.in(
                JPAExpressions.select(mapSheetAnalInferenceEntity.id.min())
                    .from(mapSheetAnalInferenceEntity)
                    .groupBy(
                        mapSheetAnalInferenceEntity.compareYyyy,
                        mapSheetAnalInferenceEntity.targetYyyy)))
        .orderBy(
            mapSheetAnalInferenceEntity.compareYyyy.asc(),
            mapSheetAnalInferenceEntity.targetYyyy.asc())
        .fetch();
  }

  /**
   * Label work-management list: one row per run with counts per label state, paged.
   *
   * @param searchReq paging plus optional "compare-target" year filter and label-state date range
   * @return a page of per-run summary rows
   */
  @Override
  public Page<LabelWorkMng> labelWorkMngList(LabelWorkDto.LabelWorkMngSearchReq searchReq) {
    Pageable pageable = PageRequest.of(searchReq.getPage(), searchReq.getSize());
    BooleanBuilder whereBuilder = new BooleanBuilder();
    BooleanBuilder whereSubDataBuilder = new BooleanBuilder();
    BooleanBuilder whereSubBuilder = new BooleanBuilder();
    // Optional filter on the detection year pair, e.g. "2023-2024".
    if (StringUtils.isNotBlank(searchReq.getDetectYear())) {
      String[] years = searchReq.getDetectYear().split("-");
      if (years.length == 2) {
        Integer compareYear = Integer.valueOf(years[0]);
        Integer targetYear = Integer.valueOf(years[1]);
        whereBuilder.and(
            mapSheetAnalDataInferenceEntity
                .compareYyyy
                .eq(compareYear)
                .and(mapSheetAnalDataInferenceEntity.targetYyyy.eq(targetYear)));
      }
    }
    // Join conditions: run -> data -> geometry.
    whereSubDataBuilder.and(
        mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid));
    whereSubBuilder.and(
        mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id));
    // Optional label-state date range filter (YYYYMMDD strings, inclusive).
    if (searchReq.getStrtDttm() != null
        && !searchReq.getStrtDttm().isEmpty()
        && searchReq.getEndDttm() != null
        && !searchReq.getEndDttm().isEmpty()) {
      whereSubBuilder.and(
          Expressions.stringTemplate(
                  "to_char({0}, 'YYYYMMDD')", mapSheetAnalDataInferenceGeomEntity.labelStateDttm)
              .between(searchReq.getStrtDttm(), searchReq.getEndDttm()));
    }
    List<LabelWorkMng> foundContent =
        queryFactory
            .select(
                Projections.constructor(
                    LabelWorkMng.class,
                    mapSheetAnalInferenceEntity.uuid,
                    mapSheetAnalInferenceEntity.compareYyyy,
                    mapSheetAnalInferenceEntity.targetYyyy,
                    mapSheetAnalInferenceEntity.stage,
                    mapSheetAnalDataInferenceEntity.createdDttm.min(),
                    mapSheetAnalDataInferenceGeomEntity.dataUid.count(),
                    // Geometries with a usable (non-null, non-zero) parcel number.
                    new CaseBuilder()
                        .when(
                            mapSheetAnalDataInferenceGeomEntity
                                .pnu
                                .isNotNull()
                                .and(mapSheetAnalDataInferenceGeomEntity.pnu.ne(0L)))
                        .then(1L)
                        .otherwise(0L)
                        .sum(),
                    new CaseBuilder()
                        .when(mapSheetAnalDataInferenceGeomEntity.labelState.eq("ASSIGNED"))
                        .then(1L)
                        .otherwise(0L)
                        .sum(),
                    new CaseBuilder()
                        .when(mapSheetAnalDataInferenceGeomEntity.labelState.eq("STOP"))
                        .then(1L)
                        .otherwise(0L)
                        .sum(),
                    new CaseBuilder()
                        .when(mapSheetAnalDataInferenceGeomEntity.labelState.eq("LABEL_ING"))
                        .then(1L)
                        .otherwise(0L)
                        .sum(),
                    new CaseBuilder()
                        .when(mapSheetAnalDataInferenceGeomEntity.labelState.eq("LABEL_COMPLETE"))
                        .then(1L)
                        .otherwise(0L)
                        .sum(),
                    mapSheetAnalDataInferenceGeomEntity.labelStateDttm.min()))
            .from(mapSheetAnalInferenceEntity)
            .innerJoin(mapSheetAnalDataInferenceEntity)
            .on(whereSubDataBuilder)
            .innerJoin(mapSheetAnalDataInferenceGeomEntity)
            .on(whereSubBuilder)
            .where(whereBuilder)
            .groupBy(
                mapSheetAnalInferenceEntity.uuid,
                mapSheetAnalInferenceEntity.compareYyyy,
                mapSheetAnalInferenceEntity.targetYyyy,
                mapSheetAnalInferenceEntity.stage)
            .orderBy(
                mapSheetAnalInferenceEntity.targetYyyy.desc(),
                mapSheetAnalInferenceEntity.compareYyyy.desc(),
                mapSheetAnalInferenceEntity.stage.desc())
            .offset(pageable.getOffset())
            .limit(pageable.getPageSize())
            .fetch();
    // Page total: distinct runs matching the same joins/filters.
    Long total =
        queryFactory
            .select(mapSheetAnalInferenceEntity.uuid.countDistinct())
            .from(mapSheetAnalInferenceEntity)
            .innerJoin(mapSheetAnalDataInferenceEntity)
            .on(whereSubDataBuilder)
            .innerJoin(mapSheetAnalDataInferenceGeomEntity)
            .on(whereSubBuilder)
            .where(whereBuilder)
            .fetchOne();
    return new PageImpl<>(foundContent, pageable, total != null ? total : 0L);
  }

  /**
   * Per-worker progress state list with done/skip totals and done counts for each of the last
   * three calendar days, paged.
   *
   * @param searchReq paging plus optional role filter and user-id/name search
   * @return a page of worker state rows
   */
  @Override
  public Page<WorkerState> findlabelWorkStateList(LabelWorkDto.WorkerStateSearchReq searchReq) {
    Pageable pageable = PageRequest.of(searchReq.getPage(), searchReq.getSize());
    BooleanBuilder whereBuilder = new BooleanBuilder();
    BooleanBuilder whereSubBuilder = new BooleanBuilder();
    // Calendar-date pattern. Pattern letters are case-sensitive: 'yyyy-MM-dd' is
    // calendar year-month-day, matching Postgres to_char(..., 'YYYY-MM-DD') used in
    // the query below. The previous pattern "YYYY-MM-DD" used week-based year (Y)
    // and day-of-year (D), producing strings that never match the DB output.
    DateTimeFormatter isoDay = DateTimeFormatter.ofPattern("yyyy-MM-dd");
    LocalDate threeDaysAgo = LocalDate.now().minusDays(3);
    String s3 = threeDaysAgo.format(isoDay);
    LocalDate twoDaysAgo = LocalDate.now().minusDays(2);
    String s2 = twoDaysAgo.format(isoDay);
    LocalDate oneDaysAgo = LocalDate.now().minusDays(1);
    String s1 = oneDaysAgo.format(isoDay);
    if (searchReq.getUserRole() != null && !searchReq.getUserRole().isEmpty()) {
      whereSubBuilder.and(memberEntity.userRole.eq(searchReq.getUserRole()));
    }
    // Search by user id or name, case-insensitive substring.
    if (searchReq.getSearchVal() != null && !searchReq.getSearchVal().isEmpty()) {
      whereSubBuilder.and(
          Expressions.stringTemplate("{0}", memberEntity.userId)
              .likeIgnoreCase("%" + searchReq.getSearchVal() + "%")
              .or(
                  Expressions.stringTemplate("{0}", memberEntity.name)
                      .likeIgnoreCase("%" + searchReq.getSearchVal() + "%")));
    }
    whereSubBuilder.and(labelingAssignmentEntity.workerUid.eq(memberEntity.userId));
    List<WorkerState> foundContent =
        queryFactory
            .select(
                Projections.constructor(
                    WorkerState.class,
                    memberEntity.userRole,
                    memberEntity.name,
                    memberEntity.userId,
                    labelingAssignmentEntity.workerUid.count().as("assignedCnt"),
                    new CaseBuilder()
                        .when(labelingAssignmentEntity.workState.eq("DONE"))
                        .then(1L)
                        .otherwise(0L)
                        .sum()
                        .as("doneCnt"),
                    new CaseBuilder()
                        .when(labelingAssignmentEntity.workState.eq("SKIP"))
                        .then(1L)
                        .otherwise(0L)
                        .sum()
                        .as("skipCnt"),
                    // Done counts per day for the last three calendar days.
                    new CaseBuilder()
                        .when(
                            labelingAssignmentEntity
                                .workState
                                .eq("DONE")
                                .and(
                                    Expressions.stringTemplate(
                                            "to_char({0}, 'YYYY-MM-DD')",
                                            labelingAssignmentEntity.modifiedDate)
                                        .eq(s3)))
                        .then(1L)
                        .otherwise(0L)
                        .sum()
                        .as("day3AgoDoneCnt"),
                    new CaseBuilder()
                        .when(
                            labelingAssignmentEntity
                                .workState
                                .eq("DONE")
                                .and(
                                    Expressions.stringTemplate(
                                            "to_char({0}, 'YYYY-MM-DD')",
                                            labelingAssignmentEntity.modifiedDate)
                                        .eq(s2)))
                        .then(1L)
                        .otherwise(0L)
                        .sum()
                        .as("day2AgoDoneCnt"),
                    new CaseBuilder()
                        .when(
                            labelingAssignmentEntity
                                .workState
                                .eq("DONE")
                                .and(
                                    Expressions.stringTemplate(
                                            "to_char({0}, 'YYYY-MM-DD')",
                                            labelingAssignmentEntity.modifiedDate)
                                        .eq(s1)))
                        .then(1L)
                        .otherwise(0L)
                        .sum()
                        .as("day1AgoDoneCnt")))
            .from(labelingAssignmentEntity)
            .innerJoin(memberEntity)
            .on(whereSubBuilder)
            .where(whereBuilder)
            .groupBy(memberEntity.userRole, memberEntity.name, memberEntity.userId)
            .offset(pageable.getOffset())
            .limit(pageable.getPageSize())
            .fetch();
    // Page total: distinct workers under the same join conditions.
    Long countQuery =
        queryFactory
            .select(labelingAssignmentEntity.workerUid.countDistinct())
            .from(labelingAssignmentEntity)
            .innerJoin(memberEntity)
            .on(whereSubBuilder)
            .where(whereBuilder)
            .fetchOne();
    return new PageImpl<>(foundContent, pageable, countQuery != null ? countQuery : 0L);
  }

  /**
   * Work-assignment detail for one run identified by its public UUID.
   *
   * @param uuid public UUID of the inference run
   * @return detail row with label/worker/inspector counts, or null when the run does not exist
   */
  @Override
  public LabelWorkMngDetail findLabelWorkMngDetail(UUID uuid) {
    NumberExpression<Long> labelTotCnt = mapSheetAnalDataInferenceGeomEntity.geoUid.count();
    NumberExpression<Long> labelerCnt = labelingAssignmentEntity.workerUid.count();
    NumberExpression<Long> reviewerCnt = labelingAssignmentEntity.inspectorUid.count();
    return queryFactory
        .select(
            Projections.constructor(
                LabelWorkMngDetail.class,
                // Detection-year label, e.g. "2023-2024".
                mapSheetAnalInferenceEntity
                    .compareYyyy
                    .stringValue()
                    .concat("-")
                    .concat(mapSheetAnalInferenceEntity.targetYyyy.stringValue()),
                mapSheetAnalInferenceEntity.stage,
                mapSheetAnalInferenceEntity.createdDttm,
                labelTotCnt,
                labelerCnt,
                reviewerCnt))
        .from(mapSheetAnalInferenceEntity)
        .leftJoin(mapSheetAnalDataInferenceEntity)
        .on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
        .leftJoin(mapSheetAnalDataInferenceGeomEntity)
        .on(mapSheetAnalDataInferenceEntity.id.eq(mapSheetAnalDataInferenceGeomEntity.dataUid))
        .leftJoin(labelingAssignmentEntity)
        .on(
            mapSheetAnalDataInferenceGeomEntity.geoUid.eq(
                labelingAssignmentEntity.inferenceGeomUid))
        .where(mapSheetAnalInferenceEntity.uuid.eq(uuid))
        .groupBy(
            mapSheetAnalInferenceEntity.compareYyyy,
            mapSheetAnalInferenceEntity.targetYyyy,
            mapSheetAnalInferenceEntity.stage,
            mapSheetAnalInferenceEntity.createdDttm)
        .fetchOne();
  }
}

View File

@@ -8,7 +8,6 @@ import com.kamco.cd.kamcoback.postgres.entity.MapSheetMngHstEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberExpression;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.validation.Valid;
@@ -76,8 +75,11 @@ public class MapSheetMngFileCheckerRepositoryImpl extends QuerydslRepositorySupp
return new PageImpl<>(foundContent, pageable, countQuery);
}
/*
private NumberExpression<Integer> rowNum() {
return Expressions.numberTemplate(
Integer.class, "row_number() over(order by {0} desc)", mapSheetMngHstEntity.createdDate);
}
*/
}

View File

@@ -1,6 +1,5 @@
package com.kamco.cd.kamcoback.postgres.repository.mapsheet;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx50kEntity.mapInkx50kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity.mapInkx5kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngEntity.mapSheetMngEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngFileEntity.mapSheetMngFileEntity;
@@ -394,9 +393,8 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
queryFactory
.select(mapSheetMngHstEntity.hstUid.count())
.from(mapSheetMngHstEntity)
.innerJoin(mapInkx5kEntity, mapSheetMngHstEntity.mapInkx5kByCode)
.fetchJoin()
.leftJoin(mapInkx5kEntity.mapInkx50k, mapInkx50kEntity)
.innerJoin(mapInkx5kEntity)
.on(mapSheetMngHstEntity.mapSheetNum.eq(mapInkx5kEntity.mapidcdNo))
.where(whereBuilder)
.fetchOne();

View File

@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.common.enums.CommonUseStatus;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto.MapList;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto.searchReq;
import java.util.List;
import java.util.Optional;
import org.springframework.data.domain.Page;
@@ -18,4 +19,7 @@ public interface MapInkx5kRepositoryCustom {
Long findByMapidCdNoExists(String mapidcdNo);
Optional<MapInkx5kEntity> findByMapidCdNoInfo(String mapidcdNo);
Page<MapInkx5kEntity> getSceneListByPage(
CommonUseStatus useInference, String searchVal, searchReq searchReq);
}

View File

@@ -102,6 +102,31 @@ public class MapInkx5kRepositoryImpl extends QuerydslRepositorySupport
.fetchOne());
}
/**
 * Returns one page of 5k map-index scenes, optionally filtered by inference
 * usage status and a map-code/name search term.
 *
 * <p>The parent 50k index is fetch-joined on the content query to avoid lazy
 * loading; the count query repeats the same join and filters without the
 * fetch join.
 *
 * @param useInference inference usage filter; {@code null} disables the filter
 * @param searchVal map code/name search term; {@code null} disables the filter
 * @param searchReq paging parameters
 * @return the requested page, ordered by map sheet number ascending
 */
@Override
public Page<MapInkx5kEntity> getSceneListByPage(
    CommonUseStatus useInference, String searchVal, searchReq searchReq) {
  Pageable pageable = searchReq.toPageable();

  // Content page: scenes plus their eagerly-fetched 50k parent.
  List<MapInkx5kEntity> rows =
      queryFactory
          .selectFrom(mapInkx5kEntity)
          .innerJoin(mapInkx5kEntity.mapInkx50k, mapInkx50kEntity)
          .fetchJoin()
          .where(searchUseInference(useInference), searchValueMapCdNm(searchVal))
          .orderBy(mapInkx5kEntity.mapidcdNo.asc())
          .offset(pageable.getOffset())
          .limit(pageable.getPageSize())
          .fetch();

  // Total row count under the same join and filters.
  Long total =
      queryFactory
          .select(mapInkx5kEntity.count())
          .from(mapInkx5kEntity)
          .innerJoin(mapInkx5kEntity.mapInkx50k, mapInkx50kEntity)
          .where(searchUseInference(useInference), searchValueMapCdNm(searchVal))
          .fetchOne();

  return new PageImpl<>(rows, pageable, total);
}
private BooleanExpression searchUseInference(CommonUseStatus useInference) {
if (Objects.isNull(useInference)) {
return null;