Merge pull request 'feat/dev_251201' (#133) from feat/dev_251201 into develop

Reviewed-on: https://kamco.gitea.gs.dabeeo.com/dabeeo/kamco-dabeeo-backoffice/pulls/133
2026-01-02 22:18:33 +09:00
13 changed files with 859 additions and 251 deletions

View File: com/kamco/cd/kamcoback/postgres/core/LabelAllocateCoreService.java

@@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerStatistics;
@@ -19,22 +20,25 @@ public class LabelAllocateCoreService {
private final LabelAllocateRepository labelAllocateRepository;
public List<Long> fetchNextIds(Long lastId, Long batchSize) {
return labelAllocateRepository.fetchNextIds(lastId, batchSize);
public List<Long> fetchNextIds(
Long lastId, Long batchSize, Integer compareYyyy, Integer targetYyyy, Integer stage) {
return labelAllocateRepository.fetchNextIds(lastId, batchSize, compareYyyy, targetYyyy, stage);
}
public void assignOwner(List<Long> ids, String userId) {
labelAllocateRepository.assignOwner(ids, userId);
public void assignOwner(
List<Long> ids, String userId, Integer compareYyyy, Integer targetYyyy, Integer stage) {
labelAllocateRepository.assignOwner(ids, userId, compareYyyy, targetYyyy, stage);
}
public List<LabelAllocateDto.Basic> findAssignedLabelerList(Long analUid) {
return labelAllocateRepository.findAssignedLabelerList(analUid).stream()
public List<LabelAllocateDto.Basic> findAssignedLabelerList(
Integer compareYyyy, Integer targetYyyy, Integer stage) {
return labelAllocateRepository.findAssignedLabelerList(compareYyyy, targetYyyy, stage).stream()
.map(LabelingAssignmentEntity::toDto)
.toList();
}
public Long findLabelUnAssignedCnt(Long analUid) {
return labelAllocateRepository.findLabelUnAssignedCnt(analUid);
public Long findLabelUnAssignedCnt(Integer stage, Integer compareYyyy, Integer targetYyyy) {
return labelAllocateRepository.findLabelUnAssignedCnt(stage, compareYyyy, targetYyyy);
}
public void assignInspector(UUID assignmentUid, String inspectorUid) {
@@ -68,7 +72,27 @@ public class LabelAllocateCoreService {
labelAllocateRepository.assignInspectorBulk(assignmentUids, inspectorUid);
}
public InferenceDetail findInferenceDetail(Long analUid) {
return labelAllocateRepository.findInferenceDetail(analUid);
public InferenceDetail findInferenceDetail(
Integer compareYyyy, Integer targetYyyy, Integer stage) {
return labelAllocateRepository.findInferenceDetail(compareYyyy, targetYyyy, stage);
}
public Long findLabelUnCompleteCnt(Long analUid) {
return labelAllocateRepository.findLabelUnCompleteCnt(analUid);
}
public List<Long> fetchNextMoveIds(
Long lastId, Long batchSize, Integer compareYyyy, Integer targetYyyy, Integer stage) {
return labelAllocateRepository.fetchNextMoveIds(
lastId, batchSize, compareYyyy, targetYyyy, stage);
}
public void assignOwnerMove(List<Long> sub, String userId) {
labelAllocateRepository.assignOwnerMove(sub, userId);
}
public LabelerDetail findLabelerDetail(
String userId, Integer compareYyyy, Integer targetYyyy, Integer stage) {
return labelAllocateRepository.findLabelerDetail(userId, compareYyyy, targetYyyy, stage);
}
}
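
For reference, a minimal sketch of how the re-keyed service could be driven. The runner class, the batch size, and the loop bookkeeping below are illustrative assumptions; only the LabelAllocateCoreService methods shown in this diff come from the change.

// Hypothetical batch runner; only the LabelAllocateCoreService calls are from the change above.
import com.kamco.cd.kamcoback.postgres.core.LabelAllocateCoreService;
import java.util.List;

public class LabelAssignRunner {

    private final LabelAllocateCoreService labelAllocateCoreService;

    public LabelAssignRunner(LabelAllocateCoreService labelAllocateCoreService) {
        this.labelAllocateCoreService = labelAllocateCoreService;
    }

    /** Assigns every unassigned geom of one analysis run to a single labeler, batch by batch. */
    public void assignAll(String userId, Integer compareYyyy, Integer targetYyyy, Integer stage) {
        final Long batchSize = 500L; // illustrative value

        // The analysis run is now identified by (compareYyyy, targetYyyy, stage) instead of analUid.
        Long remaining =
            labelAllocateCoreService.findLabelUnAssignedCnt(stage, compareYyyy, targetYyyy);

        while (remaining != null && remaining > 0) {
            // fetchNextIds only returns rows whose labelState is still null, and assignOwner
            // flips those rows to ASSIGNED, so each pass naturally shrinks the candidate set.
            List<Long> ids =
                labelAllocateCoreService.fetchNextIds(null, batchSize, compareYyyy, targetYyyy, stage);
            if (ids.isEmpty()) {
                break;
            }
            labelAllocateCoreService.assignOwner(ids, userId, compareYyyy, targetYyyy, stage);
            remaining -= ids.size();
        }
    }
}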

View File: com/kamco/cd/kamcoback/postgres/core/LabelWorkCoreService.java

@@ -0,0 +1,19 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMng;
import com.kamco.cd.kamcoback.postgres.repository.label.LabelWorkRepository;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
public class LabelWorkCoreService {
private final LabelWorkRepository labelWorkRepository;
public Page<LabelWorkMng> labelWorkMngList(LabelWorkDto.LabelWorkMngSearchReq searchReq) {
return labelWorkRepository.labelWorkMngList(searchReq);
}
}

View File: com/kamco/cd/kamcoback/postgres/repository/label/LabelAllocateRepositoryCustom.java

@@ -1,6 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.label;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerStatistics;
@@ -11,13 +12,16 @@ import java.util.UUID;
public interface LabelAllocateRepositoryCustom {
List<Long> fetchNextIds(Long lastId, Long batchSize);
List<Long> fetchNextIds(
Long lastId, Long batchSize, Integer compareYyyy, Integer targetYyyy, Integer stage);
void assignOwner(List<Long> ids, String userId);
void assignOwner(
List<Long> ids, String userId, Integer compareYyyy, Integer targetYyyy, Integer stage);
List<LabelingAssignmentEntity> findAssignedLabelerList(Long analUid);
List<LabelingAssignmentEntity> findAssignedLabelerList(
Integer compareYyyy, Integer targetYyyy, Integer stage);
Long findLabelUnAssignedCnt(Long analUid);
Long findLabelUnAssignedCnt(Integer stage, Integer compareYyyy, Integer targetYyyy);
void assignInspector(UUID assignmentUid, String userId);
@@ -35,5 +39,15 @@ public interface LabelAllocateRepositoryCustom {
void assignInspectorBulk(List<UUID> assignmentUids, String inspectorUid);
InferenceDetail findInferenceDetail(Long analUid);
InferenceDetail findInferenceDetail(Integer compareYyyy, Integer targetYyyy, Integer stage);
public List<Long> fetchNextMoveIds(
Long lastId, Long batchSize, Integer compareYyyy, Integer targetYyyy, Integer stage);
Long findLabelUnCompleteCnt(Long analUid);
void assignOwnerMove(List<Long> sub, String userId);
LabelerDetail findLabelerDetail(
String userId, Integer compareYyyy, Integer targetYyyy, Integer stage);
}

View File: com/kamco/cd/kamcoback/postgres/repository/label/LabelAllocateRepositoryImpl.java

@@ -1,17 +1,21 @@
package com.kamco.cd.kamcoback.postgres.repository.label;
import static com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity.labelingAssignmentEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity.mapSheetAnalEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMemberEntity.memberEntity;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InspectState;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelState;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerStatistics;
import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
@@ -23,6 +27,7 @@ import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityNotFoundException;
import jakarta.persistence.PersistenceContext;
import jakarta.transaction.Transactional;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZoneId;
@@ -44,16 +49,17 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
@PersistenceContext private EntityManager em;
@Override
public List<Long> fetchNextIds(Long lastId, Long batchSize) {
public List<Long> fetchNextIds(
Long lastId, Long batchSize, Integer compareYyyy, Integer targetYyyy, Integer stage) {
return queryFactory
.select(mapSheetAnalDataInferenceGeomEntity.geoUid)
.from(mapSheetAnalDataInferenceGeomEntity)
.where(
// mapSheetAnalDataGeomEntity.pnu.isNotNull(), // TODO: verify after the mockup phase
lastId == null ? null : mapSheetAnalDataInferenceGeomEntity.geoUid.gt(lastId),
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(2022),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(2024),
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(compareYyyy),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(targetYyyy),
mapSheetAnalDataInferenceGeomEntity.stage.eq(stage),
mapSheetAnalDataInferenceGeomEntity.labelState.isNull())
.orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc())
.limit(batchSize)
@@ -61,24 +67,43 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
}
@Override
public void assignOwner(List<Long> ids, String userId) {
public void assignOwner(
List<Long> ids, String userId, Integer compareYyyy, Integer targetYyyy, Integer stage) {
// Look up the analysis record by compareYyyy / targetYyyy / stage
MapSheetAnalDataInferenceEntity analEntity =
queryFactory
.selectFrom(mapSheetAnalDataInferenceEntity)
.where(
mapSheetAnalDataInferenceEntity.compareYyyy.eq(compareYyyy),
mapSheetAnalDataInferenceEntity.targetYyyy.eq(targetYyyy),
mapSheetAnalDataInferenceEntity.stage.eq(stage))
.orderBy(mapSheetAnalDataInferenceEntity.analUid.asc())
.limit(1)
.fetchOne();
if (Objects.isNull(analEntity)) {
throw new EntityNotFoundException("MapSheetAnalEntity not found for analUid: ");
}
// Update the label state in the data_geom table to ASSIGNED
queryFactory
.update(mapSheetAnalDataInferenceGeomEntity)
.set(mapSheetAnalDataInferenceGeomEntity.labelState, LabelState.ASSIGNED.getId())
.set(mapSheetAnalDataInferenceGeomEntity.labelStateDttm, ZonedDateTime.now())
.set(mapSheetAnalDataInferenceGeomEntity.testState, InspectState.UNCONFIRM.getId())
.set(mapSheetAnalDataInferenceGeomEntity.testStateDttm, ZonedDateTime.now())
.where(mapSheetAnalDataInferenceGeomEntity.geoUid.in(ids))
.execute();
// Insert rows into the labeler assignment table
String sql =
"""
insert into tb_labeling_assignment
(assignment_uid, inference_geom_uid, worker_uid,
work_state, assign_group_id, anal_uid)
values (?, ?, ?, ?, ?, ?)
""";
insert into tb_labeling_assignment
(assignment_uid, inference_geom_uid, worker_uid,
work_state, assign_group_id, anal_uid)
values (?, ?, ?, ?, ?, ?)
""";
for (Long geoUid : ids) {
em.createNativeQuery(sql)
@@ -87,7 +112,7 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
.setParameter(3, userId)
.setParameter(4, LabelState.ASSIGNED.getId())
.setParameter(5, "")
.setParameter(6, 3)
.setParameter(6, analEntity.getAnalUid())
.executeUpdate();
}
@@ -96,11 +121,28 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
}
@Override
public List<LabelingAssignmentEntity> findAssignedLabelerList(Long analUid) {
public List<LabelingAssignmentEntity> findAssignedLabelerList(
Integer compareYyyy, Integer targetYyyy, Integer stage) {
// Look up the analysis record by compareYyyy / targetYyyy / stage
MapSheetAnalDataInferenceEntity analEntity =
queryFactory
.selectFrom(mapSheetAnalDataInferenceEntity)
.where(
mapSheetAnalDataInferenceEntity.compareYyyy.eq(compareYyyy),
mapSheetAnalDataInferenceEntity.targetYyyy.eq(targetYyyy),
mapSheetAnalDataInferenceEntity.stage.eq(stage))
.orderBy(mapSheetAnalDataInferenceEntity.analUid.asc())
.limit(1)
.fetchOne();
if (Objects.isNull(analEntity)) {
throw new EntityNotFoundException("MapSheetAnalEntity not found for analUid: ");
}
return queryFactory
.selectFrom(labelingAssignmentEntity)
.where(
labelingAssignmentEntity.analUid.eq(analUid),
labelingAssignmentEntity.analUid.eq(analEntity.getAnalUid()),
labelingAssignmentEntity.workState.eq(LabelState.ASSIGNED.getId()),
labelingAssignmentEntity.inspectorUid.isNull())
.orderBy(labelingAssignmentEntity.workerUid.asc())
@@ -108,24 +150,15 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
}
@Override
public Long findLabelUnAssignedCnt(Long analUid) {
MapSheetAnalEntity entity =
queryFactory
.selectFrom(mapSheetAnalEntity)
.where(mapSheetAnalEntity.id.eq(analUid))
.fetchOne();
if (Objects.isNull(entity)) {
throw new EntityNotFoundException();
}
public Long findLabelUnAssignedCnt(Integer stage, Integer compareYyyy, Integer targetYyyy) {
return queryFactory
.select(mapSheetAnalDataInferenceGeomEntity.geoUid.count())
.from(mapSheetAnalDataInferenceGeomEntity)
.where(
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(entity.getCompareYyyy()),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(entity.getTargetYyyy()),
mapSheetAnalDataInferenceGeomEntity.stage.eq(4), // TODO: should probably read the stage (round) column
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(compareYyyy),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(targetYyyy),
mapSheetAnalDataInferenceGeomEntity.stage.eq(stage),
mapSheetAnalDataInferenceGeomEntity.labelState.isNull())
.fetchOne();
}
@@ -149,7 +182,9 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
memberEntity.employeeNo,
memberEntity.name))
.from(memberEntity)
.where(memberEntity.userRole.eq(role), memberEntity.status.eq("ACTIVE"))
.where(
memberEntity.userRole.eq(role),
memberEntity.status.eq(com.kamco.cd.kamcoback.common.enums.StatusType.ACTIVE.getId()))
.orderBy(memberEntity.name.asc())
.fetch();
}
@@ -164,12 +199,12 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
// Pick the ID field according to the worker type
StringExpression workerIdField =
"INSPECTOR".equals(workerType)
"REVIEWER".equals(workerType)
? labelingAssignmentEntity.inspectorUid
: labelingAssignmentEntity.workerUid;
BooleanExpression workerCondition =
"INSPECTOR".equals(workerType)
"REVIEWER".equals(workerType)
? labelingAssignmentEntity.inspectorUid.isNotNull()
: labelingAssignmentEntity.workerUid.isNotNull();
@@ -223,7 +258,7 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
.from(labelingAssignmentEntity)
.leftJoin(memberEntity)
.on(
"INSPECTOR".equals(workerType)
"REVIEWER".equals(workerType)
? memberEntity.employeeNo.eq(labelingAssignmentEntity.inspectorUid)
: memberEntity.employeeNo.eq(labelingAssignmentEntity.workerUid))
.where(labelingAssignmentEntity.analUid.eq(analUid), workerCondition, searchCondition)
@@ -364,7 +399,7 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
ZonedDateTime endOfDay = date.atTime(LocalTime.MAX).atZone(ZoneId.systemDefault());
BooleanExpression workerCondition =
"INSPECTOR".equals(workerType)
"REVIEWER".equals(workerType)
? labelingAssignmentEntity.inspectorUid.eq(workerId)
: labelingAssignmentEntity.workerUid.eq(workerId);
@@ -375,7 +410,8 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
.where(
labelingAssignmentEntity.analUid.eq(analUid),
workerCondition,
labelingAssignmentEntity.workState.in("DONE", "SKIP"),
labelingAssignmentEntity.workState.in(
LabelState.DONE.getId(), LabelState.SKIP.getId()),
labelingAssignmentEntity.modifiedDate.between(startOfDay, endOfDay))
.fetchOne();
@@ -394,7 +430,20 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
}
@Override
public InferenceDetail findInferenceDetail(Long analUid) {
public InferenceDetail findInferenceDetail(
Integer compareYyyy, Integer targetYyyy, Integer stage) {
// Look up the analysis record by compareYyyy / targetYyyy / stage
MapSheetAnalDataInferenceEntity analEntity =
queryFactory
.selectFrom(mapSheetAnalDataInferenceEntity)
.where(
mapSheetAnalDataInferenceEntity.compareYyyy.eq(compareYyyy),
mapSheetAnalDataInferenceEntity.targetYyyy.eq(targetYyyy),
mapSheetAnalDataInferenceEntity.stage.eq(stage))
.orderBy(mapSheetAnalDataInferenceEntity.analUid.asc())
.limit(1)
.fetchOne();
if (Objects.isNull(analEntity)) {
throw new EntityNotFoundException(
"MapSheetAnalDataInferenceEntity not found for compareYyyy=" + compareYyyy
+ ", targetYyyy=" + targetYyyy + ", stage=" + stage);
}
return queryFactory
.select(
Projections.constructor(
@@ -408,11 +457,139 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
.from(mapSheetAnalEntity)
.innerJoin(labelingAssignmentEntity)
.on(mapSheetAnalEntity.id.eq(labelingAssignmentEntity.analUid))
.where(mapSheetAnalEntity.id.eq(analUid))
.where(mapSheetAnalEntity.id.eq(analEntity.getAnalUid()))
.groupBy(
mapSheetAnalEntity.analTitle,
mapSheetAnalEntity.gukyuinApplyDttm,
mapSheetAnalEntity.detectingCnt)
.fetchOne();
}
@Override
public List<Long> fetchNextMoveIds(
Long lastId, Long batchSize, Integer compareYyyy, Integer targetYyyy, Integer stage) {
return queryFactory
.select(mapSheetAnalDataInferenceGeomEntity.geoUid)
.from(mapSheetAnalDataInferenceGeomEntity)
.where(
// mapSheetAnalDataGeomEntity.pnu.isNotNull(), // TODO: verify after the mockup phase
lastId == null ? null : mapSheetAnalDataInferenceGeomEntity.geoUid.gt(lastId),
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(compareYyyy),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(targetYyyy),
mapSheetAnalDataInferenceGeomEntity.stage.eq(stage),
mapSheetAnalDataInferenceGeomEntity.labelState.in(
LabelState.ASSIGNED.getId(), LabelState.SKIP.getId()))
.orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc())
.limit(batchSize)
.fetch();
}
@Override
public Long findLabelUnCompleteCnt(Long analUid) {
MapSheetAnalEntity entity =
queryFactory
.selectFrom(mapSheetAnalEntity)
.where(mapSheetAnalEntity.id.eq(analUid))
.fetchOne();
if (Objects.isNull(entity)) {
throw new EntityNotFoundException();
}
return queryFactory
.select(mapSheetAnalDataInferenceGeomEntity.geoUid.count())
.from(mapSheetAnalDataInferenceGeomEntity)
.where(
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(entity.getCompareYyyy()),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(entity.getTargetYyyy()),
mapSheetAnalDataInferenceGeomEntity.stage.eq(4), // TODO: should probably read the stage (round) column
mapSheetAnalDataInferenceGeomEntity.labelState.in(
LabelState.ASSIGNED.getId(), LabelState.SKIP.getId()))
.fetchOne();
}
@Transactional
@Override
public void assignOwnerMove(List<Long> sub, String userId) {
queryFactory
.update(labelingAssignmentEntity)
.set(labelingAssignmentEntity.workerUid, userId)
.where(labelingAssignmentEntity.inferenceGeomUid.in(sub))
.execute();
em.clear();
}
@Override
public LabelerDetail findLabelerDetail(
String userId, Integer compareYyyy, Integer targetYyyy, Integer stage) {
NumberExpression<Long> assignedCnt =
new CaseBuilder()
.when(labelingAssignmentEntity.workState.eq(LabelState.ASSIGNED.getId()))
.then(1L)
.otherwise((Long) null)
.count();
NumberExpression<Long> skipCnt =
new CaseBuilder()
.when(labelingAssignmentEntity.workState.eq(LabelState.SKIP.getId()))
.then(1L)
.otherwise((Long) null)
.count();
NumberExpression<Long> completeCnt =
new CaseBuilder()
.when(labelingAssignmentEntity.workState.eq(LabelState.COMPLETE.getId()))
.then(1L)
.otherwise((Long) null)
.count();
NumberExpression<Double> percent =
new CaseBuilder()
.when(completeCnt.eq(0L))
.then(0.0)
.otherwise(
Expressions.numberTemplate(
Double.class,
"round({0} / {1}, 2)",
labelingAssignmentEntity.count(),
completeCnt));
// Look up the analysis record by compareYyyy / targetYyyy / stage
MapSheetAnalDataInferenceEntity analEntity =
queryFactory
.selectFrom(mapSheetAnalDataInferenceEntity)
.where(
mapSheetAnalDataInferenceEntity.compareYyyy.eq(compareYyyy),
mapSheetAnalDataInferenceEntity.targetYyyy.eq(targetYyyy),
mapSheetAnalDataInferenceEntity.stage.eq(stage))
.orderBy(mapSheetAnalDataInferenceEntity.analUid.asc())
.limit(1)
.fetchOne();
if (Objects.isNull(analEntity)) {
throw new EntityNotFoundException("MapSheetAnalEntity not found for analUid: ");
}
return queryFactory
.select(
Projections.constructor(
LabelerDetail.class,
memberEntity.userRole,
memberEntity.name,
memberEntity.employeeNo,
assignedCnt,
skipCnt,
completeCnt,
percent))
.from(memberEntity)
.innerJoin(labelingAssignmentEntity)
.on(
memberEntity.employeeNo.eq(labelingAssignmentEntity.workerUid),
labelingAssignmentEntity.analUid.eq(analEntity.getAnalUid()))
.where(memberEntity.employeeNo.eq(userId))
.groupBy(memberEntity.userRole, memberEntity.name, memberEntity.employeeNo)
.fetchOne();
}
}
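
The implementation above also adds a "move" path (fetchNextMoveIds / assignOwnerMove) that re-points geoms already in ASSIGNED or SKIP state to another labeler. A minimal single-batch sketch through the core service follows; the surrounding class, the batch size, and the newUserId parameter are illustrative assumptions, while the service calls are the ones added in this change.

// Hedged sketch: reassign one batch of ASSIGNED/SKIP geoms to another labeler and read back
// that labeler's progress. Only the LabelAllocateCoreService methods are from this change.
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
import com.kamco.cd.kamcoback.postgres.core.LabelAllocateCoreService;
import java.util.List;

public class LabelMoveExample {

    public LabelerDetail moveOneBatch(
            LabelAllocateCoreService service,
            String newUserId,
            Integer compareYyyy,
            Integer targetYyyy,
            Integer stage) {
        // Candidates are geoms whose labelState is ASSIGNED or SKIP (see fetchNextMoveIds above).
        List<Long> moveIds = service.fetchNextMoveIds(null, 100L, compareYyyy, targetYyyy, stage);
        if (!moveIds.isEmpty()) {
            // Re-points tb_labeling_assignment.worker_uid for those geoms to the new labeler.
            service.assignOwnerMove(moveIds, newUserId);
        }
        // Per-labeler assigned/skip/complete counts for the same (compareYyyy, targetYyyy, stage) run.
        return service.findLabelerDetail(newUserId, compareYyyy, targetYyyy, stage);
    }
}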

View File: com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepository.java

@@ -0,0 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.label;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface LabelWorkRepository
extends JpaRepository<MapSheetAnalDataInferenceGeomEntity, Long>, LabelWorkRepositoryCustom {}

View File: com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryCustom.java

@@ -0,0 +1,10 @@
package com.kamco.cd.kamcoback.postgres.repository.label;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMng;
import org.springframework.data.domain.Page;
public interface LabelWorkRepositoryCustom {
public Page<LabelWorkMng> labelWorkMngList(LabelWorkDto.LabelWorkMngSearchReq searchReq);
}

View File: com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java

@@ -0,0 +1,138 @@
package com.kamco.cd.kamcoback.postgres.repository.label;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMng;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import java.time.format.DateTimeFormatter;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
import org.springframework.stereotype.Repository;
@Slf4j
@Repository
public class LabelWorkRepositoryImpl extends QuerydslRepositorySupport
implements LabelWorkRepositoryCustom {
private final JPAQueryFactory queryFactory;
private final StringExpression NULL_STRING = Expressions.stringTemplate("cast(null as text)");
@PersistenceContext private EntityManager em;
public LabelWorkRepositoryImpl(JPAQueryFactory queryFactory) {
super(MapSheetAnalDataGeomEntity.class);
this.queryFactory = queryFactory;
}
@Override
public Page<LabelWorkMng> labelWorkMngList(LabelWorkDto.LabelWorkMngSearchReq searchReq) {
Pageable pageable = PageRequest.of(searchReq.getPage(), searchReq.getSize());
BooleanBuilder whereBuilder = new BooleanBuilder();
BooleanBuilder whereSubBuilder = new BooleanBuilder();
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMdd");
if (searchReq.getDetectYyyy() != null) {
whereBuilder.and(mapSheetAnalDataInferenceEntity.targetYyyy.eq(searchReq.getDetectYyyy()));
}
// mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id)
whereSubBuilder.and(
mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id));
if (searchReq.getStrtDttm() != null
&& !searchReq.getStrtDttm().isEmpty()
&& searchReq.getEndDttm() != null
&& !searchReq.getEndDttm().isEmpty()) {
// whereSubBuilder.and(mapSheetAnalDataInferenceGeomEntity.labelStateDttm.isNotNull());
whereSubBuilder.and(
Expressions.stringTemplate(
"to_char({0}, 'YYYYMMDD')", mapSheetAnalDataInferenceGeomEntity.labelStateDttm)
.between(searchReq.getStrtDttm(), searchReq.getEndDttm()));
// whereBuilder.and(mapSheetAnalDataInferenceGeomEntity.labelStateDttm.min().isNotNull());
}
List<LabelWorkMng> foundContent =
queryFactory
.select(
Projections.constructor(
LabelWorkMng.class,
mapSheetAnalDataInferenceEntity.compareYyyy,
mapSheetAnalDataInferenceEntity.targetYyyy,
mapSheetAnalDataInferenceEntity.stage,
mapSheetAnalDataInferenceEntity.createdDttm.min(),
mapSheetAnalDataInferenceGeomEntity.dataUid.count(),
mapSheetAnalDataInferenceGeomEntity.dataUid.count(),
new CaseBuilder()
.when(mapSheetAnalDataInferenceGeomEntity.labelState.eq("STOP"))
.then(1L)
.otherwise(0L)
.sum(),
new CaseBuilder()
.when(mapSheetAnalDataInferenceGeomEntity.labelState.eq("LABEL_ING"))
.then(1L)
.otherwise(0L)
.sum(),
new CaseBuilder()
.when(mapSheetAnalDataInferenceGeomEntity.labelState.eq("LABEL_COMPLETE"))
.then(1L)
.otherwise(0L)
.sum(),
mapSheetAnalDataInferenceGeomEntity.labelStateDttm.min()))
.from(mapSheetAnalDataInferenceEntity)
.innerJoin(mapSheetAnalDataInferenceGeomEntity)
.on(whereSubBuilder)
.where(whereBuilder)
.groupBy(
mapSheetAnalDataInferenceEntity.compareYyyy,
mapSheetAnalDataInferenceEntity.targetYyyy,
mapSheetAnalDataInferenceEntity.stage)
.orderBy(
mapSheetAnalDataInferenceEntity.targetYyyy.desc(),
mapSheetAnalDataInferenceEntity.stage.desc())
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.fetch();
/*
Long countQuery =
queryFactory
.select(mapSheetAnalDataInferenceEntity.count())
.from(mapSheetAnalDataInferenceEntity)
.leftJoin(mapSheetAnalDataInferenceGeomEntity)
.on(whereSubBuilder)
.where(whereBuilder)
.groupBy(
mapSheetAnalDataInferenceEntity.compareYyyy,
mapSheetAnalDataInferenceEntity.targetYyyy,
mapSheetAnalDataInferenceEntity.stage
)
.fetchOne();
*/
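// NOTE: counts only the rows fetched for the current page (at most pageable.getPageSize()),
// not the total number of (compareYyyy, targetYyyy, stage) groups.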
Long countQuery = foundContent.stream().count();
return new PageImpl<>(foundContent, pageable, countQuery);
}
}
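
For completeness, a hedged usage sketch of the new listing API. The setter-style construction of LabelWorkMngSearchReq is an assumption (only its getters appear in this diff), and the literal values are illustrative; the comments restate the filter semantics visible in the implementation above.

// Hedged sketch of calling the new label-work listing; the DTO setters are assumed
// (only the getters are visible in this change), and the values are illustrative.
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMng;
import com.kamco.cd.kamcoback.postgres.core.LabelWorkCoreService;
import org.springframework.data.domain.Page;

public class LabelWorkListExample {

    public Page<LabelWorkMng> firstPage(LabelWorkCoreService labelWorkCoreService) {
        LabelWorkDto.LabelWorkMngSearchReq searchReq = new LabelWorkDto.LabelWorkMngSearchReq();
        searchReq.setPage(0);              // zero-based page index, fed into PageRequest.of
        searchReq.setSize(20);             // page size
        searchReq.setDetectYyyy(2024);     // optional: filters on targetYyyy
        searchReq.setStrtDttm("20250101"); // optional yyyyMMdd range applied to labelStateDttm
        searchReq.setEndDttm("20251231");
        return labelWorkCoreService.labelWorkMngList(searchReq);
    }
}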