Merge remote-tracking branch 'origin/feat/dev_251201' into feat/dev_251201
@@ -1,6 +1,7 @@
 package com.kamco.cd.kamcoback.postgres.core;
 
 import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
+import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
 import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
 import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
 import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerStatistics;
@@ -18,7 +19,7 @@ public class LabelAllocateCoreService {
 
   private final LabelAllocateRepository labelAllocateRepository;
 
-  public List<Long> fetchNextIds(Long lastId, int batchSize) {
+  public List<Long> fetchNextIds(Long lastId, Long batchSize) {
     return labelAllocateRepository.fetchNextIds(lastId, batchSize);
   }
 
@@ -62,4 +63,12 @@ public class LabelAllocateCoreService {
       String workerId, String workerType, LocalDate date, Long analUid) {
     return labelAllocateRepository.findDailyProcessedCount(workerId, workerType, date, analUid);
   }
+
+  public void assignInspectorBulk(List<UUID> assignmentUids, String inspectorUid) {
+    labelAllocateRepository.assignInspectorBulk(assignmentUids, inspectorUid);
+  }
+
+  public InferenceDetail findInferenceDetail(Long analUid) {
+    return labelAllocateRepository.findInferenceDetail(analUid);
+  }
 }
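LabelAllocateCoreService now takes the batch size as a Long instead of an int and forwards it to the repository unchanged. A minimal caller sketch, purely hypothetical (the runner class, its loop, and the way lastId is advanced are illustrations, not code from this commit):

package com.kamco.cd.kamcoback.postgres.core;

import java.util.List;

// Hypothetical caller, not part of this commit: drives the widened
// fetchNextIds(Long, Long) signature in a simple paging loop.
public class LabelAllocateBatchRunner {

  private final LabelAllocateCoreService labelAllocateCoreService;

  public LabelAllocateBatchRunner(LabelAllocateCoreService labelAllocateCoreService) {
    this.labelAllocateCoreService = labelAllocateCoreService;
  }

  public void run(Long batchSize) {
    Long lastId = 0L;
    while (true) {
      List<Long> ids = labelAllocateCoreService.fetchNextIds(lastId, batchSize);
      if (ids.isEmpty()) {
        break;
      }
      // Hand the ids to whatever allocation step follows in the real flow.
      // How the query interprets lastId is not visible in this diff; advancing it
      // by the last returned id is only an illustration.
      lastId = ids.get(ids.size() - 1);
    }
  }
}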
@@ -103,4 +103,7 @@ public class MapSheetAnalEntity {
   @ColumnDefault("now()")
   @Column(name = "updated_dttm")
   private ZonedDateTime updatedDttm;
+
+  @Column(name = "gukyuin_apply_dttm")
+  private ZonedDateTime gukyuinApplyDttm;
 }
@@ -1,5 +1,6 @@
 package com.kamco.cd.kamcoback.postgres.repository.label;
 
+import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
 import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
 import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
 import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerStatistics;
@@ -10,7 +11,7 @@ import java.util.UUID;
 
 public interface LabelAllocateRepositoryCustom {
 
-  List<Long> fetchNextIds(Long lastId, int batchSize);
+  List<Long> fetchNextIds(Long lastId, Long batchSize);
 
   void assignOwner(List<Long> ids, String userId);
 
@@ -31,4 +32,8 @@ public interface LabelAllocateRepositoryCustom {
 
   // Daily processed count per worker
   Long findDailyProcessedCount(String workerId, String workerType, LocalDate date, Long analUid);
+
+  void assignInspectorBulk(List<UUID> assignmentUids, String inspectorUid);
+
+  InferenceDetail findInferenceDetail(Long analUid);
 }
@@ -6,6 +6,7 @@ import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity.mapSheetAnalEntity;
 import static com.kamco.cd.kamcoback.postgres.entity.QMemberEntity.memberEntity;
 
 import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
+import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
 import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelState;
 import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.UserList;
 import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
@@ -15,6 +16,7 @@ import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity;
 import com.querydsl.core.types.Projections;
 import com.querydsl.core.types.dsl.BooleanExpression;
 import com.querydsl.core.types.dsl.CaseBuilder;
+import com.querydsl.core.types.dsl.Expressions;
 import com.querydsl.core.types.dsl.NumberExpression;
 import com.querydsl.core.types.dsl.StringExpression;
 import com.querydsl.jpa.impl.JPAQueryFactory;
@@ -42,7 +44,7 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCustom {
   @PersistenceContext private EntityManager em;
 
   @Override
-  public List<Long> fetchNextIds(Long lastId, int batchSize) {
+  public List<Long> fetchNextIds(Long lastId, Long batchSize) {
 
     return queryFactory
         .select(mapSheetAnalDataInferenceGeomEntity.geoUid)
@@ -53,7 +55,7 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCustom {
             mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(2022),
             mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(2024),
             mapSheetAnalDataInferenceGeomEntity.labelState.isNull())
-        .orderBy(mapSheetAnalDataInferenceGeomEntity.geoUid.asc())
+        .orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc())
        .limit(batchSize)
        .fetch();
   }
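Both fetchNextIds implementations now pass the boxed batch size straight into QueryDSL's limit(long), which unboxes it, so a null batchSize would surface as a NullPointerException at that call. A defensive sketch, hypothetical helper only (the default is whatever the caller considers sensible):

  // Hypothetical guard, not in this commit: limit(long) takes a primitive,
  // so a null Long batchSize would otherwise throw an NPE when unboxed.
  private static long effectiveBatchSize(Long batchSize, long defaultSize) {
    return (batchSize != null && batchSize > 0) ? batchSize : defaultSize;
  }

With such a helper the call site would read .limit(effectiveBatchSize(batchSize, 1_000L)) instead of .limit(batchSize).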
@@ -65,28 +67,28 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCustom {
     queryFactory
         .update(mapSheetAnalDataInferenceGeomEntity)
         .set(mapSheetAnalDataInferenceGeomEntity.labelState, LabelState.ASSIGNED.getId())
         .set(mapSheetAnalDataInferenceGeomEntity.labelStateDttm, ZonedDateTime.now())
         .where(mapSheetAnalDataInferenceGeomEntity.geoUid.in(ids))
         .execute();
 
+    // Insert into the labeler assignment table
+    String sql =
+        """
+        insert into tb_labeling_assignment
+        (assignment_uid, inference_geom_uid, worker_uid,
+        work_state, assign_group_id, anal_uid)
+        values (?, ?, ?, ?, ?, ?)
+        """;
+
     for (Long geoUid : ids) {
-      queryFactory
-          .insert(labelingAssignmentEntity)
-          .columns(
-              labelingAssignmentEntity.assignmentUid,
-              labelingAssignmentEntity.inferenceGeomUid,
-              labelingAssignmentEntity.workerUid,
-              labelingAssignmentEntity.workState,
-              labelingAssignmentEntity.assignGroupId,
-              labelingAssignmentEntity.analUid)
-          .values(
-              UUID.randomUUID(),
-              geoUid,
-              userId,
-              LabelState.ASSIGNED.getId(),
-              "", // TODO: map sheet number
-              3)
-          .execute();
+      em.createNativeQuery(sql)
+          .setParameter(1, UUID.randomUUID())
+          .setParameter(2, geoUid)
+          .setParameter(3, userId)
+          .setParameter(4, LabelState.ASSIGNED.getId())
+          .setParameter(5, "")
+          .setParameter(6, 3)
+          .executeUpdate();
     }
 
     em.flush();
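Inside the loop above, every geoUid triggers its own createNativeQuery(...).executeUpdate() round trip. If the id lists grow large, the same statement can be pushed through a JDBC batch instead; the sketch below is a hypothetical alternative, not part of this commit, assumes Hibernate as the JPA provider (for Session.doWork), and reuses sql, ids, userId, and LabelState from the hunk above:

    // Hypothetical alternative, not in this commit: send the inserts as one JDBC
    // batch instead of one executeUpdate() per row. Assumes the PostgreSQL driver
    // accepts java.util.UUID for the uuid column via setObject.
    org.hibernate.Session session = em.unwrap(org.hibernate.Session.class);
    session.doWork(
        connection -> {
          try (java.sql.PreparedStatement ps = connection.prepareStatement(sql)) {
            for (Long geoUid : ids) {
              ps.setObject(1, UUID.randomUUID());
              ps.setObject(2, geoUid);
              ps.setString(3, userId);
              ps.setObject(4, LabelState.ASSIGNED.getId());
              ps.setString(5, ""); // TODO placeholder kept from the original
              ps.setObject(6, 3);
              ps.addBatch();
            }
            ps.executeBatch();
          }
        });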
@@ -379,4 +381,38 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCustom {
 
     return count != null ? count : 0L;
   }
+
+  @Override
+  public void assignInspectorBulk(List<UUID> assignmentUids, String inspectorUid) {
+    queryFactory
+        .update(labelingAssignmentEntity)
+        .set(labelingAssignmentEntity.inspectorUid, inspectorUid)
+        .where(labelingAssignmentEntity.assignmentUid.in(assignmentUids))
+        .execute();
+
+    em.clear();
+  }
+
+  @Override
+  public InferenceDetail findInferenceDetail(Long analUid) {
+    return queryFactory
+        .select(
+            Projections.constructor(
+                InferenceDetail.class,
+                mapSheetAnalEntity.analTitle,
+                Expressions.numberTemplate(Integer.class, "{0}", 4),
+                mapSheetAnalEntity.gukyuinApplyDttm,
+                mapSheetAnalEntity.detectingCnt,
+                labelingAssignmentEntity.workerUid.countDistinct(),
+                labelingAssignmentEntity.inspectorUid.countDistinct()))
+        .from(mapSheetAnalEntity)
+        .innerJoin(labelingAssignmentEntity)
+        .on(mapSheetAnalEntity.id.eq(labelingAssignmentEntity.analUid))
+        .where(mapSheetAnalEntity.id.eq(analUid))
+        .groupBy(
+            mapSheetAnalEntity.analTitle,
+            mapSheetAnalEntity.gukyuinApplyDttm,
+            mapSheetAnalEntity.detectingCnt)
+        .fetchOne();
+  }
 }
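Projections.constructor(InferenceDetail.class, ...) resolves a constructor whose parameter list matches the six projected expressions in order, and the DTO itself is not part of this diff. A sketch of a compatible shape, with the field names and the non-obvious types assumed (countDistinct() projects as Long):

  // Assumed shape only: LabelAllocateDto.InferenceDetail is not shown in this diff.
  // Parameter order must match the projection: analTitle, the numberTemplate
  // constant, gukyuinApplyDttm, detectingCnt, then the two countDistinct() values.
  public record InferenceDetail(
      String analTitle,
      Integer inferenceStep, // name assumed; bound to Expressions.numberTemplate(Integer.class, "{0}", 4)
      java.time.ZonedDateTime gukyuinApplyDttm,
      Integer detectingCnt, // type assumed; must match MapSheetAnalEntity.detectingCnt
      Long workerCount, // labelingAssignmentEntity.workerUid.countDistinct()
      Long inspectorCount) {} // labelingAssignmentEntity.inspectorUid.countDistinct()

A plain class with a matching constructor works just as well; Projections.constructor only cares about parameter order and types.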