Merge remote-tracking branch 'origin/feat/infer_dev_260107' into feat/infer_dev_260107

DanielLee committed 2026-01-08 18:10:30 +09:00
34 changed files with 814 additions and 692 deletions

View File

@@ -1,12 +1,16 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetAnalDataInferenceRepository;
import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetLearnRepository;
import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx5kRepository;
import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.constraints.NotNull;
@@ -21,27 +25,30 @@ import org.springframework.transaction.annotation.Transactional;
public class InferenceResultCoreService {
private final MapSheetAnalDataInferenceRepository mapSheetAnalDataRepository;
private final MapSheetLearnRepository mapSheetLearnRepository;
private final MapInkx5kRepository mapInkx5kRepository;
/**
* Inference management > analysis result list query
* Inference management list
*
* @param searchReq
* @param req
* @return
*/
public Page<InferenceResultDto.AnalResList> getInferenceResultList(
InferenceResultDto.SearchReq searchReq) {
return mapSheetAnalDataRepository.getInferenceResultList(searchReq);
public Page<ResultList> getInferenceResultList(InferenceResultDto.SearchListReq req) {
Page<MapSheetLearnEntity> list = mapSheetLearnRepository.getInferenceMgnResultList(req);
return list.map(MapSheetLearnEntity::toDto);
}
/****/
/**
* Analysis result summary info
*
* @param id
* @return
*/
public InferenceResultDto.AnalResSummary getInferenceResultSummary(Long id) {
InferenceResultDto.AnalResSummary summary =
public InferenceDetailDto.AnalResSummary getInferenceResultSummary(Long id) {
InferenceDetailDto.AnalResSummary summary =
mapSheetAnalDataRepository
.getInferenceResultSummary(id)
.orElseThrow(() -> new EntityNotFoundException("Summary info not found. " + id));
@@ -64,8 +71,8 @@ public class InferenceResultCoreService {
* @param searchGeoReq
* @return
*/
public Page<InferenceResultDto.Geom> getInferenceResultGeomList(
Long id, InferenceResultDto.SearchGeoReq searchGeoReq) {
public Page<InferenceDetailDto.Geom> getInferenceResultGeomList(
Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
return mapSheetAnalDataRepository.getInferenceGeomList(id, searchGeoReq);
}
@@ -76,8 +83,8 @@ public class InferenceResultCoreService {
* @return
*/
@Transactional(readOnly = true)
public Page<InferenceResultDto.DetailListEntity> listInferenceResultWithGeom(
@NotNull Long analyId, InferenceResultDto.SearchGeoReq searchReq) {
public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom(
@NotNull Long analyId, InferenceDetailDto.SearchGeoReq searchReq) {
// Fetch the dataIds that belong to the analysis ID.
List<Long> dataIds =
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()

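The rewritten list method now pages MapSheetLearnEntity rows and converts them with Page.map. A minimal caller sketch, assuming a wired InferenceResultCoreService and mutable SearchListReq (neither the controller nor the DTO's setters appear in this diff):

// Hypothetical usage of the new inference-management list API.
InferenceResultDto.SearchListReq req = new InferenceResultDto.SearchListReq();
req.setTitle("detection run"); // assumed setter; the DTO definition is not in this diff
req.setApplyYn("N");           // assumed setter; "N" matches the applyYn filter in the repository impl
Page<InferenceResultDto.ResultList> page =
    inferenceResultCoreService.getInferenceResultList(req);
page.getContent().forEach(System.out::println); // one DTO per MapSheetLearnEntity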
View File

@@ -1,8 +0,0 @@
package com.kamco.cd.kamcoback.postgres.core;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
public class InferenceResultMngCoreService {}

View File

@@ -87,15 +87,8 @@ public class LabelAllocateCoreService {
return labelAllocateRepository.findInferenceDetail(uuid);
}
public List<Long> fetchNextMoveIds(
Long lastId,
Long batchSize,
Integer compareYyyy,
Integer targetYyyy,
Integer stage,
String userId) {
return labelAllocateRepository.fetchNextMoveIds(
lastId, batchSize, compareYyyy, targetYyyy, stage, userId);
public List<Long> fetchNextMoveIds(Long lastId, Long batchSize, String uuid, String userId) {
return labelAllocateRepository.fetchNextMoveIds(lastId, batchSize, uuid, userId);
}
public void assignOwnerMove(List<Long> sub, String userId) {

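fetchNextMoveIds now identifies the analysis by uuid instead of the (compareYyyy, targetYyyy, stage) triple, while keeping keyset-style (lastId, batchSize) paging. A sketch of how a caller might drain the batches; the loop itself is an assumption, only the two service methods appear in this diff:

// Hypothetical batch-move loop built on the new signature.
Long lastId = null;
while (true) {
    List<Long> sub = labelAllocateCoreService.fetchNextMoveIds(lastId, 500L, uuid, userId);
    if (sub.isEmpty()) {
        break; // no more ASSIGNED rows past the cursor
    }
    labelAllocateCoreService.assignOwnerMove(sub, userId);
    lastId = sub.get(sub.size() - 1); // advance the keyset cursor
}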
View File

@@ -3,7 +3,7 @@ package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.common.enums.CommonUseStatus;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto.ApiResponseCode;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto.ResponseObj;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx50kEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx50kRepository;

View File

@@ -3,7 +3,7 @@ package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.common.enums.StatusType;
import com.kamco.cd.kamcoback.common.enums.error.AuthErrorCode;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.common.utils.CommonStringUtils;
import com.kamco.cd.kamcoback.common.utils.StringUtils;
import com.kamco.cd.kamcoback.common.utils.UserUtil;
import com.kamco.cd.kamcoback.members.dto.MembersDto;
import com.kamco.cd.kamcoback.members.dto.MembersDto.AddReq;
@@ -17,7 +17,6 @@ import com.kamco.cd.kamcoback.postgres.repository.members.MembersRepository;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.mindrot.jbcrypt.BCrypt;
import org.springframework.data.domain.Page;
import org.springframework.http.HttpStatus;
@@ -43,7 +42,7 @@ public class MembersCoreService {
}
// Hash the password; the initial password is fixed
String hashedPassword = CommonStringUtils.hashPassword(addReq.getPassword());
String hashedPassword = StringUtils.hashPassword(addReq.getPassword());
MemberEntity memberEntity = new MemberEntity();
memberEntity.setUserId(addReq.getEmployeeNo());
@@ -67,22 +66,22 @@ public class MembersCoreService {
MemberEntity memberEntity =
membersRepository.findByUUID(uuid).orElseThrow(MemberNotFoundException::new);
if (StringUtils.isNotBlank(updateReq.getName())) {
if (org.apache.commons.lang3.StringUtils.isNotBlank(updateReq.getName())) {
memberEntity.setName(updateReq.getName());
}
if (StringUtils.isNotBlank(updateReq.getStatus())) {
if (org.apache.commons.lang3.StringUtils.isNotBlank(updateReq.getStatus())) {
memberEntity.changeStatus(updateReq.getStatus());
}
if (StringUtils.isNotBlank(updateReq.getPassword())) {
if (org.apache.commons.lang3.StringUtils.isNotBlank(updateReq.getPassword())) {
// Validate the password
if (!CommonStringUtils.isValidPassword(updateReq.getPassword())) {
if (!StringUtils.isValidPassword(updateReq.getPassword())) {
throw new CustomApiException("WRONG_PASSWORD", HttpStatus.BAD_REQUEST);
}
String password = CommonStringUtils.hashPassword(updateReq.getPassword());
String password = StringUtils.hashPassword(updateReq.getPassword());
memberEntity.setStatus(StatusType.PENDING.getId());
memberEntity.setLoginFailCount(0);
@@ -107,7 +106,7 @@ public class MembersCoreService {
throw new CustomApiException(AuthErrorCode.LOGIN_PASSWORD_MISMATCH);
}
String password = CommonStringUtils.hashPassword(initReq.getNewPassword());
String password = StringUtils.hashPassword(initReq.getNewPassword());
memberEntity.setPassword(password);
memberEntity.setStatus(StatusType.ACTIVE.getId());

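CommonStringUtils was renamed to the project's own StringUtils, which now collides with org.apache.commons.lang3.StringUtils and forces the fully qualified isNotBlank calls above. The helper itself is not shown in this diff; one plausible shape, given the org.mindrot.jbcrypt import, would be the following sketch (not the project's actual implementation):

package com.kamco.cd.kamcoback.common.utils;

import org.mindrot.jbcrypt.BCrypt;

public final class StringUtils {
    private StringUtils() {}

    // BCrypt with a per-password random salt; gensalt() defaults to cost factor 10.
    public static String hashPassword(String raw) {
        return BCrypt.hashpw(raw, BCrypt.gensalt());
    }

    // Assumed policy (8+ chars, letters and digits); the real rule is not in this diff.
    public static boolean isValidPassword(String raw) {
        return raw != null
            && raw.length() >= 8
            && raw.chars().anyMatch(Character::isLetter)
            && raw.chars().anyMatch(Character::isDigit);
    }
}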
View File

@@ -1,6 +1,6 @@
package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
import com.kamco.cd.kamcoback.postgres.CommonDateEntity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;

View File

@@ -1,8 +1,8 @@
package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.common.enums.CommonUseStatus;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
import com.kamco.cd.kamcoback.postgres.CommonDateEntity;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto.MapListEntity;
import jakarta.persistence.Column;
@@ -71,7 +71,7 @@ public class MapInkx5kEntity extends CommonDateEntity {
this.useInference = useInference;
}
public InferenceResultDto.MapSheet toEntity() {
public InferenceDetailDto.MapSheet toEntity() {
return new MapSheet(mapidcdNo, mapidNm);
}

View File

@@ -1,8 +1,8 @@
package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.common.enums.DetectionClassification;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Clazzes;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Clazzes;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
@@ -160,20 +160,20 @@ public class MapSheetAnalDataInferenceGeomEntity {
@JoinColumn(name = "map_5k_id", referencedColumnName = "fid")
private MapInkx5kEntity map5k;
public InferenceResultDto.DetailListEntity toEntity() {
public InferenceDetailDto.DetailListEntity toEntity() {
DetectionClassification classification = DetectionClassification.fromString(classBeforeCd);
Clazzes comparedClazz = new Clazzes(classification, classBeforeProb);
DetectionClassification classification1 = DetectionClassification.fromString(classAfterCd);
Clazzes targetClazz = new Clazzes(classification1, classAfterProb);
InferenceResultDto.MapSheet mapSheet = map5k != null ? map5k.toEntity() : null;
InferenceDetailDto.MapSheet mapSheet = map5k != null ? map5k.toEntity() : null;
InferenceResultDto.Coordinate coordinate = null;
InferenceDetailDto.Coordinate coordinate = null;
if (geomCenter != null) {
org.locationtech.jts.geom.Point point = (org.locationtech.jts.geom.Point) geomCenter;
coordinate = new InferenceResultDto.Coordinate(point.getX(), point.getY());
coordinate = new InferenceDetailDto.Coordinate(point.getX(), point.getY());
}
return new InferenceResultDto.DetailListEntity(
return new InferenceDetailDto.DetailListEntity(
uuid, cdProb, comparedClazz, targetClazz, mapSheet, coordinate, createdDttm);
}
}

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
@@ -7,6 +8,7 @@ import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import java.util.UUID;
@@ -31,7 +33,16 @@ public class MapSheetLearnEntity {
@ColumnDefault("gen_random_uuid()")
@Column(name = "uuid")
private UUID uuid = UUID.randomUUID();
private UUID uuid;
@Size(max = 200)
@NotNull
@Column(name = "title", nullable = false, length = 200)
private String title;
@Size(max = 10)
@Column(name = "status", length = 10)
private String status;
@Column(name = "m1_model_uid")
private Long m1ModelUid;
@@ -52,6 +63,10 @@ public class MapSheetLearnEntity {
@Column(name = "detect_option", length = 20)
private String detectOption;
@Size(max = 100)
@Column(name = "map_sheet_cnt", length = 100)
private String mapSheetCnt;
@Size(max = 20)
@Column(name = "map_sheet_scope", length = 20)
private String mapSheetScope;
@@ -59,6 +74,12 @@ public class MapSheetLearnEntity {
@Column(name = "detecting_cnt")
private Long detectingCnt;
@Column(name = "infer_start_dttm")
private ZonedDateTime inferStartDttm;
@Column(name = "infer_end_dttm")
private ZonedDateTime inferEndDttm;
@Column(name = "elapsed_time")
private ZonedDateTime elapsedTime;
@@ -70,7 +91,7 @@ public class MapSheetLearnEntity {
@ColumnDefault("now()")
@Column(name = "created_dttm")
private ZonedDateTime createdDttm = ZonedDateTime.now();
private ZonedDateTime createdDttm;
@Column(name = "created_uid")
private Long createdUid;
@@ -81,4 +102,18 @@ public class MapSheetLearnEntity {
@Column(name = "updated_uid")
private Long updatedUid;
public InferenceResultDto.ResultList toDto() {
return new InferenceResultDto.ResultList(
this.uuid,
this.title,
this.status,
this.mapSheetCnt,
this.detectingCnt,
this.inferStartDttm,
this.inferEndDttm,
this.elapsedTime,
this.applyYn,
this.applyDttm);
}
}
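toDto() passes ten fields positionally, so InferenceResultDto.ResultList must declare them in the same order. A record sketch inferred from that mapping; the types follow the entity columns above, and the real DTO may differ:

import java.time.ZonedDateTime;
import java.util.UUID;

// Hypothetical shape of InferenceResultDto.ResultList, inferred from toDto().
public record ResultList(
    UUID uuid,
    String title,
    String status,
    String mapSheetCnt,        // stored as varchar(100) on the entity
    Long detectingCnt,
    ZonedDateTime inferStartDttm,
    ZonedDateTime inferEndDttm,
    ZonedDateTime elapsedTime, // modeled as a timestamp on the entity, not a duration
    Boolean applyYn,
    ZonedDateTime applyDttm) {}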

View File

@@ -1,8 +0,0 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
public interface InferenceResultMngRepositoryCustom {
MapSheetLearnEntity getInferenceMgnResultList();
}

View File

@@ -1,18 +0,0 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class InferenceResultMngRepositoryImpl implements InferenceResultMngRepositoryCustom {
private final JPAQueryFactory queryFactory;
@Override
public MapSheetLearnEntity getInferenceMgnResultList() {
return null;
}
}

View File

@@ -1,10 +1,10 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResSummary;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResList;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResSummary;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import jakarta.validation.constraints.NotNull;
@@ -14,12 +14,12 @@ import org.springframework.data.domain.Page;
public interface MapSheetAnalDataInferenceRepositoryCustom {
Page<AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq);
Page<AnalResList> getInferenceResultList(InferenceDetailDto.SearchReq searchReq);
Optional<AnalResSummary> getInferenceResultSummary(Long id);
Page<InferenceResultDto.Geom> getInferenceGeomList(
Long id, InferenceResultDto.SearchGeoReq searchGeoReq);
Page<InferenceDetailDto.Geom> getInferenceGeomList(
Long id, InferenceDetailDto.SearchGeoReq searchGeoReq);
Page<MapSheetAnalDataInferenceGeomEntity> listInferenceResultWithGeom(
List<Long> dataIds, SearchGeoReq searchReq);

View File

@@ -2,11 +2,11 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResSummary;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResList;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResSummary;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity;
@@ -58,7 +58,7 @@ public class MapSheetAnalDataInferenceRepositoryImpl
* @return
*/
@Override
public Page<AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq) {
public Page<AnalResList> getInferenceResultList(InferenceDetailDto.SearchReq searchReq) {
Pageable pageable = searchReq.toPageable();
// "0000" 전체조회
BooleanBuilder builder = new BooleanBuilder();
@@ -75,7 +75,7 @@ public class MapSheetAnalDataInferenceRepositoryImpl
queryFactory
.select(
Projections.constructor(
InferenceResultDto.AnalResList.class,
InferenceDetailDto.AnalResList.class,
mapSheetAnalInferenceEntity.id,
mapSheetAnalInferenceEntity.analTitle,
mapSheetAnalInferenceEntity.detectingCnt,
@@ -117,12 +117,12 @@ public class MapSheetAnalDataInferenceRepositoryImpl
JPQLQuery<Long> latestVerUidSub =
JPAExpressions.select(tmv.id.max()).from(tmv).where(tmv.modelUid.eq(tmm.id));
Optional<InferenceResultDto.AnalResSummary> content =
Optional<InferenceDetailDto.AnalResSummary> content =
Optional.ofNullable(
queryFactory
.select(
Projections.constructor(
InferenceResultDto.AnalResSummary.class,
InferenceDetailDto.AnalResSummary.class,
mapSheetAnalInferenceEntity.id,
mapSheetAnalInferenceEntity.analTitle,
tmm.modelNm.concat(" ").concat(tmv.modelVer).as("modelInfo"),
@@ -228,7 +228,7 @@ public class MapSheetAnalDataInferenceRepositoryImpl
* @return
*/
@Override
public Page<InferenceResultDto.Geom> getInferenceGeomList(Long id, SearchGeoReq searchGeoReq) {
public Page<InferenceDetailDto.Geom> getInferenceGeomList(Long id, SearchGeoReq searchGeoReq) {
Pageable pageable = searchGeoReq.toPageable();
BooleanBuilder builder = new BooleanBuilder();
@@ -257,11 +257,11 @@ public class MapSheetAnalDataInferenceRepositoryImpl
builder.and(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.in(mapSheetNum));
}
List<InferenceResultDto.Geom> content =
List<InferenceDetailDto.Geom> content =
queryFactory
.select(
Projections.constructor(
InferenceResultDto.Geom.class,
InferenceDetailDto.Geom.class,
mapSheetAnalDataInferenceGeomEntity.compareYyyy,
mapSheetAnalDataInferenceGeomEntity.targetYyyy,
mapSheetAnalDataInferenceGeomEntity.classBeforeCd,

View File

@@ -3,5 +3,5 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface InferenceResultMngRepository
extends JpaRepository<MapSheetLearnEntity, Long>, InferenceResultMngRepositoryCustom {}
public interface MapSheetLearnRepository
extends JpaRepository<MapSheetLearnEntity, Long>, MapSheetLearnRepositoryCustom {}

View File

@@ -0,0 +1,10 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import org.springframework.data.domain.Page;
public interface MapSheetLearnRepositoryCustom {
Page<MapSheetLearnEntity> getInferenceMgnResultList(InferenceResultDto.SearchListReq req);
}

View File

@@ -0,0 +1,76 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity;
import com.kamco.cd.kamcoback.common.utils.DateRange;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.NumberExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCustom {
private final JPAQueryFactory queryFactory;
@Override
public Page<MapSheetLearnEntity> getInferenceMgnResultList(InferenceResultDto.SearchListReq req) {
Pageable pageable = req.toPageable();
BooleanBuilder builder = new BooleanBuilder();
NumberExpression<Integer> statusOrder =
new CaseBuilder().when(mapSheetLearnEntity.status.eq("Y")).then(0).otherwise(1);
// Gukyuin apply flag
if (StringUtils.isNotBlank(req.getApplyYn())) {
if ("Y".equalsIgnoreCase(req.getApplyYn())) {
builder.and(mapSheetLearnEntity.applyYn.isTrue());
} else if ("N".equalsIgnoreCase(req.getApplyYn())) {
builder.and(mapSheetLearnEntity.applyYn.isNull().or(mapSheetLearnEntity.applyYn.isFalse()));
}
}
// Gukyuin apply date
if (req.getStrtDttm() != null && req.getEndDttm() != null) {
builder.and(
mapSheetLearnEntity
.applyDttm
.goe(DateRange.start(req.getStrtDttm()))
.and(mapSheetLearnEntity.applyDttm.lt(DateRange.end(req.getEndDttm()))));
}
// Title
if (StringUtils.isNotBlank(req.getTitle())) {
builder.and(mapSheetLearnEntity.title.equalsIgnoreCase(req.getTitle()));
}
List<MapSheetLearnEntity> content =
queryFactory
.select(mapSheetLearnEntity)
.from(mapSheetLearnEntity)
.where(builder)
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(statusOrder.asc())
.fetch();
Long total =
queryFactory
.select(mapSheetLearnEntity.count())
.from(mapSheetLearnEntity)
.where(builder)
.fetchOne();
return new PageImpl<>(content, pageable, total == null ? 0L : total);
}
}
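The impl materializes the page with PageImpl plus an unconditional count query. Spring Data's PageableExecutionUtils can defer the count and skip it when the fetched content already proves the total; an equivalent return, shown as an optional refactor rather than what this commit does:

// Defers the count query; it is skipped when offset + content size already determines the total.
return PageableExecutionUtils.getPage(
    content,
    pageable,
    () -> {
        Long total =
            queryFactory
                .select(mapSheetLearnEntity.count())
                .from(mapSheetLearnEntity)
                .where(builder)
                .fetchOne();
        return total == null ? 0L : total;
    });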

View File

@@ -54,13 +54,7 @@ public interface LabelAllocateRepositoryCustom {
InferenceDetail findInferenceDetail(String uuid);
List<Long> fetchNextMoveIds(
Long lastId,
Long batchSize,
Integer compareYyyy,
Integer targetYyyy,
Integer stage,
String userId);
List<Long> fetchNextMoveIds(Long lastId, Long batchSize, String uuid, String userId);
void assignOwnerMove(List<Long> sub, String userId);

View File

@@ -7,6 +7,7 @@ import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceG
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMemberEntity.memberEntity;
import com.kamco.cd.kamcoback.common.enums.StatusType;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.AllocateInfoDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
@@ -493,38 +494,35 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
Projections.constructor(
InferenceDetail.class,
mapSheetAnalInferenceEntity.analTitle,
Expressions.numberTemplate(Integer.class, "{0}", 4),
mapSheetAnalInferenceEntity.stage,
mapSheetAnalInferenceEntity.gukyuinApplyDttm,
mapSheetAnalInferenceEntity.detectingCnt,
labelingAssignmentEntity.workerUid.countDistinct(),
labelingAssignmentEntity.inspectorUid.countDistinct()))
mapSheetAnalDataInferenceGeomEntity.geoUid.count()))
.from(mapSheetAnalInferenceEntity)
.innerJoin(labelingAssignmentEntity)
.on(mapSheetAnalInferenceEntity.id.eq(labelingAssignmentEntity.analUid))
.leftJoin(mapSheetAnalDataInferenceGeomEntity)
.on(
mapSheetAnalInferenceEntity.compareYyyy.eq(
mapSheetAnalDataInferenceGeomEntity.compareYyyy),
mapSheetAnalInferenceEntity.targetYyyy.eq(
mapSheetAnalDataInferenceGeomEntity.targetYyyy),
mapSheetAnalInferenceEntity.stage.eq(mapSheetAnalDataInferenceGeomEntity.stage),
mapSheetAnalDataInferenceGeomEntity.pnu.gt(0),
mapSheetAnalDataInferenceGeomEntity.passYn.isFalse())
.where(mapSheetAnalInferenceEntity.id.eq(analEntity.getId()))
.groupBy(
mapSheetAnalInferenceEntity.analTitle,
mapSheetAnalInferenceEntity.stage,
mapSheetAnalInferenceEntity.gukyuinApplyDttm,
mapSheetAnalInferenceEntity.detectingCnt)
.fetchOne();
}
@Override
public List<Long> fetchNextMoveIds(
Long lastId,
Long batchSize,
Integer compareYyyy,
Integer targetYyyy,
Integer stage,
String userId) {
public List<Long> fetchNextMoveIds(Long lastId, Long batchSize, String uuid, String userId) {
MapSheetAnalInferenceEntity analEntity =
queryFactory
.selectFrom(mapSheetAnalInferenceEntity)
.where(
mapSheetAnalInferenceEntity.compareYyyy.eq(compareYyyy),
mapSheetAnalInferenceEntity.targetYyyy.eq(targetYyyy),
mapSheetAnalInferenceEntity.stage.eq(stage))
.where(mapSheetAnalInferenceEntity.uuid.eq(UUID.fromString(uuid)))
.fetchOne();
if (Objects.isNull(analEntity)) {
@@ -539,7 +537,9 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
labelingAssignmentEntity.workState.eq(LabelState.ASSIGNED.getId()),
labelingAssignmentEntity.analUid.eq(analEntity.getId()),
lastId == null ? null : labelingAssignmentEntity.inferenceGeomUid.gt(lastId))
.orderBy(labelingAssignmentEntity.inferenceGeomUid.asc())
.orderBy(
labelingAssignmentEntity.assignGroupId.asc(),
labelingAssignmentEntity.inferenceGeomUid.asc())
.limit(batchSize)
.fetch();
}
@@ -574,7 +574,7 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
NumberExpression<Long> completeCnt =
new CaseBuilder()
.when(labelingAssignmentEntity.workState.eq(LabelState.COMPLETE.getId()))
.when(labelingAssignmentEntity.workState.eq(LabelState.DONE.getId()))
.then(1L)
.otherwise((Long) null)
.count();
@@ -1050,7 +1050,7 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
.when(
labelingAssignmentEntity
.workState
.eq(LabelState.COMPLETE.getId())
.eq(LabelState.DONE.getId())
.or(labelingAssignmentEntity.workState.eq(LabelState.SKIP.getId())))
.then(1L)
.otherwise(0L)
@@ -1103,7 +1103,9 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
percent))
.from(labelingAssignmentEntity)
.innerJoin(memberEntity)
.on(labelingAssignmentEntity.workerUid.eq(memberEntity.employeeNo))
.on(
labelingAssignmentEntity.workerUid.eq(memberEntity.employeeNo),
memberEntity.status.eq(StatusType.ACTIVE.getId()))
.where(
labelingAssignmentEntity.analUid.eq(analEntity.getId()),
labelingAssignmentEntity.workerUid.ne(userId))
@@ -1112,7 +1114,10 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
completeCnt
.multiply(2)
.goe(totalCnt)) // only rows at or above the average progress => using percent directly is said to strain the HAVING clause
.orderBy(completeCnt.desc()) // TODO: sort by map sheet number? PNU?
.orderBy(
completeCnt
.desc()) // TODO: currently desc by fewest remaining (= most completed); later maybe sort by
// map sheet number? PNU?
.fetch();
return new MoveInfo(userChargeCnt, list);
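The completeCnt expression above relies on SQL COUNT ignoring nulls: rows in the DONE state map to 1L, everything else to null, so count() tallies only the matches. The same QueryDSL idiom in isolation, using only names already present in this diff:

// QueryDSL conditional count: COUNT(CASE WHEN work_state = 'DONE' THEN 1 END).
NumberExpression<Long> doneCnt =
    new CaseBuilder()
        .when(labelingAssignmentEntity.workState.eq(LabelState.DONE.getId()))
        .then(1L)
        .otherwise((Long) null) // null rows are ignored by COUNT
        .count();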