Fix inference result retrieval

2026-01-22 12:13:19 +09:00
parent c70abbdb6d
commit e2eb653df7
6 changed files with 530 additions and 441 deletions

View File

@@ -29,20 +29,22 @@ public class InferenceDetailDto {
private String dataName;
private Long mapSheepNum;
private Long detectingCnt;
- @JsonFormatDttm private ZonedDateTime analStrtDttm;
- @JsonFormatDttm private ZonedDateTime analEndDttm;
+ @JsonFormatDttm
+ private ZonedDateTime analStrtDttm;
+ @JsonFormatDttm
+ private ZonedDateTime analEndDttm;
private Long analSec;
private String analState;
public Basic(
    Long id,
    String dataName,
    Long mapSheepNum,
    Long detectingCnt,
    ZonedDateTime analStrtDttm,
    ZonedDateTime analEndDttm,
    Long analSec,
    String analState) {
  this.id = id;
  this.dataName = dataName;
  this.mapSheepNum = mapSheepNum;
@@ -61,8 +63,10 @@ public class InferenceDetailDto {
private Long id;
private String analTitle;
private Long detectingCnt;
- @JsonFormatDttm private ZonedDateTime analStrtDttm;
- @JsonFormatDttm private ZonedDateTime analEndDttm;
+ @JsonFormatDttm
+ private ZonedDateTime analStrtDttm;
+ @JsonFormatDttm
+ private ZonedDateTime analEndDttm;
private Long analSec;
private Long analPredSec;
private String analState;
@@ -70,16 +74,16 @@ public class InferenceDetailDto {
private String gukyuinUsed;
public AnalResList(
    Long id,
    String analTitle,
    Long detectingCnt,
    ZonedDateTime analStrtDttm,
    ZonedDateTime analEndDttm,
    Long analSec,
    Long analPredSec,
    String analState,
    String analStateNm,
    String gukyuinUsed) {
  this.id = id;
  this.analTitle = analTitle;
  this.detectingCnt = detectingCnt;
@@ -102,8 +106,10 @@ public class InferenceDetailDto {
private String modelInfo;
private Integer targetYyyy;
private Integer compareYyyy;
- @JsonFormatDttm private ZonedDateTime analStrtDttm;
- @JsonFormatDttm private ZonedDateTime analEndDttm;
+ @JsonFormatDttm
+ private ZonedDateTime analStrtDttm;
+ @JsonFormatDttm
+ private ZonedDateTime analEndDttm;
private Long analSec;
private Long analPredSec;
private String resultUrl;
@@ -113,20 +119,20 @@ public class InferenceDetailDto {
private String analStateNm;
public AnalResSummary(
    Long id,
    String analTitle,
    String modelInfo,
    Integer targetYyyy,
    Integer compareYyyy,
    ZonedDateTime analStrtDttm,
    ZonedDateTime analEndDttm,
    Long analSec,
    Long analPredSec,
    String resultUrl,
    Long detectingCnt,
    Double accuracy,
    String analState,
    String analStateNm) {
  this.id = id;
  this.analTitle = analTitle;
  this.modelInfo = modelInfo;
@@ -183,16 +189,17 @@ public class InferenceDetailDto {
private Clazzes target;
private MapSheet mapSheet;
private Coordinate center;
- @JsonFormatDttm private ZonedDateTime updatedDttm;
+ @JsonFormatDttm
+ private ZonedDateTime updatedDttm;
public DetailListEntity(
    UUID uuid,
    Double detectionScore,
    Clazzes compare,
    Clazzes target,
    MapSheet mapSheet,
    Coordinate center,
    ZonedDateTime updatedDttm) {
  this.code = new Uid(uuid);
  this.detectionScore = detectionScore;
  this.compare = compare;
@@ -233,7 +240,8 @@ public class InferenceDetailDto {
private String code;
private String name;
- @JsonIgnore private Double score;
+ @JsonIgnore
+ private Double score;
public Clazz(String code, Double score) {
  this.code = code;
@@ -305,6 +313,7 @@ public class InferenceDetailDto {
Long mapSheetNum;
String mapSheetName;
String subUid;
+ String pnu;
// @JsonIgnore String gemoStr;
// @JsonIgnore String geomCenterStr;
@@ -312,18 +321,19 @@ public class InferenceDetailDto {
// JsonNode geomCenter;
public Geom(
    UUID uuid,
    String uid,
    Integer compareYyyy,
    Integer targetYyyy,
    Double cdProb,
    String classBeforeCd,
    Double classBeforeProb,
    String classAfterCd,
    Double classAfterProb,
    Long mapSheetNum,
    String mapSheetName,
-   String subUid) {
+   String subUid,
+   String pnu) {
  this.uuid = uuid;
  this.uid = uid;
  this.compareYyyy = compareYyyy;
@@ -338,20 +348,7 @@ public class InferenceDetailDto {
  this.mapSheetNum = mapSheetNum;
  this.mapSheetName = mapSheetName;
  this.subUid = subUid;
- // this.gemoStr = gemoStr;
- // this.geomCenterStr = geomCenterStr;
- //
- // ObjectMapper mapper = new ObjectMapper();
- // JsonNode geomJson;
- // JsonNode geomCenterJson;
- // try {
- //   geomJson = mapper.readTree(gemoStr);
- //   geomCenterJson = mapper.readTree(geomCenterStr);
- // } catch (JsonProcessingException e) {
- //   throw new RuntimeException(e);
- // }
- // this.gemo = geomJson;
- // this.geomCenter = geomCenterJson;
+ this.pnu = pnu;
}
}
@@ -399,7 +396,7 @@ public class InferenceDetailDto {
String[] sortParams = sort.split(",");
String property = sortParams[0];
Sort.Direction direction =
    sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC;
return PageRequest.of(page, size, Sort.by(direction, property));
}
return PageRequest.of(page, size);
@@ -439,25 +436,27 @@ public class InferenceDetailDto {
private Integer targetYyyy;
private String detectOption;
private String mapSheetScope;
- @JsonFormatDttm private ZonedDateTime inferStartDttm;
- @JsonFormatDttm private ZonedDateTime inferEndDttm;
+ @JsonFormatDttm
+ private ZonedDateTime inferStartDttm;
+ @JsonFormatDttm
+ private ZonedDateTime inferEndDttm;
private Integer stage;
private String elapsedDuration;
private String subUid;
public AnalResultInfo(
    String analTitle,
    String modelVer1,
    String modelVer2,
    String modelVer3,
    Integer compareYyyy,
    Integer targetYyyy,
    String detectOption,
    String mapSheetScope,
    ZonedDateTime inferStartDttm,
    ZonedDateTime inferEndDttm,
    Integer stage,
    String subUid) {
  this.analTitle = analTitle;
  this.modelVer1 = modelVer1;
  this.modelVer2 = modelVer2;
@@ -471,9 +470,9 @@ public class InferenceDetailDto {
  this.stage = stage;
  this.subUid = subUid;
  Duration elapsed =
      (inferStartDttm != null && inferEndDttm != null)
          ? Duration.between(inferStartDttm, inferEndDttm)
          : null;
  if (elapsed != null) {
    long seconds = elapsed.getSeconds();
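The rest of this constructor is truncated in the hunk. As a rough sketch only (not the project's actual code), the elapsed seconds computed above could be rendered into the elapsedDuration string like this, assuming an HH:mm:ss display format:

    // Hypothetical formatting of the elapsed time; the real format used by elapsedDuration may differ.
    long seconds = elapsed.getSeconds();
    this.elapsedDuration =
        String.format("%02d:%02d:%02d", seconds / 3600, (seconds % 3600) / 60, seconds % 60);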

View File

@@ -23,7 +23,9 @@ import org.springframework.data.domain.Pageable;
public class InferenceResultDto {
- /** 분석대상 도엽 enum */
+ /**
+  * 분석대상 도엽 enum
+  */
@Getter
@AllArgsConstructor
public enum MapSheetScope implements EnumType {
@@ -52,7 +54,9 @@ public class InferenceResultDto {
}
}
- /** 탐지 데이터 옵션 dto */
+ /**
+  * 탐지 데이터 옵션 dto
+  */
@Getter
@AllArgsConstructor
public enum DetectOption implements EnumType {
@@ -129,7 +133,9 @@ public class InferenceResultDto {
}
}
- /** 목록조회 dto */
+ /**
+  * 목록조회 dto
+  */
@Schema(name = "ResultList", description = "추론관리 목록")
@Getter
@Setter
@@ -178,6 +184,12 @@ public class InferenceResultDto {
@Schema(description = "uid") @Schema(description = "uid")
private String uid; private String uid;
@Schema(description = "uid 앞 8자리")
@JsonProperty("subUid")
public String subUid() {
return this.uid.substring(0, 8).toUpperCase();
}
@Schema(description = "상태명") @Schema(description = "상태명")
@JsonProperty("statusName") @JsonProperty("statusName")
public String statusName() { public String statusName() {
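The new subUid() accessor exposes the first eight characters of the stored uid in upper case. A worked example with an illustrative (made-up) value:

    // Illustrative only – the uid value is not from the project.
    String uid = "e2eb653df7c74b1a9e2d4c70abbdb6d0";
    String subUid = uid.substring(0, 8).toUpperCase(); // "E2EB653D"

As written, subUid() throws if uid is null or shorter than 8 characters, so callers presumably guarantee a populated UUID string.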
@@ -207,7 +219,9 @@ public class InferenceResultDto {
}
}
- /** 목록조회 검색 조건 dto */
+ /**
+  * 목록조회 검색 조건 dto
+  */
@Getter
@Setter
@NoArgsConstructor
@@ -229,7 +243,9 @@ public class InferenceResultDto {
}
}
- /** 변화탐지 실행 정보 저장 요청 정보 */
+ /**
+  * 변화탐지 실행 정보 저장 요청 정보
+  */
@Getter
@Setter
@NoArgsConstructor
@@ -268,8 +284,8 @@ public class InferenceResultDto {
@Schema(description = "탐지 데이터 옵션 - 추론제외(EXCL), 이전 년도 도엽 사용(PREV)", example = "EXCL") @Schema(description = "탐지 데이터 옵션 - 추론제외(EXCL), 이전 년도 도엽 사용(PREV)", example = "EXCL")
@NotBlank @NotBlank
@EnumValid( @EnumValid(
enumClass = DetectOption.class, enumClass = DetectOption.class,
message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.") message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.")
private String detectOption; private String detectOption;
@Schema(description = "5k 도협 번호 목록", example = "[33605,33606, 33610, 34802, 35603, 35611]") @Schema(description = "5k 도협 번호 목록", example = "[33605,33606, 33610, 34802, 35603, 35611]")
@@ -381,33 +397,33 @@ public class InferenceResultDto {
private String modelVer3;
public InferenceStatusDetailDto(
    Long detectingCnt,
    Integer m1PendingJobs,
    Integer m2PendingJobs,
    Integer m3PendingJobs,
    Integer m1CompletedJobs,
    Integer m2CompletedJobs,
    Integer m3CompletedJobs,
    Integer m1FailedJobs,
    Integer m2FailedJobs,
    Integer m3FailedJobs,
    ZonedDateTime m1ModelStartDttm,
    ZonedDateTime m2ModelStartDttm,
    ZonedDateTime m3ModelStartDttm,
    ZonedDateTime m1ModelEndDttm,
    ZonedDateTime m2ModelEndDttm,
    ZonedDateTime m3ModelEndDttm,
    String title,
    Integer compareYyyy,
    Integer targetYyyy,
    Integer stage,
    ZonedDateTime inferStartDttm,
    ZonedDateTime inferEndDttm,
    String detectOption,
    String mapSheetScope,
    String modelVer1,
    String modelVer2,
    String modelVer3) {
  this.detectingCnt = detectingCnt;
  this.m1PendingJobs = m1PendingJobs;
  this.m2PendingJobs = m2PendingJobs;
@@ -443,12 +459,12 @@ public class InferenceResultDto {
long tiles = this.detectingCnt; // 도엽수
int models = 3; // 모델 개수
int completed =
    this.m1CompletedJobs
        + this.m2CompletedJobs
        + this.m3CompletedJobs
        + this.m1FailedJobs
        + this.m2FailedJobs
        + this.m3FailedJobs; // 완료수
long total = tiles * models; // 전체 작업량
if (completed >= total) {
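The remainder of this progress calculation is not shown in the hunk; a minimal sketch of the ratio the counters above imply (the rounding and clamping are assumptions, not the actual implementation):

    // Hypothetical: derive a progress percentage from the counters computed above.
    int percent = total > 0 ? (int) Math.min(100, Math.round(100.0 * completed / total)) : 0;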
@@ -513,9 +529,12 @@ public class InferenceResultDto {
public static class InferenceServerStatusDto {
private String serverName;
- @JsonIgnore private float cpu_user;
- @JsonIgnore private float cpu_system;
- @JsonIgnore private float memused;
+ @JsonIgnore
+ private float cpu_user;
+ @JsonIgnore
+ private float cpu_system;
+ @JsonIgnore
+ private float memused;
private Long kbmemused;
private float gpuUtil;

View File

@@ -25,9 +25,9 @@ public class MapSheetLearnEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_learn_id_gen")
@SequenceGenerator(
    name = "tb_map_sheet_learn_id_gen",
    sequenceName = "tb_map_sheet_learn_uid",
    allocationSize = 1)
@Column(name = "id", nullable = false)
private Long id;
@@ -191,18 +191,18 @@ public class MapSheetLearnEntity {
public InferenceResultDto.ResultList toDto() {
  return new InferenceResultDto.ResultList(
      this.uuid,
      this.title,
      this.stage,
      this.status,
      this.mapSheetCnt,
      this.detectingCnt,
      this.inferStartDttm,
      this.inferEndDttm,
      this.applyYn,
      this.applyDttm,
      this.compareYyyy,
      this.targetYyyy,
      this.uid);
}
}

View File

@@ -0,0 +1,51 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.OffsetDateTime;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
@Getter
@Setter
@Entity
@Table(name = "tb_pnu")
public class PnuEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_pnu_id_gen")
@SequenceGenerator(name = "tb_pnu_id_gen", sequenceName = "tb_pnu_uid", allocationSize = 1)
@Column(name = "id", nullable = false)
private Long id;
@NotNull
@ManyToOne(fetch = FetchType.LAZY, optional = false)
@JoinColumn(name = "geo_uid", nullable = false)
private MapSheetAnalDataInferenceGeomEntity geo;
@Size(max = 19)
@Column(name = "pnu", length = 19)
private String pnu;
@Column(name = "created_dttm")
private OffsetDateTime createdDttm;
@Column(name = "created_uid")
private Long createdUid;
@ColumnDefault("false")
@Column(name = "del_yn")
private Boolean delYn;
}
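A minimal usage sketch for the new entity, assuming a managed EntityManager and an already-loaded geometry row (both the variable names and the PNU value are illustrative, not project code):

    // Hypothetical: link one PNU (parcel number) to a detected geometry.
    PnuEntity pnu = new PnuEntity();
    pnu.setGeo(geomEntity); // MapSheetAnalDataInferenceGeomEntity that owns the polygon
    pnu.setPnu("1111010100100010000"); // 19-character parcel identifier (made-up value)
    pnu.setCreatedDttm(OffsetDateTime.now());
    pnu.setDelYn(false);
    entityManager.persist(pnu);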

View File

@@ -1,8 +1,10 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
+ import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx50kEntity.mapInkx50kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity.mapInkx5kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearn5kEntity.mapSheetLearn5kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity;
+ import static com.querydsl.jpa.JPAExpressions.select;
import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto;
import com.querydsl.core.BooleanBuilder;
@@ -10,7 +12,6 @@ import com.querydsl.core.types.dsl.BooleanPath;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberPath;
import com.querydsl.core.types.dsl.StringPath;
- import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.List;
import java.util.UUID;
@@ -55,22 +56,22 @@ public class MapSheetLearn5kRepositoryImpl implements MapSheetLearn5kRepositoryC
}
var learnIdSubQuery =
-   JPAExpressions.select(mapSheetLearnEntity.id)
+   select(mapSheetLearnEntity.id)
        .from(mapSheetLearnEntity)
        .where(mapSheetLearnEntity.uuid.eq(uuid));
queryFactory
    .update(mapSheetLearn5kEntity)
    .set(failPath, Boolean.TRUE)
    .set(jobIdPath, jobDto.getId())
    .set(errorMsgPath, jobDto.getErrorMessage())
    .where(
        mapSheetLearn5kEntity
            .learn
            .id
            .eq(learnIdSubQuery)
            .and(mapSheetLearn5kEntity.mapSheetNum.eq(Long.valueOf(jobDto.getSceneId()))))
    .execute();
}
@Override
@@ -102,21 +103,21 @@ public class MapSheetLearn5kRepositoryImpl implements MapSheetLearn5kRepositoryC
}
var learnIdSubQuery =
-   JPAExpressions.select(mapSheetLearnEntity.id)
+   select(mapSheetLearnEntity.id)
        .from(mapSheetLearnEntity)
        .where(mapSheetLearnEntity.uuid.eq(uuid));
queryFactory
    .update(mapSheetLearn5kEntity)
    .set(failPath, Boolean.FALSE)
    .set(jobIdPath, jobDto.getId())
    .where(
        mapSheetLearn5kEntity
            .learn
            .id
            .eq(learnIdSubQuery)
            .and(mapSheetLearn5kEntity.mapSheetNum.eq(Long.valueOf(jobDto.getSceneId()))))
    .execute();
}
@Override
@@ -152,16 +153,16 @@ public class MapSheetLearn5kRepositoryImpl implements MapSheetLearn5kRepositoryC
}
return queryFactory
    .select(jobIdPath)
    .from(mapSheetLearn5kEntity)
    .join(mapSheetLearn5kEntity.learn, mapSheetLearnEntity)
    .where(
        mapSheetLearnEntity
            .uuid
            .eq(uuid)
            .and(failPath.isFalse().or(failPath.isNull()))
            .and(jobIdPath.in(failIds)))
    .fetch();
}
@Override
@@ -194,31 +195,37 @@ public class MapSheetLearn5kRepositoryImpl implements MapSheetLearn5kRepositoryC
}
return queryFactory
    .select(jobIdPath)
    .from(mapSheetLearn5kEntity)
    .join(mapSheetLearn5kEntity.learn, mapSheetLearnEntity)
    .where(mapSheetLearnEntity.uuid.eq(uuid).and(jobIdPath.in(completedIds)))
    .fetch();
}
+ /**
+  * 추론 진행 도엽 목록
+  *
+  * @param uuid uuid
+  * @return
+  */
@Override
public List<String> getInferenceRunMapId(UUID uuid) {
  return queryFactory
-     .select(mapInkx5kEntity.mapidNm)
-     .from(mapSheetLearnEntity)
-     .innerJoin(mapSheetLearn5kEntity)
-     .on(mapSheetLearn5kEntity.learn.id.eq(mapSheetLearnEntity.id))
-     .innerJoin(mapInkx5kEntity)
-     .on(
-         Expressions.booleanTemplate(
-                 "function('regexp_match', {0}, '^[0-9]+$') is not null",
-                 mapInkx5kEntity.mapidcdNo)
-             .and(
-                 mapSheetLearn5kEntity.mapSheetNum.eq(
-                     Expressions.numberTemplate(
-                         Long.class, "cast({0} as long)", mapInkx5kEntity.mapidcdNo))))
-     .where(mapSheetLearnEntity.uuid.eq(uuid))
-     .groupBy(mapInkx5kEntity.mapidNm)
+     .select(mapInkx50kEntity.mapidNm.concat(mapInkx50kEntity.mapidcdNo))
+     .from(mapInkx50kEntity)
+     .join(mapInkx5kEntity).on(mapInkx5kEntity.mapInkx50k.fid.eq(mapInkx50kEntity.fid))
+     .where(
+         mapInkx5kEntity.mapidcdNo.in(
+             select(
+                 Expressions.stringTemplate("CAST({0} AS text)", mapSheetLearn5kEntity.mapSheetNum))
+                 .from(mapSheetLearnEntity)
+                 .join(mapSheetLearn5kEntity).on(mapSheetLearn5kEntity.learn.id.eq(mapSheetLearnEntity.id))
+                 .where(mapSheetLearnEntity.uuid.eq(uuid))))
+     .groupBy(mapInkx50kEntity.mapidcdNo, mapInkx50kEntity.mapidNm)
+     .orderBy(mapInkx50kEntity.mapidNm.asc())
      .fetch();
}
}
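The rewritten getInferenceRunMapId now starts from the 50k index table and filters its 5k children through an IN subquery over the run's sheet numbers, so each returned string is a 50k sheet name concatenated with its sheet code, one entry per 50k sheet. A hedged call-site sketch (the repository variable name is an assumption):

    // Hypothetical usage – one entry per 50k map sheet touched by the inference run identified by uuid.
    List<String> runningSheets = mapSheetLearn5kRepository.getInferenceRunMapId(uuid);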

View File

@@ -7,6 +7,7 @@ import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceG
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity;
+ import static com.kamco.cd.kamcoback.postgres.entity.QPnuEntity.pnuEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QSystemMetricEntity.systemMetricEntity;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
@@ -28,6 +29,8 @@ import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberExpression;
+ import com.querydsl.core.types.dsl.StringExpression;
+ import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.time.OffsetDateTime;
import java.util.List;
@@ -55,7 +58,7 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
BooleanBuilder builder = new BooleanBuilder();
NumberExpression<Integer> statusOrder =
    new CaseBuilder().when(mapSheetLearnEntity.status.eq("Y")).then(0).otherwise(1);
// 국유인 반영 여부
if (StringUtils.isNotBlank(req.getApplyYn())) {
@@ -69,10 +72,10 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
// 국유인 반영일
if (req.getStrtDttm() != null && req.getEndDttm() != null) {
  builder.and(
      mapSheetLearnEntity
          .applyDttm
          .goe(DateRange.start(req.getStrtDttm()))
          .and(mapSheetLearnEntity.applyDttm.lt(DateRange.end(req.getEndDttm()))));
}
// 제목
@@ -81,21 +84,21 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
}
List<MapSheetLearnEntity> content =
    queryFactory
        .select(mapSheetLearnEntity)
        .from(mapSheetLearnEntity)
        .where(builder)
        .offset(pageable.getOffset())
        .limit(pageable.getPageSize())
        .orderBy(mapSheetLearnEntity.id.desc())
        .fetch();
Long total =
    queryFactory
        .select(mapSheetLearnEntity.count())
        .from(mapSheetLearnEntity)
        .where(builder)
        .fetchOne();
return new PageImpl<>(content, pageable, total == null ? 0L : total);
}
@@ -103,10 +106,10 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
@Override
public Optional<MapSheetLearnEntity> getInferenceResultByUuid(UUID uuid) {
  return Optional.ofNullable(
      queryFactory
          .selectFrom(mapSheetLearnEntity)
          .where(mapSheetLearnEntity.uuid.eq(uuid))
          .fetchOne());
}
@Override
@@ -115,44 +118,44 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
BooleanBuilder builder = new BooleanBuilder();
List<OffsetDateTime> latestIds =
    queryFactory
        .select(systemMetricEntity.timestamp.max())
        .from(systemMetricEntity)
        .groupBy(systemMetricEntity.serverName)
        .fetch();
List<OffsetDateTime> latestGpuIds =
    queryFactory
        .select(gpuMetricEntity.timestamp.max())
        .from(gpuMetricEntity)
        .groupBy(gpuMetricEntity.serverName)
        .fetch();
List<InferenceServerStatusDto> foundContent =
    queryFactory
        .select(
            Projections.constructor(
                InferenceServerStatusDto.class,
                systemMetricEntity.serverName,
                systemMetricEntity.cpuUser,
                systemMetricEntity.cpuSystem,
                systemMetricEntity.memused,
                systemMetricEntity.kbmemused,
                gpuMetricEntity.gpuUtil
                // gpuMetricEntity.gpuMemUsed,
                // gpuMetricEntity.gpuMemTotal
                ))
        .from(systemMetricEntity)
        .leftJoin(gpuMetricEntity)
        .on(
            gpuMetricEntity
                .timestamp
                .in(latestGpuIds)
                .and(gpuMetricEntity.serverName.eq(systemMetricEntity.serverName)))
        .where(systemMetricEntity.timestamp.in(latestIds)) // In 절 사용
        .orderBy(systemMetricEntity.serverName.asc())
        .limit(4)
        .fetch();
return foundContent;
}
@@ -160,11 +163,11 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
@Override
public Optional<MapSheetLearnEntity> getInferenceResultByStatus(String status) {
  return Optional.ofNullable(
      queryFactory
          .selectFrom(mapSheetLearnEntity)
          .where(mapSheetLearnEntity.status.eq(status))
          .limit(1)
          .fetchOne());
}
/**
@@ -183,46 +186,46 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
QModelMngEntity m3Model = new QModelMngEntity("m3Model");
InferenceStatusDetailDto foundContent =
    queryFactory
        .select(
            Projections.constructor(
                InferenceStatusDetailDto.class,
                mapSheetLearnEntity.detectingCnt,
                mapSheetLearnEntity.m1PendingJobs,
                mapSheetLearnEntity.m2PendingJobs,
                mapSheetLearnEntity.m3PendingJobs,
                mapSheetLearnEntity.m1CompletedJobs,
                mapSheetLearnEntity.m2CompletedJobs,
                mapSheetLearnEntity.m3CompletedJobs,
                mapSheetLearnEntity.m1FailedJobs,
                mapSheetLearnEntity.m2FailedJobs,
                mapSheetLearnEntity.m3FailedJobs,
                mapSheetLearnEntity.m1ModelStartDttm,
                mapSheetLearnEntity.m2ModelStartDttm,
                mapSheetLearnEntity.m3ModelStartDttm,
                mapSheetLearnEntity.m1ModelEndDttm,
                mapSheetLearnEntity.m2ModelEndDttm,
                mapSheetLearnEntity.m3ModelEndDttm,
                mapSheetLearnEntity.title,
                mapSheetLearnEntity.compareYyyy,
                mapSheetLearnEntity.targetYyyy,
                mapSheetLearnEntity.stage,
                mapSheetLearnEntity.inferStartDttm,
                mapSheetLearnEntity.inferEndDttm,
                mapSheetLearnEntity.detectOption,
                mapSheetLearnEntity.mapSheetScope,
                m1Model.modelVer.as("model1Ver"),
                m2Model.modelVer.as("model2Ver"),
                m3Model.modelVer.as("model3Ver")))
        .from(mapSheetLearnEntity)
        .leftJoin(m1Model)
        .on(m1Model.uuid.eq(mapSheetLearnEntity.m1ModelUuid))
        .leftJoin(m2Model)
        .on(m2Model.uuid.eq(mapSheetLearnEntity.m2ModelUuid))
        .leftJoin(m3Model)
        .on(m3Model.uuid.eq(mapSheetLearnEntity.m3ModelUuid))
        .where(mapSheetLearnEntity.uuid.eq(uuid))
        .fetchOne();
return foundContent;
}
@@ -233,30 +236,30 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
QModelMngEntity model = new QModelMngEntity("model");
InferenceProgressDto dto =
    queryFactory
        .select(
            Projections.constructor(
                InferenceProgressDto.class,
                Projections.constructor(
                    InferenceProgressDto.pred_requests_areas.class,
                    mapSheetLearnEntity.compareYyyy,
                    mapSheetLearnEntity.targetYyyy,
                    mapSheetLearnEntity.modelComparePath,
                    mapSheetLearnEntity.modelTargetPath),
                model.modelVer.as("modelVer"),
                model.cdModelPath.as("cdModelPath"),
                model.cdModelFileName.as("cdModelFileName"),
                model.cdModelConfigPath.as("cdModelConfigPath"),
                model.cdModelConfigFileName.as("cdModelConfigFileName"),
                model.clsModelPath,
                model.clsModelFileName,
                model.clsModelVersion,
                model.priority))
        .from(mapSheetLearnEntity)
        .join(model)
        .on(model.uuid.eq(modelUuid))
        .where(mapSheetLearnEntity.id.eq(id))
        .fetchOne();
return dto;
}
@@ -267,26 +270,26 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
*/
public MapSheetLearnEntity getProcessing() {
  return queryFactory
      .select(mapSheetLearnEntity)
      .from(mapSheetLearnEntity)
      .where(mapSheetLearnEntity.status.eq("IN_PROGRESS"))
      .fetchOne();
}
@Override
public Integer getLearnStage(Integer compareYear, Integer targetYear) {
  Integer stage =
      queryFactory
          .select(mapSheetLearnEntity.stage)
          .from(mapSheetLearnEntity)
          .where(
              mapSheetLearnEntity
                  .compareYyyy
                  .eq(compareYear)
                  .and(mapSheetLearnEntity.targetYyyy.eq(targetYear)))
          .orderBy(mapSheetLearnEntity.id.desc())
          .limit(1)
          .fetchOne();
  return stage == null ? 1 : stage + 1;
}
@@ -298,30 +301,30 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
QModelMngEntity m3 = new QModelMngEntity("m3");
return queryFactory
    .select(
        Projections.constructor(
            AnalResultInfo.class,
            mapSheetLearnEntity.title,
            m1.modelVer,
            m2.modelVer,
            m3.modelVer,
            mapSheetLearnEntity.compareYyyy,
            mapSheetLearnEntity.targetYyyy,
            mapSheetLearnEntity.detectOption,
            mapSheetLearnEntity.mapSheetScope,
            mapSheetLearnEntity.inferStartDttm,
            mapSheetLearnEntity.inferEndDttm,
            mapSheetLearnEntity.stage,
            Expressions.stringTemplate("substring({0} from 1 for 8)", mapSheetLearnEntity.uid)))
    .from(mapSheetLearnEntity)
    .leftJoin(m1)
    .on(mapSheetLearnEntity.m1ModelUuid.eq(m1.uuid))
    .leftJoin(m2)
    .on(mapSheetLearnEntity.m2ModelUuid.eq(m2.uuid))
    .leftJoin(m3)
    .on(mapSheetLearnEntity.m3ModelUuid.eq(m3.uuid))
    .where(mapSheetLearnEntity.uuid.eq(uuid))
    .fetchOne();
}
@Override
@@ -329,28 +332,28 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
// analUid로 분석 정보 조회
MapSheetLearnEntity learnEntity =
    queryFactory
        .selectFrom(mapSheetLearnEntity)
        .where(mapSheetLearnEntity.uuid.eq(uuid))
        .fetchOne();
if (Objects.isNull(learnEntity)) {
  throw new CustomApiException("NOT_FOUND_DATA", HttpStatus.NOT_FOUND);
}
return queryFactory
    .select(
        Projections.constructor(
            Dashboard.class,
            mapSheetAnalSttcEntity.id.classAfterCd.toUpperCase(),
            mapSheetAnalSttcEntity.classAfterCnt.sum()))
    .from(mapSheetAnalInferenceEntity)
    .innerJoin(mapSheetAnalSttcEntity)
    .on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalSttcEntity.id.analUid))
    .where(mapSheetAnalInferenceEntity.learnId.eq(learnEntity.getId()))
    .groupBy(mapSheetAnalSttcEntity.id.classAfterCd)
    .orderBy(mapSheetAnalSttcEntity.id.classAfterCd.asc())
    .fetch();
}
/**
@@ -366,10 +369,10 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
// 1) 분석 엔티티 조회
MapSheetLearnEntity analEntity =
    queryFactory
        .selectFrom(mapSheetLearnEntity)
        .where(mapSheetLearnEntity.uuid.eq(uuid))
        .fetchOne();
if (analEntity == null) {
  throw new CustomApiException("NOT_FOUND_DATA", HttpStatus.NOT_FOUND);
@@ -380,22 +383,22 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().isBlank()) {
  where.and(
      mapSheetAnalDataInferenceGeomEntity.classAfterCd.eq(
          searchGeoReq.getTargetClass().toLowerCase()));
}
if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().isBlank()) {
  where.and(
      mapSheetAnalDataInferenceGeomEntity.classBeforeCd.eq(
          searchGeoReq.getCompareClass().toLowerCase()));
}
if (searchGeoReq.getMapSheetNum() != null) {
  //
  // where.and(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.eq(searchGeoReq.getMapSheetNum()));
  where.and(
      mapSheetAnalDataInferenceGeomEntity.mapSheetNum.like(
          "%" + searchGeoReq.getMapSheetNum() + "%"));
}
where.and(mapSheetAnalDataInferenceGeomEntity.classAfterCd.isNotNull());
@@ -405,56 +408,66 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
// 3) inkx 조인 조건: JPQL/HQL에서 '~' 불가 → function('regexp_match', ...) 사용
BooleanExpression inkxIsNumeric =
    Expressions.booleanTemplate(
        "function('regexp_match', {0}, '^[0-9]+$') is not null", mapInkx5kEntity.mapidcdNo);
NumberExpression<Long> inkxNoAsLong =
    Expressions.numberTemplate(Long.class, "cast({0} as long)", mapInkx5kEntity.mapidcdNo);
+ StringExpression pnu =
+     Expressions.stringTemplate(
+         "coalesce(({0}), '')",
+         JPAExpressions
+             .select(Expressions.stringTemplate("string_agg({0}, ',')", pnuEntity.pnu))
+             .from(pnuEntity)
+             .where(pnuEntity.geo.geoUid.eq(mapSheetAnalDataInferenceGeomEntity.geoUid))
+     );
// 4) content
List<Geom> content =
    queryFactory
        .select(
            Projections.constructor(
                Geom.class,
                mapSheetAnalDataInferenceGeomEntity.uuid,
                mapSheetAnalDataInferenceGeomEntity.resultUid,
                mapSheetAnalDataInferenceGeomEntity.compareYyyy,
                mapSheetAnalDataInferenceGeomEntity.targetYyyy,
                mapSheetAnalDataInferenceGeomEntity.cdProb,
                mapSheetAnalDataInferenceGeomEntity.classBeforeCd,
                mapSheetAnalDataInferenceGeomEntity.classBeforeProb,
                mapSheetAnalDataInferenceGeomEntity.classAfterCd,
                mapSheetAnalDataInferenceGeomEntity.classAfterProb,
                mapSheetAnalDataInferenceGeomEntity.mapSheetNum,
                mapInkx5kEntity.mapidNm,
                Expressions.stringTemplate(
                    "substring({0} from 1 for 8)",
-                   mapSheetAnalDataInferenceGeomEntity.resultUid)))
+                   mapSheetAnalDataInferenceGeomEntity.resultUid
+               ), pnu))
        .from(mapSheetAnalInferenceEntity)
        .join(mapSheetAnalDataInferenceEntity)
        .on(mapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
        .join(mapSheetAnalDataInferenceGeomEntity)
        .on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
        .join(mapInkx5kEntity)
        .on(inkxIsNumeric.and(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.eq(inkxNoAsLong)))
        .where(where)
        .orderBy(mapSheetAnalDataInferenceGeomEntity.geoUid.desc())
        .offset(pageable.getOffset())
        .limit(pageable.getPageSize())
        .fetch();
// 5) total (조인 최소화 유지)
Long total =
    queryFactory
        .select(mapSheetAnalDataInferenceGeomEntity.geoUid.count())
        .from(mapSheetAnalInferenceEntity)
        .join(mapSheetAnalDataInferenceEntity)
        .on(mapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
        .join(mapSheetAnalDataInferenceGeomEntity)
        .on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
        .where(where)
        .fetchOne();
return new PageImpl<>(content, pageable, total == null ? 0L : total);
}
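The pnu expression introduced in this file collapses all tb_pnu rows of a geometry into one comma-separated string via a correlated string_agg subquery, and coalesce turns a missing aggregate into an empty string. For reference, a sketch of the equivalent per-geometry logic on the Java side, assuming the PNU rows were loaded separately (pnuRows is a hypothetical List<PnuEntity>):

    // Hypothetical in-memory equivalent of coalesce(string_agg(pnu, ','), '') for one geometry.
    String pnuCsv =
        pnuRows.stream()
            .map(PnuEntity::getPnu)
            .filter(Objects::nonNull)
            .collect(java.util.stream.Collectors.joining(","));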