Merge remote-tracking branch 'origin/feat/infer_dev_260107' into feat/infer_dev_260107
# Conflicts:
#	src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java
@@ -323,9 +323,9 @@ public class InferenceResultApiController {
       })
   @GetMapping("/infer-result-info")
   public ApiResponseDto<InferenceDetailDto.AnalResultInfo> getInferenceResultInfo(
-      @Parameter(description = "회차 uuid", example = "932fbd72-2e8e-4a49-b189-09046787f9d1")
+      @Parameter(description = "회차 uuid", example = "f30e8817-9625-4fff-ba43-c1e6ed2067c4")
       @RequestParam
-          String uuid) {
+          UUID uuid) {
     return ApiResponseDto.ok(inferenceResultService.getInferenceResultInfo(uuid));
   }

@@ -344,9 +344,9 @@ public class InferenceResultApiController {
       })
   @GetMapping("/infer-class-count")
   public ApiResponseDto<List<InferenceDetailDto.Dashboard>> getInferenceClassCountList(
-      @Parameter(description = "회차 uuid", example = "8584e8d4-53b3-4582-bde2-28a81495a626")
+      @Parameter(description = "회차 uuid", example = "242750c5-a627-429b-950a-dce5a87c1c01")
       @RequestParam
-          String uuid) {
+          UUID uuid) {
     return ApiResponseDto.ok(inferenceResultService.getInferenceClassCountList(uuid));
   }

@@ -365,7 +365,7 @@ public class InferenceResultApiController {
       })
   @GetMapping("/geom-list")
   public ApiResponseDto<Page<InferenceDetailDto.Geom>> getInferenceGeomList(
-      @Parameter(description = "회차 uuid", example = "8584e8d4-53b3-4582-bde2-28a81495a626")
+      @Parameter(description = "회차 uuid", example = "242750c5-a627-429b-950a-dce5a87c1c01")
       @RequestParam(required = true)
           UUID uuid,
       @Parameter(description = "기준년도 분류", example = "land") @RequestParam(required = false)
@@ -472,14 +472,16 @@ public class InferenceDetailDto {
               ? Duration.between(inferStartDttm, inferEndDttm)
               : null;

-      long seconds = elapsed.getSeconds();
-      long abs = Math.abs(seconds);
+      if (elapsed != null) {
+        long seconds = elapsed.getSeconds();
+        long abs = Math.abs(seconds);

       long h = abs / 3600;
       long m = (abs % 3600) / 60;
       long s = abs % 60;

       this.elapsedDuration = String.format("%02d:%02d:%02d", h, m, s);
+      }
     }
   }
 }
@@ -62,6 +62,7 @@ public class InferenceResultService {
   private final ModelMngCoreService modelMngCoreService;
   private final ExternalHttpClient externalHttpClient;
   private final ObjectMapper objectMapper;
+  private final UserUtil userUtil;

   @Value("${inference.url}")
   private String inferenceUrl;
@@ -72,8 +73,6 @@ public class InferenceResultService {
   @Value("${spring.profiles.active}")
   private String profile;

-  private final UserUtil userUtil;
-
   /**
    * 추론관리 목록
    *
@@ -494,11 +493,11 @@ public class InferenceResultService {
     return dto;
   }

-  public AnalResultInfo getInferenceResultInfo(String uuid) {
+  public AnalResultInfo getInferenceResultInfo(UUID uuid) {
     return inferenceResultCoreService.getInferenceResultInfo(uuid);
   }

-  public List<Dashboard> getInferenceClassCountList(String uuid) {
+  public List<Dashboard> getInferenceClassCountList(UUID uuid) {
     return inferenceResultCoreService.getInferenceClassCountList(uuid);
   }

@@ -256,35 +256,23 @@ public class LabelAllocateApiController {
                   mediaType = "application/json",
                   schema = @Schema(implementation = UpdateClosedRequest.class),
                   examples = {
-                    @io.swagger.v3.oas.annotations.media.ExampleObject(
-                        name = "라벨링 종료",
-                        value =
-                            """
-                            {"closedType": "LABELING", "closedYn": "Y"}
-                            """),
-                    @io.swagger.v3.oas.annotations.media.ExampleObject(
-                        name = "검수 종료",
-                        value =
-                            """
-                            {"closedType": "INSPECTION", "closedYn": "Y"}
-                            """),
-                    @io.swagger.v3.oas.annotations.media.ExampleObject(
-                        name = "라벨링 재개",
-                        value =
-                            """
-                            {"closedType": "LABELING", "closedYn": "N"}
-                            """),
-                    @io.swagger.v3.oas.annotations.media.ExampleObject(
-                        name = "검수 재개",
-                        value =
-                            """
-                            {"closedType": "INSPECTION", "closedYn": "N"}
-                            """),
                     @io.swagger.v3.oas.annotations.media.ExampleObject(
                         name = "특정 프로젝트 라벨링 전체 종료",
                         value =
                             """
+                            {"uuid": "f97dc186-e6d3-4645-9737-3173dde8dc64", "closedType": "LABELING", "closedYn": "Y"}
+                            """),
+                    @io.swagger.v3.oas.annotations.media.ExampleObject(
+                        name = "특정 프로젝트 검수 전체 종료",
+                        value =
+                            """
                             {"uuid": "f97dc186-e6d3-4645-9737-3173dde8dc64", "closedType": "INSPECTION", "closedYn": "Y"}
+                            """),
+                    @io.swagger.v3.oas.annotations.media.ExampleObject(
+                        name = "특정 프로젝트 라벨링+검수 동시 종료",
+                        value =
+                            """
+                            {"uuid": "f97dc186-e6d3-4645-9737-3173dde8dc64", "closedType": "BOTH", "closedYn": "Y"}
                             """)
                   }))
           @RequestBody
@@ -294,7 +282,17 @@ public class LabelAllocateApiController {
     labelAllocateService.updateClosedYn(
         request.getUuid(), request.getClosedType(), request.getClosedYn());

-    String typeLabel = "LABELING".equals(request.getClosedType()) ? "라벨링" : "검수";
+    String typeLabel;
+    if ("LABELING".equals(request.getClosedType())) {
+      typeLabel = "라벨링";
+    } else if ("INSPECTION".equals(request.getClosedType())) {
+      typeLabel = "검수";
+    } else if ("BOTH".equals(request.getClosedType())) {
+      typeLabel = "라벨링 및 검수";
+    } else {
+      typeLabel = "작업";
+    }
+
     String statusMessage =
         "Y".equals(request.getClosedYn())
             ? typeLabel + "이(가) 종료되었습니다."
@@ -57,13 +57,14 @@ public class WorkerStatsDto {
       example = "f97dc186-e6d3-4645-9737-3173dde8dc64")
   private String uuid;

-  @NotBlank(message = "종료 유형은 필수입니다.")
-  @Pattern(regexp = "^(LABELING|INSPECTION)$", message = "종료 유형은 LABELING 또는 INSPECTION이어야 합니다.")
+  @Pattern(
+      regexp = "^(LABELING|INSPECTION|BOTH)$",
+      message = "종료 유형은 LABELING, INSPECTION 또는 BOTH 이어야 합니다.")
   @Schema(
-      description = "종료 유형 (LABELING: 라벨링, INSPECTION: 검수)",
+      description = "종료 유형 (LABELING: 라벨링만, INSPECTION: 검수만, BOTH: 라벨링+검수 동시)",
       example = "LABELING",
-      allowableValues = {"LABELING", "INSPECTION"},
-      requiredMode = Schema.RequiredMode.REQUIRED)
+      allowableValues = {"LABELING", "INSPECTION", "BOTH"},
+      requiredMode = Schema.RequiredMode.NOT_REQUIRED)
   private String closedType;

   @NotBlank(message = "종료 여부는 필수입니다.")
@@ -234,13 +234,19 @@ public class LabelAllocateService {
    * 프로젝트 종료 여부 업데이트
    *
    * @param uuid 프로젝트 UUID (선택, 미입력 시 최신 프로젝트 대상)
-   * @param closedType 종료 유형 (LABELING/INSPECTION)
+   * @param closedType 종료 유형 (LABELING/INSPECTION/BOTH)
    * @param closedYn 종료 여부 (Y/N)
    */
   @Transactional
   public void updateClosedYn(String uuid, String closedType, String closedYn) {
     String targetUuid = uuid;

+    // closedType 유효성 검증
+    if (closedType == null || closedType.isBlank()) {
+      throw new IllegalArgumentException(
+          "종료 유형(closedType)은 필수입니다. (LABELING, INSPECTION, BOTH 중 하나)");
+    }
+
     // uuid가 없으면 최신 프로젝트 uuid 조회
     if (targetUuid == null || targetUuid.isBlank()) {
       var latestProjectInfo = labelAllocateCoreService.findLatestProjectInfo();
@@ -403,11 +403,11 @@ public class InferenceResultCoreService {
     return dto;
   }

-  public AnalResultInfo getInferenceResultInfo(String uuid) {
+  public AnalResultInfo getInferenceResultInfo(UUID uuid) {
     return mapSheetLearnRepository.getInferenceResultInfo(uuid);
   }

-  public List<Dashboard> getInferenceClassCountList(String uuid) {
+  public List<Dashboard> getInferenceClassCountList(UUID uuid) {
     return mapSheetLearnRepository.getInferenceClassCountList(uuid);
   }

@@ -19,19 +19,15 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   private final EntityManager em;

   private final QMapSheetAnalInferenceEntity inferenceEntity =
       QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;

-  /**
-   * tb_map_sheet_anal_data_inference
-   */
+  /** tb_map_sheet_anal_data_inference */
   private final QMapSheetAnalDataInferenceEntity inferenceDataEntity =
       QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;

-  /**
-   * tb_map_sheet_anal_data_inference_geom
-   */
+  /** tb_map_sheet_anal_data_inference_geom */
   private final QMapSheetAnalDataInferenceGeomEntity inferenceGeomEntity =
       QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;

   // ===============================
   // Upsert (Native only)
@@ -40,39 +36,39 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   @Override
   public Long upsertGroupsFromMapSheetAnal(Long id) {
     String sql =
         """
         INSERT INTO tb_map_sheet_anal_inference (
             stage,
             compare_yyyy,
             target_yyyy,
             anal_title,
             detecting_cnt,
             created_dttm,
             m1_model_batch_id,
             m2_model_batch_id,
             m3_model_batch_id,
             learn_id
         )
         SELECT
             r.stage,
             r.compare_yyyy,
             r.target_yyyy,
-            CONCAT(r.stage, '_', r.compare_yyyy, '_', r.target_yyyy) AS anal_title,
+            r.title,
             r.detecting_cnt,
             now(),
             r.m1_model_batch_id,
             r.m2_model_batch_id,
             r.m3_model_batch_id,
             r.id
         FROM tb_map_sheet_learn r
         WHERE r.id = :id
         ON CONFLICT (stage, compare_yyyy, target_yyyy)
         DO UPDATE SET
             detecting_cnt = EXCLUDED.detecting_cnt,
             anal_title = EXCLUDED.anal_title,
             updated_dttm = now(),
             learn_id = EXCLUDED.learn_id
         RETURNING anal_uid
         """;

     Object result = em.createNativeQuery(sql).setParameter("id", id).getSingleResult();
@@ -81,7 +77,8 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   }

   /**
-   * inference_results 테이블을 기준으로 분석 데이터 단위(stage, compare_yyyy, target_yyyy, map_sheet_num)를 생성/갱신한다.
+   * inference_results 테이블을 기준으로 분석 데이터 단위(stage, compare_yyyy, target_yyyy, map_sheet_num)를
+   * 생성/갱신한다.
    *
    * <p>- 최초 생성 시 file_created_yn = false - detecting_cnt는 inference_results 건수 기준
    *
@@ -91,7 +88,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   public void upsertGroupsFromInferenceResults(Long analId) {

     String sql =
         """
         INSERT INTO tb_map_sheet_anal_data_inference (
             anal_uid,
             compare_yyyy,
@@ -147,7 +144,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   public void upsertGeomsFromInferenceResults(Long analUid) {

     String sql =
         """
         INSERT INTO tb_map_sheet_anal_data_inference_geom (
             result_uid,
             stage,
@@ -187,9 +184,9 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
         SELECT
             r.uid AS result_uid,
             msadi.stage,
-            r.after_p as cd_prob,
-            msl.compare_yyyy,
-            msl.target_yyyy,
+            r.cd_prob,
+            r.input1 AS compare_yyyy,
+            r.input2 AS target_yyyy,
             CASE
                 WHEN r.map_id ~ '^[0-9]+$' THEN r.map_id::bigint
                 ELSE NULL
@@ -242,7 +239,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   public void upsertSttcFromInferenceResults(Long analUid) {

     String sql =
         """
         INSERT INTO tb_map_sheet_anal_sttc
         (
             compare_yyyy
@@ -327,22 +324,22 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   public List<Long> findPendingDataUids(int limit, Long learnId) {

     return queryFactory
         .select(inferenceDataEntity.id)
         .from(inferenceEntity)
         .innerJoin(inferenceDataEntity)
         .on(inferenceEntity.id.eq(inferenceDataEntity.analUid))
         .where(
             inferenceEntity
                 .learnId
                 .eq(learnId)
                 .and(
                     inferenceDataEntity
                         .fileCreatedYn
                         .isFalse()
                         .or(inferenceDataEntity.fileCreatedYn.isNull())))
         .orderBy(inferenceDataEntity.id.asc())
         .limit(limit)
         .fetch();
   }

   // ===============================
@@ -361,13 +358,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     ZonedDateTime now = ZonedDateTime.now();

     return (int)
         queryFactory
             .update(inferenceDataEntity)
             .set(inferenceDataEntity.fileCreatedYn, false)
             .set(inferenceDataEntity.fileCreatedDttm, (ZonedDateTime) null)
             .set(inferenceDataEntity.updatedDttm, now)
             .where(inferenceDataEntity.id.eq(dataUid))
             .execute();
   }

   /**
@@ -380,13 +377,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     ZonedDateTime now = ZonedDateTime.now();

     return (int)
         queryFactory
             .update(inferenceDataEntity)
             .set(inferenceDataEntity.fileCreatedYn, true)
             .set(inferenceDataEntity.fileCreatedDttm, now)
             .set(inferenceDataEntity.updatedDttm, now)
             .where(inferenceDataEntity.id.eq(dataUid))
             .execute();
   }

   /**
@@ -399,13 +396,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     ZonedDateTime now = ZonedDateTime.now();

     return (int)
         queryFactory
             .update(inferenceGeomEntity)
             .set(inferenceGeomEntity.fileCreatedYn, false)
             .set(inferenceGeomEntity.fileCreatedDttm, (ZonedDateTime) null)
             .set(inferenceGeomEntity.updatedDttm, now)
             .where(inferenceGeomEntity.dataUid.eq(dataUid))
             .execute();
   }

   /**
@@ -423,13 +420,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     ZonedDateTime now = ZonedDateTime.now();

     return (int)
         queryFactory
             .update(inferenceGeomEntity)
             .set(inferenceGeomEntity.fileCreatedYn, true)
             .set(inferenceGeomEntity.fileCreatedDttm, now)
             .set(inferenceGeomEntity.updatedDttm, now)
             .where(inferenceGeomEntity.geoUid.in(geoUids))
             .execute();
   }

   // ===============================
@@ -443,18 +440,18 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    */
   @Override
   public List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(
       Long dataUid, int limit) {
     return queryFactory
         .selectFrom(inferenceGeomEntity)
         .where(
             inferenceGeomEntity.dataUid.eq(dataUid),
             inferenceGeomEntity.geom.isNotNull(),
             inferenceGeomEntity
                 .fileCreatedYn
                 .isFalse()
                 .or(inferenceGeomEntity.fileCreatedYn.isNull()))
         .orderBy(inferenceGeomEntity.geoUid.asc())
         .limit(limit)
         .fetch();
   }
 }
@@ -32,9 +32,9 @@ public interface MapSheetLearnRepositoryCustom {

   Integer getLearnStage(Integer compareYear, Integer targetYear);

-  AnalResultInfo getInferenceResultInfo(String uuid);
+  AnalResultInfo getInferenceResultInfo(UUID uuid);

-  List<Dashboard> getInferenceClassCountList(String uuid);
+  List<Dashboard> getInferenceClassCountList(UUID uuid);

   Page<Geom> getInferenceGeomList(UUID uuid, SearchGeoReq searchGeoReq);
 }
@@ -19,7 +19,6 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
 import com.kamco.cd.kamcoback.model.service.ModelMngService;
-import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
 import com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity;
 import com.querydsl.core.BooleanBuilder;
@@ -28,7 +27,6 @@ import com.querydsl.core.types.dsl.CaseBuilder;
 import com.querydsl.core.types.dsl.Expressions;
 import com.querydsl.core.types.dsl.NumberExpression;
 import com.querydsl.jpa.impl.JPAQueryFactory;
-import jakarta.persistence.EntityNotFoundException;
 import java.time.OffsetDateTime;
 import java.util.List;
 import java.util.Objects;
@@ -292,7 +290,7 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
   }

   @Override
-  public AnalResultInfo getInferenceResultInfo(String uuid) {
+  public AnalResultInfo getInferenceResultInfo(UUID uuid) {
     QModelMngEntity m1 = new QModelMngEntity("m1");
     QModelMngEntity m2 = new QModelMngEntity("m2");
     QModelMngEntity m3 = new QModelMngEntity("m3");
@@ -319,22 +317,22 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
         .on(mapSheetLearnEntity.m2ModelUuid.eq(m2.uuid))
         .leftJoin(m3)
         .on(mapSheetLearnEntity.m3ModelUuid.eq(m3.uuid))
-        .where(mapSheetLearnEntity.uuid.eq(UUID.fromString(uuid)))
+        .where(mapSheetLearnEntity.uuid.eq(uuid))
         .fetchOne();
   }

   @Override
-  public List<Dashboard> getInferenceClassCountList(String uuid) {
+  public List<Dashboard> getInferenceClassCountList(UUID uuid) {

     // analUid로 분석 정보 조회
-    MapSheetAnalInferenceEntity analEntity =
+    MapSheetLearnEntity learnEntity =
         queryFactory
-            .selectFrom(mapSheetAnalInferenceEntity)
-            .where(mapSheetAnalInferenceEntity.uuid.eq(UUID.fromString(uuid)))
+            .selectFrom(mapSheetLearnEntity)
+            .where(mapSheetLearnEntity.uuid.eq(uuid))
             .fetchOne();

-    if (Objects.isNull(analEntity)) {
-      throw new EntityNotFoundException("MapSheetAnalInferenceEntity not found for analUid: ");
+    if (Objects.isNull(learnEntity)) {
+      throw new CustomApiException("NOT_FOUND_DATA", HttpStatus.NOT_FOUND);
     }

     return queryFactory
@@ -343,8 +341,10 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
                 Dashboard.class,
                 mapSheetAnalSttcEntity.id.classAfterCd,
                 mapSheetAnalSttcEntity.classAfterCnt.sum()))
-        .from(mapSheetAnalSttcEntity)
-        .where(mapSheetAnalSttcEntity.id.analUid.eq(analEntity.getId()))
+        .from(mapSheetAnalInferenceEntity)
+        .innerJoin(mapSheetAnalSttcEntity)
+        .on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalSttcEntity.id.analUid))
+        .where(mapSheetAnalInferenceEntity.learnId.eq(learnEntity.getId()))
         .groupBy(mapSheetAnalSttcEntity.id.classAfterCd)
         .orderBy(mapSheetAnalSttcEntity.id.classAfterCd.asc())
         .fetch();
@@ -362,10 +362,10 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
     BooleanBuilder builder = new BooleanBuilder();

     // analUid로 분석 정보 조회
-    MapSheetAnalInferenceEntity analEntity =
+    MapSheetLearnEntity analEntity =
         queryFactory
-            .selectFrom(mapSheetAnalInferenceEntity)
-            .where(mapSheetAnalInferenceEntity.uuid.eq(uuid))
+            .selectFrom(mapSheetLearnEntity)
+            .where(mapSheetLearnEntity.uuid.eq(uuid))
             .fetchOne();

     if (Objects.isNull(analEntity)) {
@@ -373,7 +373,7 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
     }

     // 추론결과 id
-    builder.and(mapSheetAnalInferenceEntity.id.eq(analEntity.getId()));
+    builder.and(mapSheetAnalInferenceEntity.learnId.eq(analEntity.getId()));

     // 기준년도 분류
     if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")) {
@@ -1553,6 +1553,10 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
       updateQuery.set(mapSheetAnalInferenceEntity.labelingClosedYn, closedYn);
     } else if ("INSPECTION".equals(closedType)) {
       updateQuery.set(mapSheetAnalInferenceEntity.inspectionClosedYn, closedYn);
+    } else if ("BOTH".equals(closedType)) {
+      updateQuery
+          .set(mapSheetAnalInferenceEntity.labelingClosedYn, closedYn)
+          .set(mapSheetAnalInferenceEntity.inspectionClosedYn, closedYn);
     }

     updateQuery