Merge pull request 'feat/infer_dev_260107' (#285) from feat/infer_dev_260107 into develop

Reviewed-on: https://kamco.gitea.gs.dabeeo.com/dabeeo/kamco-dabeeo-backoffice/pulls/285
This commit is contained in:
2026-01-20 14:24:02 +09:00
15 changed files with 174 additions and 45 deletions

View File

@@ -74,11 +74,16 @@ public class ChangeDetectionApiController {
@Parameter(description = "5k/50k 구분(SCALE_5K/SCALE_50K))", required = true)
@RequestParam(defaultValue = "SCALE_50K")
MapScaleType scale,
@Parameter(
description = "변화탐지 년도(차수) /year-list 의 uuid",
example = "8584e8d4-53b3-4582-bde2-28a81495a626")
@RequestParam
UUID uuid,
@Parameter(description = "이전 년도", example = "2023") @RequestParam Integer beforeYear,
@Parameter(description = "이후 년도", example = "2024") @RequestParam Integer afterYear,
@Parameter(description = "도엽번호(5k)", example = "35905086") @RequestParam String mapSheetNum) {
ChangeDetectionDto.CogUrlReq req =
new ChangeDetectionDto.CogUrlReq(beforeYear, afterYear, mapSheetNum, type, scale);
new ChangeDetectionDto.CogUrlReq(uuid, beforeYear, afterYear, mapSheetNum, type, scale);
return ApiResponseDto.ok(changeDetectionService.getChangeDetectionCogUrl(req));
}

View File

@@ -86,6 +86,7 @@ public class ChangeDetectionDto {
@AllArgsConstructor
public static class CogUrlReq {
private UUID uuid;
private Integer beforeYear;
private Integer afterYear;
private String mapSheetNum;

View File

@@ -490,4 +490,24 @@ public class InferenceResultApiController {
return ApiResponseDto.ok(inferenceResultService.getDownloadAudit(searchReq, downloadReq));
}
/**
 * Lists the 5k map-sheet names for an inference run that is currently executing.
 *
 * <p>Delegates to {@code inferenceResultService.getInferenceRunMapId(uuid)}.
 * NOTE(review): the service/repository chain is named {@code ...MapId} but the repository
 * implementation selects {@code mapidNm} (map-sheet *names*) — consider aligning the names.
 *
 * @param uuid identifier of the running inference (path variable)
 * @return wrapped list of 5k map-sheet name strings
 */
@Operation(summary = "추론 실행중인 도엽 목록", description = "추론관리 실행중인 도엽명 5k 목록")
@ApiResponses({
@ApiResponse(
responseCode = "200",
description = "검색 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = ApiResponseDto.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@GetMapping(value = "/running-map/{uuid}")
public ApiResponseDto<List<String>> getInferenceRunMapName(
@Parameter(description = "uuid", example = "9d213416-0e9e-429a-b037-070e6a29946e")
@PathVariable
UUID uuid) {
return ApiResponseDto.ok(inferenceResultService.getInferenceRunMapId(uuid));
}
}

View File

@@ -595,4 +595,14 @@ public class InferenceResultService {
AuditLogDto.searchReq searchReq, DownloadReq downloadReq) {
return auditLogCoreService.findLogByAccount(searchReq, downloadReq);
}
/**
 * Returns the 5k map-sheet names of the inference run identified by {@code uuid}.
 *
 * <p>Thin pass-through to {@code inferenceResultCoreService}; no additional logic here.
 *
 * @param uuid identifier of the running inference
 * @return list of 5k map-sheet name strings (empty when nothing is running for the uuid)
 */
public List<String> getInferenceRunMapId(UUID uuid) {
return inferenceResultCoreService.getInferenceRunMapId(uuid);
}
}

View File

@@ -476,4 +476,14 @@ public class InferenceResultCoreService {
dto.setUid(entity.getUid());
return dto;
}
/**
 * Returns the 5k map-sheet names of the inference run identified by {@code uuid}.
 *
 * <p>Thin pass-through to {@code mapSheetLearn5kRepository}; no additional logic here.
 *
 * @param uuid identifier of the running inference
 * @return list of 5k map-sheet name strings (empty when nothing is running for the uuid)
 */
public List<String> getInferenceRunMapId(UUID uuid) {
return mapSheetLearn5kRepository.getInferenceRunMapId(uuid);
}
}

View File

@@ -37,4 +37,8 @@ public class TrainingDataReviewJobCoreService {
/**
 * Locks the given inspectors for the analysis identified by {@code analUid}.
 *
 * <p>Pass-through to the repository; the actual locking semantics (what "lock" updates)
 * live in {@code trainingDataReviewJobRepository.lockInspectors} — not visible here.
 *
 * @param analUid analysis uid whose inspectors are locked
 * @param reviewerIds inspector/reviewer ids to lock
 */
public void lockInspectors(Long analUid, List<String> reviewerIds) {
trainingDataReviewJobRepository.lockInspectors(analUid, reviewerIds);
}
/**
 * Updates the test state of the inference-geometry rows with the given uids.
 *
 * <p>Pass-through to the repository implementation, which sets the state to
 * {@code InspectState.UNCONFIRM} and stamps the modification time.
 *
 * @param geomUids geometry uids ({@code geoUid}) to update; no-op for an empty list
 */
public void updateGeomUidTestState(List<Long> geomUids) {
trainingDataReviewJobRepository.updateGeomUidTestState(geomUids);
}
}

View File

@@ -217,11 +217,15 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
msl.m2_model_batch_id,
msl.m3_model_batch_id
)
inner join tb_map_sheet_anal_data_inference msadi
on msadi.anal_uid = msl.anal_uid
INNER JOIN tb_map_sheet_anal_data_inference msadi
ON msadi.anal_uid = msl.anal_uid
AND r.map_id ~ '^[0-9]+$'
AND r.map_id::bigint = msadi.map_sheet_num
where msl.anal_uid = :analUid
WHERE msl.anal_uid = :analUid
AND r.before_c is not null
AND r.before_p is not null
AND r.after_c is not null
AND r.after_p is not null
ORDER BY r.uid, r.created_date DESC NULLS LAST
) x
ON CONFLICT (result_uid)

View File

@@ -6,4 +6,6 @@ import java.util.UUID;
/** Custom (QueryDSL-backed) repository operations for 5k map-sheet learn records. */
public interface MapSheetLearn5kRepositoryCustom {
// Marks the given 5k map sheets as failed for the run `uuid`; `type` semantics are
// defined by the implementation — not visible here.
void saveFail5k(UUID uuid, List<Long> failMapIds, String type);
// Returns the 5k map-sheet names currently being inferred for the run `uuid`.
List<String> getInferenceRunMapId(UUID uuid);
}

View File

@@ -1,9 +1,11 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity.mapInkx5kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearn5kEntity.mapSheetLearn5kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity;
import com.querydsl.core.types.dsl.BooleanPath;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.List;
import java.util.UUID;
@@ -59,4 +61,25 @@ public class MapSheetLearn5kRepositoryImpl implements MapSheetLearn5kRepositoryC
.and(mapSheetLearn5kEntity.mapSheetNum.in(failMapIds)))
.execute();
}
@Override
public List<String> getInferenceRunMapId(UUID uuid) {
// Returns distinct 5k map-sheet names (mapidNm) for the run identified by `uuid`.
// NOTE(review): method name says "MapId" but it selects map *names* — confirm intent.
return queryFactory
.select(mapInkx5kEntity.mapidNm)
.from(mapSheetLearnEntity)
// learn -> learn5k: child rows of this learn record
.innerJoin(mapSheetLearn5kEntity)
.on(mapSheetLearn5kEntity.learn.id.eq(mapSheetLearnEntity.id))
// learn5k -> inkx5k: join on the numeric sheet number, guarding first that
// mapidcdNo is all-digits (regexp_match) so the cast below cannot fail.
.innerJoin(mapInkx5kEntity)
.on(
Expressions.booleanTemplate(
"function('regexp_match', {0}, '^[0-9]+$') is not null",
mapInkx5kEntity.mapidcdNo)
.and(
mapSheetLearn5kEntity.mapSheetNum.eq(
Expressions.numberTemplate(
Long.class, "cast({0} as long)", mapInkx5kEntity.mapidcdNo))))
.where(mapSheetLearnEntity.uuid.eq(uuid))
// groupBy used here purely to de-duplicate names
.groupBy(mapInkx5kEntity.mapidNm)
.fetch();
}
}

View File

@@ -24,6 +24,7 @@ import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberExpression;
@@ -360,42 +361,51 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
@Override
public Page<Geom> getInferenceGeomList(UUID uuid, SearchGeoReq searchGeoReq) {
Pageable pageable = searchGeoReq.toPageable();
BooleanBuilder builder = new BooleanBuilder();
BooleanBuilder where = new BooleanBuilder();
// analUid로 분석 정보 조회
// 1) 분석 엔티티 조회
MapSheetLearnEntity analEntity =
queryFactory
.selectFrom(mapSheetLearnEntity)
.where(mapSheetLearnEntity.uuid.eq(uuid))
.fetchOne();
if (Objects.isNull(analEntity)) {
if (analEntity == null) {
throw new CustomApiException("NOT_FOUND_DATA", HttpStatus.NOT_FOUND);
}
// 추론결과 id
builder.and(mapSheetAnalInferenceEntity.learnId.eq(analEntity.getId()));
// 2) where 조건
where.and(mapSheetAnalInferenceEntity.learnId.eq(analEntity.getId()));
// 기준년도 분류
if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")) {
builder.and(
if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().isBlank()) {
where.and(
mapSheetAnalDataInferenceGeomEntity.classAfterCd.eq(
searchGeoReq.getTargetClass().toLowerCase()));
}
// 비교년도 분류
if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")) {
builder.and(
if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().isBlank()) {
where.and(
mapSheetAnalDataInferenceGeomEntity.classBeforeCd.eq(
searchGeoReq.getCompareClass().toLowerCase()));
}
// 분석도엽
if (searchGeoReq.getMapSheetNum() != null) {
Long mapSheetNum = searchGeoReq.getMapSheetNum();
builder.and(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.like("%" + mapSheetNum + "%"));
//
// where.and(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.eq(searchGeoReq.getMapSheetNum()));
where.and(
mapSheetAnalDataInferenceGeomEntity.mapSheetNum.like(
"%" + searchGeoReq.getMapSheetNum() + "%"));
}
// 3) inkx 조인 조건: JPQL/HQL에서 '~' 불가 → function('regexp_match', ...) 사용
BooleanExpression inkxIsNumeric =
Expressions.booleanTemplate(
"function('regexp_match', {0}, '^[0-9]+$') is not null", mapInkx5kEntity.mapidcdNo);
NumberExpression<Long> inkxNoAsLong =
Expressions.numberTemplate(Long.class, "cast({0} as long)", mapInkx5kEntity.mapidcdNo);
// 4) content
List<Geom> content =
queryFactory
.select(
@@ -411,45 +421,32 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
mapSheetAnalDataInferenceGeomEntity.classAfterCd,
mapSheetAnalDataInferenceGeomEntity.classAfterProb,
mapSheetAnalDataInferenceGeomEntity.mapSheetNum,
mapInkx5kEntity.mapidNm
// Expressions.stringTemplate(
// "ST_AsGeoJSON({0})",
// mapSheetAnalDataInferenceGeomEntity.geom),
// Expressions.stringTemplate(
// "ST_AsGeoJSON({0})",
// mapSheetAnalDataInferenceGeomEntity.geomCenter)
))
mapInkx5kEntity.mapidNm))
.from(mapSheetAnalInferenceEntity)
.join(mapSheetAnalDataInferenceEntity)
.on(mapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
.join(mapSheetAnalDataInferenceGeomEntity)
.on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
.join(mapInkx5kEntity)
.on(
mapSheetAnalDataInferenceGeomEntity.mapSheetNum.eq(
Expressions.numberTemplate(
Long.class, "CAST({0} AS long)", mapInkx5kEntity.mapidcdNo)))
.where(builder)
.on(inkxIsNumeric.and(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.eq(inkxNoAsLong)))
.where(where)
.orderBy(mapSheetAnalDataInferenceGeomEntity.geoUid.desc())
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.fetch();
long total =
// 5) total (조인 최소화 유지)
Long total =
queryFactory
.select(mapSheetAnalDataInferenceGeomEntity.geoUid)
.select(mapSheetAnalDataInferenceGeomEntity.geoUid.count())
.from(mapSheetAnalInferenceEntity)
.join(mapSheetAnalDataInferenceEntity)
.on(mapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
.join(mapSheetAnalDataInferenceGeomEntity)
.on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
.join(mapInkx5kEntity)
.on(
mapSheetAnalDataInferenceGeomEntity.mapSheetNum.eq(
Expressions.numberTemplate(
Long.class, "CAST({0} AS long)", mapInkx5kEntity.mapidcdNo)))
.where(builder)
.fetchCount();
.where(where)
.fetchOne();
return new PageImpl<>(content, pageable, total);
return new PageImpl<>(content, pageable, total == null ? 0L : total);
}
}

View File

@@ -12,6 +12,7 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.DetectSearchType;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapScaleType;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapSheetList;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
@@ -111,6 +112,26 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
@Override
public ChangeDetectionDto.CogUrlDto getChangeDetectionCogUrl(ChangeDetectionDto.CogUrlReq req) {
String mapSheetNum = req.getMapSheetNum();
if (req.getType().equals(DetectSearchType.MAPSHEET)
&& req.getScale().equals(MapScaleType.SCALE_50K)) {
mapSheetNum =
queryFactory
.select(mapSheetAnalDataInferenceEntity.mapSheetNum.stringValue())
.from(mapSheetAnalInferenceEntity)
.innerJoin(mapSheetAnalDataInferenceEntity)
.on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
.where(
mapSheetAnalInferenceEntity.uuid.eq(req.getUuid()),
mapSheetAnalDataInferenceEntity
.mapSheetNum
.stringValue()
.like("%" + req.getMapSheetNum() + "%"))
.orderBy(mapSheetAnalDataInferenceEntity.mapSheetNum.asc())
.fetchFirst();
}
ChangeDetectionDto.CogUrlData data =
queryFactory
.select(
@@ -128,7 +149,7 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
.year
.eq(req.getBeforeYear())
.or(imageryEntity.year.eq(req.getAfterYear())),
imageryEntity.scene5k.eq(req.getMapSheetNum()))
imageryEntity.scene5k.eq(mapSheetNum))
.groupBy(mapInkx5kEntity.geom)
.fetchOne();
@@ -191,6 +212,7 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
.where(
mapSheetAnalInferenceEntity.uuid.eq(uuid),
mapScaleTypeAnalDataSearchExpression(scale, mapSheetNum))
.orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc())
.fetch();
ObjectMapper mapper = new ObjectMapper();
@@ -313,7 +335,8 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
.on(mapSheetAnalDataInferenceEntity.mapSheetNum.stringValue().eq(mapInkx5kEntity.mapidcdNo))
.innerJoin(mapInkx5kEntity.mapInkx50k, mapInkx50kEntity)
.where(mapSheetAnalInferenceEntity.uuid.eq(uuid))
.orderBy(mapInkx5kEntity.mapidcdNo.asc())
.groupBy(mapInkx50kEntity.mapidcdNo, mapInkx50kEntity.mapidNm)
.orderBy(mapInkx50kEntity.mapidcdNo.asc())
.fetch();
}

View File

@@ -16,4 +16,6 @@ public interface TrainingDataReviewJobRepositoryCustom {
void assignReviewerBatch(List<UUID> assignmentUids, String reviewerId);
Tasks findAssignmentTask(String assignmentUid);
void updateGeomUidTestState(List<Long> geomUids);
}

View File

@@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.postgres.repository.scheduler;
import static com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity.labelingAssignmentEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QLabelingInspectorEntity.labelingInspectorEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InspectState;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelState;
@@ -99,6 +100,7 @@ public class TrainingDataReviewJobRepositoryImpl extends QuerydslRepositorySuppo
.update(labelingAssignmentEntity)
.set(labelingAssignmentEntity.inspectorUid, reviewerId)
.set(labelingAssignmentEntity.inspectState, InspectState.UNCONFIRM.getId())
.set(labelingAssignmentEntity.modifiedDate, ZonedDateTime.now())
.where(labelingAssignmentEntity.assignmentUid.eq(assignmentUid))
.execute();
}
@@ -115,6 +117,7 @@ public class TrainingDataReviewJobRepositoryImpl extends QuerydslRepositorySuppo
.update(labelingAssignmentEntity)
.set(labelingAssignmentEntity.inspectorUid, reviewerId)
.set(labelingAssignmentEntity.inspectState, InspectState.UNCONFIRM.getId())
.set(labelingAssignmentEntity.modifiedDate, ZonedDateTime.now())
.where(labelingAssignmentEntity.assignmentUid.in(assignmentUids))
.execute();
}
@@ -132,4 +135,14 @@ public class TrainingDataReviewJobRepositoryImpl extends QuerydslRepositorySuppo
.where(labelingAssignmentEntity.assignmentUid.eq(UUID.fromString(assignmentUid)))
.fetchOne();
}
@Override
public void updateGeomUidTestState(List<Long> geomUids) {
// Bulk-update: mark the selected inference geometries as UNCONFIRM and stamp the
// modification time. Executes a single UPDATE; matches zero rows for an empty list.
queryFactory
.update(mapSheetAnalDataInferenceGeomEntity)
.set(mapSheetAnalDataInferenceGeomEntity.testState, InspectState.UNCONFIRM.getId())
.set(mapSheetAnalDataInferenceGeomEntity.updatedDttm, ZonedDateTime.now())
.where(mapSheetAnalDataInferenceGeomEntity.geoUid.in(geomUids))
.execute();
}
}

View File

@@ -209,6 +209,15 @@ public class TrainingDataReviewRepositoryImpl extends QuerydslRepositorySupport
@Override
public void updateReviewPolygonClass(
Long inferenceGeomUid, Geometry geometry, Properties properties, String status) {
// inference_geom 테이블에 상태 업데이트
queryFactory
.update(mapSheetAnalDataInferenceGeomEntity)
.set(mapSheetAnalDataInferenceGeomEntity.testStateDttm, ZonedDateTime.now())
.set(mapSheetAnalDataInferenceGeomEntity.testState, status)
.where(mapSheetAnalDataInferenceGeomEntity.geoUid.eq(inferenceGeomUid))
.execute();
// inference_geom 테이블 정보 가져오기
MapSheetAnalDataInferenceGeomEntity entity =
queryFactory

View File

@@ -80,8 +80,10 @@ public class TrainingDataReviewJobService {
List<UUID> assignmentUids =
assignedTasks.stream().map(Tasks::getAssignmentUid).toList();
trainingDataReviewJobCoreService.assignReviewerBatch(assignmentUids, reviewerId);
List<Long> geomUids = assignedTasks.stream().map(Tasks::getInferenceUid).toList();
trainingDataReviewJobCoreService.updateGeomUidTestState(geomUids);
});
}
} catch (Exception e) {
@@ -127,5 +129,9 @@ public class TrainingDataReviewJobService {
trainingDataReviewJobCoreService.lockInspectors(analUid, order);
trainingDataReviewJobCoreService.assignReviewer(task.getAssignmentUid(), order.getFirst());
List<Long> geomUids = new ArrayList<>();
geomUids.add(task.getInferenceUid());
trainingDataReviewJobCoreService.updateGeomUidTestState(geomUids);
}
}