Merge pull request 'feat/infer_dev_260107' (#285) from feat/infer_dev_260107 into develop
Reviewed-on: https://kamco.gitea.gs.dabeeo.com/dabeeo/kamco-dabeeo-backoffice/pulls/285
@@ -74,11 +74,16 @@ public class ChangeDetectionApiController {
       @Parameter(description = "5k/50k 구분(SCALE_5K/SCALE_50K))", required = true)
           @RequestParam(defaultValue = "SCALE_50K")
           MapScaleType scale,
+      @Parameter(
+              description = "변화탐지 년도(차수) /year-list 의 uuid",
+              example = "8584e8d4-53b3-4582-bde2-28a81495a626")
+          @RequestParam
+          UUID uuid,
       @Parameter(description = "이전 년도", example = "2023") @RequestParam Integer beforeYear,
       @Parameter(description = "이후 년도", example = "2024") @RequestParam Integer afterYear,
       @Parameter(description = "도엽번호(5k)", example = "35905086") @RequestParam String mapSheetNum) {
     ChangeDetectionDto.CogUrlReq req =
-        new ChangeDetectionDto.CogUrlReq(beforeYear, afterYear, mapSheetNum, type, scale);
+        new ChangeDetectionDto.CogUrlReq(uuid, beforeYear, afterYear, mapSheetNum, type, scale);
     return ApiResponseDto.ok(changeDetectionService.getChangeDetectionCogUrl(req));
   }

@@ -86,6 +86,7 @@ public class ChangeDetectionDto {
   @AllArgsConstructor
   public static class CogUrlReq {

+    private UUID uuid;
     private Integer beforeYear;
     private Integer afterYear;
     private String mapSheetNum;

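Note: the request DTO now carries the /year-list uuid as its first field, so the Lombok all-args constructor order becomes (uuid, beforeYear, afterYear, mapSheetNum, type, scale), which is why the controller call above was updated. A minimal construction sketch, not part of the PR; the sample values reuse the Swagger examples from the controller hunk, and the type value is assumed to be the DetectSearchType.MAPSHEET constant referenced later in ChangeDetectionRepositoryImpl:

    // Illustrative only; values are the Swagger examples, not real data.
    UUID yearListUuid = UUID.fromString("8584e8d4-53b3-4582-bde2-28a81495a626");
    ChangeDetectionDto.CogUrlReq req =
        new ChangeDetectionDto.CogUrlReq(
            yearListUuid,                                  // new field: uuid of the /year-list entry
            2023,                                          // beforeYear
            2024,                                          // afterYear
            "35905086",                                    // mapSheetNum (5k sheet number)
            ChangeDetectionDto.DetectSearchType.MAPSHEET,  // type (assumed value for this example)
            ChangeDetectionDto.MapScaleType.SCALE_50K);    // scale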
@@ -490,4 +490,24 @@ public class InferenceResultApiController {

     return ApiResponseDto.ok(inferenceResultService.getDownloadAudit(searchReq, downloadReq));
   }
+
+  @Operation(summary = "추론 실행중인 도엽 목록", description = "추론관리 실행중인 도엽명 5k 목록")
+  @ApiResponses({
+    @ApiResponse(
+        responseCode = "200",
+        description = "검색 성공",
+        content =
+            @Content(
+                mediaType = "application/json",
+                schema = @Schema(implementation = ApiResponseDto.class))),
+    @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
+    @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
+  })
+  @GetMapping(value = "/running-map/{uuid}")
+  public ApiResponseDto<List<String>> getInferenceRunMapName(
+      @Parameter(description = "uuid", example = "9d213416-0e9e-429a-b037-070e6a29946e")
+          @PathVariable
+          UUID uuid) {
+    return ApiResponseDto.ok(inferenceResultService.getInferenceRunMapId(uuid));
+  }
 }

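Note: the new endpoint returns the 5k map-sheet names whose inference is currently running for the given uuid, delegating controller → InferenceResultService → InferenceResultCoreService → MapSheetLearn5kRepository (see the following hunks). A rough client-side sketch; the host and the base path in front of /running-map/{uuid} are placeholders, since the controller's class-level mapping is not visible in this diff:

    // Hypothetical call; replace host and base path with the real controller mapping.
    RestTemplate restTemplate = new RestTemplate();
    UUID uuid = UUID.fromString("9d213416-0e9e-429a-b037-070e6a29946e");
    String url = "http://localhost:8080/<inference-base-path>/running-map/{uuid}";
    ResponseEntity<String> response = restTemplate.getForEntity(url, String.class, uuid);
    // Body: an ApiResponseDto envelope wrapping a List<String> of running 5k map-sheet names.
    System.out.println(response.getBody());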
@@ -595,4 +595,14 @@ public class InferenceResultService {
       AuditLogDto.searchReq searchReq, DownloadReq downloadReq) {
     return auditLogCoreService.findLogByAccount(searchReq, downloadReq);
   }
+
+  /**
+   * 실행중인 추론 도엽명 목록
+   *
+   * @param uuid uuid
+   * @return
+   */
+  public List<String> getInferenceRunMapId(UUID uuid) {
+    return inferenceResultCoreService.getInferenceRunMapId(uuid);
+  }
 }

@@ -476,4 +476,14 @@ public class InferenceResultCoreService {
     dto.setUid(entity.getUid());
     return dto;
   }
+
+  /**
+   * 실행중인 추론 도엽명 목록
+   *
+   * @param uuid 추론 실행중인 uuid
+   * @return
+   */
+  public List<String> getInferenceRunMapId(UUID uuid) {
+    return mapSheetLearn5kRepository.getInferenceRunMapId(uuid);
+  }
 }

@@ -37,4 +37,8 @@ public class TrainingDataReviewJobCoreService {
   public void lockInspectors(Long analUid, List<String> reviewerIds) {
     trainingDataReviewJobRepository.lockInspectors(analUid, reviewerIds);
   }
+
+  public void updateGeomUidTestState(List<Long> geomUids) {
+    trainingDataReviewJobRepository.updateGeomUidTestState(geomUids);
+  }
 }

@@ -217,11 +217,15 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
                 msl.m2_model_batch_id,
                 msl.m3_model_batch_id
             )
-            inner join tb_map_sheet_anal_data_inference msadi
-                on msadi.anal_uid = msl.anal_uid
+            INNER JOIN tb_map_sheet_anal_data_inference msadi
+                ON msadi.anal_uid = msl.anal_uid
                 AND r.map_id ~ '^[0-9]+$'
                 AND r.map_id::bigint = msadi.map_sheet_num
-            where msl.anal_uid = :analUid
+            WHERE msl.anal_uid = :analUid
+              AND r.before_c is not null
+              AND r.before_p is not null
+              AND r.after_c is not null
+              AND r.after_p is not null
             ORDER BY r.uid, r.created_date DESC NULLS LAST
         ) x
         ON CONFLICT (result_uid)

@@ -6,4 +6,6 @@ import java.util.UUID;
 public interface MapSheetLearn5kRepositoryCustom {

   void saveFail5k(UUID uuid, List<Long> failMapIds, String type);
+
+  List<String> getInferenceRunMapId(UUID uuid);
 }

@@ -1,9 +1,11 @@
 package com.kamco.cd.kamcoback.postgres.repository.Inference;

+import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity.mapInkx5kEntity;
 import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearn5kEntity.mapSheetLearn5kEntity;
 import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity;

 import com.querydsl.core.types.dsl.BooleanPath;
+import com.querydsl.core.types.dsl.Expressions;
 import com.querydsl.jpa.impl.JPAQueryFactory;
 import java.util.List;
 import java.util.UUID;

@@ -59,4 +61,25 @@ public class MapSheetLearn5kRepositoryImpl implements MapSheetLearn5kRepositoryC
                 .and(mapSheetLearn5kEntity.mapSheetNum.in(failMapIds)))
         .execute();
   }
+
+  @Override
+  public List<String> getInferenceRunMapId(UUID uuid) {
+    return queryFactory
+        .select(mapInkx5kEntity.mapidNm)
+        .from(mapSheetLearnEntity)
+        .innerJoin(mapSheetLearn5kEntity)
+        .on(mapSheetLearn5kEntity.learn.id.eq(mapSheetLearnEntity.id))
+        .innerJoin(mapInkx5kEntity)
+        .on(
+            Expressions.booleanTemplate(
+                    "function('regexp_match', {0}, '^[0-9]+$') is not null",
+                    mapInkx5kEntity.mapidcdNo)
+                .and(
+                    mapSheetLearn5kEntity.mapSheetNum.eq(
+                        Expressions.numberTemplate(
+                            Long.class, "cast({0} as long)", mapInkx5kEntity.mapidcdNo))))
+        .where(mapSheetLearnEntity.uuid.eq(uuid))
+        .groupBy(mapInkx5kEntity.mapidNm)
+        .fetch();
+  }
 }

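Note: this query and the MapSheetLearnRepositoryImpl changes below use the same guard-then-cast idiom. JPQL/HQL has no '~' regex operator, so the numeric check is expressed through function('regexp_match', ...), and only then is the string column cast to long for the join. Restated in isolation (illustrative variable names, no new behaviour):

    // mapidcdNo is a string column; filter non-numeric values before casting,
    // otherwise cast({0} as long) would fail at the database level.
    BooleanExpression isNumeric =
        Expressions.booleanTemplate(
            "function('regexp_match', {0}, '^[0-9]+$') is not null", mapInkx5kEntity.mapidcdNo);
    NumberExpression<Long> numericNo =
        Expressions.numberTemplate(Long.class, "cast({0} as long)", mapInkx5kEntity.mapidcdNo);
    // join condition: isNumeric.and(mapSheetLearn5kEntity.mapSheetNum.eq(numericNo))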
@@ -24,6 +24,7 @@ import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
 import com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity;
 import com.querydsl.core.BooleanBuilder;
 import com.querydsl.core.types.Projections;
+import com.querydsl.core.types.dsl.BooleanExpression;
 import com.querydsl.core.types.dsl.CaseBuilder;
 import com.querydsl.core.types.dsl.Expressions;
 import com.querydsl.core.types.dsl.NumberExpression;

@@ -360,42 +361,51 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
   @Override
   public Page<Geom> getInferenceGeomList(UUID uuid, SearchGeoReq searchGeoReq) {
     Pageable pageable = searchGeoReq.toPageable();
-    BooleanBuilder builder = new BooleanBuilder();
+    BooleanBuilder where = new BooleanBuilder();

-    // analUid로 분석 정보 조회
+    // 1) 분석 엔티티 조회
     MapSheetLearnEntity analEntity =
         queryFactory
             .selectFrom(mapSheetLearnEntity)
             .where(mapSheetLearnEntity.uuid.eq(uuid))
             .fetchOne();

-    if (Objects.isNull(analEntity)) {
+    if (analEntity == null) {
       throw new CustomApiException("NOT_FOUND_DATA", HttpStatus.NOT_FOUND);
     }

-    // 추론결과 id
-    builder.and(mapSheetAnalInferenceEntity.learnId.eq(analEntity.getId()));
+    // 2) where 조건
+    where.and(mapSheetAnalInferenceEntity.learnId.eq(analEntity.getId()));

-    // 기준년도 분류
-    if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")) {
-      builder.and(
+    if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().isBlank()) {
+      where.and(
           mapSheetAnalDataInferenceGeomEntity.classAfterCd.eq(
               searchGeoReq.getTargetClass().toLowerCase()));
     }

-    // 비교년도 분류
-    if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")) {
-      builder.and(
+    if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().isBlank()) {
+      where.and(
           mapSheetAnalDataInferenceGeomEntity.classBeforeCd.eq(
               searchGeoReq.getCompareClass().toLowerCase()));
     }

-    // 분석도엽
     if (searchGeoReq.getMapSheetNum() != null) {
-      Long mapSheetNum = searchGeoReq.getMapSheetNum();
-      builder.and(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.like("%" + mapSheetNum + "%"));
+      //
+      // where.and(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.eq(searchGeoReq.getMapSheetNum()));
+      where.and(
+          mapSheetAnalDataInferenceGeomEntity.mapSheetNum.like(
+              "%" + searchGeoReq.getMapSheetNum() + "%"));
     }

+    // 3) inkx 조인 조건: JPQL/HQL에서 '~' 불가 → function('regexp_match', ...) 사용
+    BooleanExpression inkxIsNumeric =
+        Expressions.booleanTemplate(
+            "function('regexp_match', {0}, '^[0-9]+$') is not null", mapInkx5kEntity.mapidcdNo);
+
+    NumberExpression<Long> inkxNoAsLong =
+        Expressions.numberTemplate(Long.class, "cast({0} as long)", mapInkx5kEntity.mapidcdNo);
+
+    // 4) content
     List<Geom> content =
         queryFactory
             .select(

@@ -411,45 +421,32 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
                 mapSheetAnalDataInferenceGeomEntity.classAfterCd,
                 mapSheetAnalDataInferenceGeomEntity.classAfterProb,
                 mapSheetAnalDataInferenceGeomEntity.mapSheetNum,
-                mapInkx5kEntity.mapidNm
-                // Expressions.stringTemplate(
-                //     "ST_AsGeoJSON({0})",
-                //     mapSheetAnalDataInferenceGeomEntity.geom),
-                // Expressions.stringTemplate(
-                //     "ST_AsGeoJSON({0})",
-                //     mapSheetAnalDataInferenceGeomEntity.geomCenter)
-                ))
+                mapInkx5kEntity.mapidNm))
             .from(mapSheetAnalInferenceEntity)
             .join(mapSheetAnalDataInferenceEntity)
             .on(mapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
             .join(mapSheetAnalDataInferenceGeomEntity)
             .on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
             .join(mapInkx5kEntity)
-            .on(
-                mapSheetAnalDataInferenceGeomEntity.mapSheetNum.eq(
-                    Expressions.numberTemplate(
-                        Long.class, "CAST({0} AS long)", mapInkx5kEntity.mapidcdNo)))
-            .where(builder)
+            .on(inkxIsNumeric.and(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.eq(inkxNoAsLong)))
+            .where(where)
+            .orderBy(mapSheetAnalDataInferenceGeomEntity.geoUid.desc())
             .offset(pageable.getOffset())
             .limit(pageable.getPageSize())
             .fetch();

-    long total =
+    // 5) total (조인 최소화 유지)
+    Long total =
         queryFactory
-            .select(mapSheetAnalDataInferenceGeomEntity.geoUid)
+            .select(mapSheetAnalDataInferenceGeomEntity.geoUid.count())
             .from(mapSheetAnalInferenceEntity)
             .join(mapSheetAnalDataInferenceEntity)
             .on(mapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
             .join(mapSheetAnalDataInferenceGeomEntity)
             .on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
-            .join(mapInkx5kEntity)
-            .on(
-                mapSheetAnalDataInferenceGeomEntity.mapSheetNum.eq(
-                    Expressions.numberTemplate(
-                        Long.class, "CAST({0} AS long)", mapInkx5kEntity.mapidcdNo)))
-            .where(builder)
-            .fetchCount();
+            .where(where)
+            .fetchOne();

-    return new PageImpl<>(content, pageable, total);
+    return new PageImpl<>(content, pageable, total == null ? 0L : total);
   }
 }

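Note: the paging total is now computed with a count() projection and fetchOne() instead of fetchCount(), which is deprecated in Querydsl 5; the inkx join is dropped from the count query to keep it minimal, and the result is null-guarded before building the page. The general shape of that pattern, with placeholder entity/predicate names:

    // Illustrative pattern only; 'entity', 'predicate', 'content' and 'pageable' are placeholders.
    Long total =
        queryFactory
            .select(entity.id.count())
            .from(entity)
            .where(predicate)
            .fetchOne();
    // Defensive null-guard, mirroring the change above.
    return new PageImpl<>(content, pageable, total == null ? 0L : total);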
@@ -12,6 +12,7 @@ import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
+import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.DetectSearchType;
 import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapScaleType;
 import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapSheetList;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;

@@ -111,6 +112,26 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport

   @Override
   public ChangeDetectionDto.CogUrlDto getChangeDetectionCogUrl(ChangeDetectionDto.CogUrlReq req) {
+    String mapSheetNum = req.getMapSheetNum();
+
+    if (req.getType().equals(DetectSearchType.MAPSHEET)
+        && req.getScale().equals(MapScaleType.SCALE_50K)) {
+      mapSheetNum =
+          queryFactory
+              .select(mapSheetAnalDataInferenceEntity.mapSheetNum.stringValue())
+              .from(mapSheetAnalInferenceEntity)
+              .innerJoin(mapSheetAnalDataInferenceEntity)
+              .on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
+              .where(
+                  mapSheetAnalInferenceEntity.uuid.eq(req.getUuid()),
+                  mapSheetAnalDataInferenceEntity
+                      .mapSheetNum
+                      .stringValue()
+                      .like("%" + req.getMapSheetNum() + "%"))
+              .orderBy(mapSheetAnalDataInferenceEntity.mapSheetNum.asc())
+              .fetchFirst();
+    }
+
     ChangeDetectionDto.CogUrlData data =
         queryFactory
             .select(

@@ -128,7 +149,7 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
                     .year
                     .eq(req.getBeforeYear())
                     .or(imageryEntity.year.eq(req.getAfterYear())),
-                imageryEntity.scene5k.eq(req.getMapSheetNum()))
+                imageryEntity.scene5k.eq(mapSheetNum))
             .groupBy(mapInkx5kEntity.geom)
             .fetchOne();

@@ -191,6 +212,7 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
         .where(
             mapSheetAnalInferenceEntity.uuid.eq(uuid),
             mapScaleTypeAnalDataSearchExpression(scale, mapSheetNum))
+        .orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc())
         .fetch();

     ObjectMapper mapper = new ObjectMapper();

@@ -313,7 +335,8 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
         .on(mapSheetAnalDataInferenceEntity.mapSheetNum.stringValue().eq(mapInkx5kEntity.mapidcdNo))
         .innerJoin(mapInkx5kEntity.mapInkx50k, mapInkx50kEntity)
         .where(mapSheetAnalInferenceEntity.uuid.eq(uuid))
-        .orderBy(mapInkx5kEntity.mapidcdNo.asc())
+        .groupBy(mapInkx50kEntity.mapidcdNo, mapInkx50kEntity.mapidNm)
+        .orderBy(mapInkx50kEntity.mapidcdNo.asc())
         .fetch();
   }

@@ -16,4 +16,6 @@ public interface TrainingDataReviewJobRepositoryCustom {
   void assignReviewerBatch(List<UUID> assignmentUids, String reviewerId);

   Tasks findAssignmentTask(String assignmentUid);
+
+  void updateGeomUidTestState(List<Long> geomUids);
 }

@@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.postgres.repository.scheduler;

 import static com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity.labelingAssignmentEntity;
 import static com.kamco.cd.kamcoback.postgres.entity.QLabelingInspectorEntity.labelingInspectorEntity;
+import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;

 import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InspectState;
 import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelState;

@@ -99,6 +100,7 @@ public class TrainingDataReviewJobRepositoryImpl extends QuerydslRepositorySuppo
         .update(labelingAssignmentEntity)
         .set(labelingAssignmentEntity.inspectorUid, reviewerId)
         .set(labelingAssignmentEntity.inspectState, InspectState.UNCONFIRM.getId())
+        .set(labelingAssignmentEntity.modifiedDate, ZonedDateTime.now())
         .where(labelingAssignmentEntity.assignmentUid.eq(assignmentUid))
         .execute();
   }

@@ -115,6 +117,7 @@ public class TrainingDataReviewJobRepositoryImpl extends QuerydslRepositorySuppo
         .update(labelingAssignmentEntity)
         .set(labelingAssignmentEntity.inspectorUid, reviewerId)
         .set(labelingAssignmentEntity.inspectState, InspectState.UNCONFIRM.getId())
+        .set(labelingAssignmentEntity.modifiedDate, ZonedDateTime.now())
         .where(labelingAssignmentEntity.assignmentUid.in(assignmentUids))
         .execute();
   }

@@ -132,4 +135,14 @@ public class TrainingDataReviewJobRepositoryImpl extends QuerydslRepositorySuppo
         .where(labelingAssignmentEntity.assignmentUid.eq(UUID.fromString(assignmentUid)))
         .fetchOne();
   }
+
+  @Override
+  public void updateGeomUidTestState(List<Long> geomUids) {
+    queryFactory
+        .update(mapSheetAnalDataInferenceGeomEntity)
+        .set(mapSheetAnalDataInferenceGeomEntity.testState, InspectState.UNCONFIRM.getId())
+        .set(mapSheetAnalDataInferenceGeomEntity.updatedDttm, ZonedDateTime.now())
+        .where(mapSheetAnalDataInferenceGeomEntity.geoUid.in(geomUids))
+        .execute();
+  }
 }

@@ -209,6 +209,15 @@ public class TrainingDataReviewRepositoryImpl extends QuerydslRepositorySupport
   @Override
   public void updateReviewPolygonClass(
       Long inferenceGeomUid, Geometry geometry, Properties properties, String status) {
+
+    // inference_geom 테이블에 상태 업데이트
+    queryFactory
+        .update(mapSheetAnalDataInferenceGeomEntity)
+        .set(mapSheetAnalDataInferenceGeomEntity.testStateDttm, ZonedDateTime.now())
+        .set(mapSheetAnalDataInferenceGeomEntity.testState, status)
+        .where(mapSheetAnalDataInferenceGeomEntity.geoUid.eq(inferenceGeomUid))
+        .execute();
+
     // inference_geom 테이블 정보 가져오기
     MapSheetAnalDataInferenceGeomEntity entity =
         queryFactory

@@ -80,8 +80,10 @@ public class TrainingDataReviewJobService {
                 List<UUID> assignmentUids =
                     assignedTasks.stream().map(Tasks::getAssignmentUid).toList();

                 trainingDataReviewJobCoreService.assignReviewerBatch(assignmentUids, reviewerId);

+                List<Long> geomUids = assignedTasks.stream().map(Tasks::getInferenceUid).toList();
+                trainingDataReviewJobCoreService.updateGeomUidTestState(geomUids);
               });
         }
       } catch (Exception e) {

@@ -127,5 +129,9 @@ public class TrainingDataReviewJobService {
       trainingDataReviewJobCoreService.lockInspectors(analUid, order);

       trainingDataReviewJobCoreService.assignReviewer(task.getAssignmentUid(), order.getFirst());
+
+      List<Long> geomUids = new ArrayList<>();
+      geomUids.add(task.getInferenceUid());
+      trainingDataReviewJobCoreService.updateGeomUidTestState(geomUids);
     }
   }