Merge remote-tracking branch 'origin/feat/infer_dev_260107' into feat/infer_dev_260107

DanielLee
2026-01-14 16:48:07 +09:00
6 changed files with 252 additions and 0 deletions


@@ -307,4 +307,64 @@ public class InferenceResultApiController {
String uuid) {
return ApiResponseDto.ok(inferenceResultService.getInferenceResultInfo(uuid));
}
@Operation(summary = "추론결과 분류별 탐지 건수", description = "추론결과 분류별 탐지 건수")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "검색 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = InferenceDetailDto.AnalResSummary.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@GetMapping("/infer-class-count")
public ApiResponseDto<List<InferenceDetailDto.Dashboard>> getInferenceClassCountList(
@Parameter(description = "회차 uuid", example = "8584e8d4-53b3-4582-bde2-28a81495a626")
@RequestParam
String uuid) {
return ApiResponseDto.ok(inferenceResultService.getInferenceClassCountList(uuid));
}
@Operation(summary = "추론관리 분석결과 상세 목록", description = "추론관리 분석결과 상세 목록 geojson 데이터 조회")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "검색 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = Page.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@GetMapping("/geom-list")
public ApiResponseDto<Page<InferenceDetailDto.Geom>> getInferenceGeomList(
@Parameter(description = "회차 uuid", example = "8584e8d4-53b3-4582-bde2-28a81495a626")
@RequestParam(required = true)
String uuid,
@Parameter(description = "기준년도 분류", example = "land") @RequestParam(required = false)
String targetClass,
@Parameter(description = "비교년도 분류", example = "waste") @RequestParam(required = false)
String compareClass,
@Parameter(description = "5000:1 도엽번호 37801011,37801012") @RequestParam(required = false)
List<Long> mapSheetNum,
@Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0")
int page,
@Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20")
int size,
@Parameter(description = "정렬 조건 (형식: 필드명,방향)", example = "name,asc")
@RequestParam(required = false)
String sort) {
InferenceDetailDto.SearchGeoReq searchGeoReq =
new InferenceDetailDto.SearchGeoReq(
targetClass, compareClass, mapSheetNum, page, size, sort);
Page<InferenceDetailDto.Geom> geomList =
inferenceResultService.getInferenceGeomList(uuid, searchGeoReq);
return ApiResponseDto.ok(geomList);
}
}
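For reference, a minimal client-side sketch of how the two new endpoints could be exercised once the service is running. The base URL, the controller's request-mapping prefix (not visible in this hunk), and the round uuid are assumptions; the sample uuid is the one used in the @Parameter examples above.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class InferenceResultApiSmokeTest {
    public static void main(String[] args) throws Exception {
        // Assumed local base URL; prepend the controller's base path if it has one.
        String base = "http://localhost:8080";
        String uuid = "8584e8d4-53b3-4582-bde2-28a81495a626";
        HttpClient client = HttpClient.newHttpClient();

        // Detection counts per classification for one inference round.
        HttpRequest countReq = HttpRequest.newBuilder()
            .uri(URI.create(base + "/infer-class-count?uuid=" + uuid))
            .GET()
            .build();
        System.out.println(client.send(countReq, HttpResponse.BodyHandlers.ofString()).body());

        // First page of GeoJSON detail rows, filtered by target-year classification.
        HttpRequest geomReq = HttpRequest.newBuilder()
            .uri(URI.create(base + "/geom-list?uuid=" + uuid + "&targetClass=land&page=0&size=20"))
            .GET()
            .build();
        System.out.println(client.send(geomReq, HttpResponse.BodyHandlers.ofString()).body());
    }
}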


@@ -294,8 +294,10 @@ public class InferenceDetailDto {
@Getter
public static class Geom {
UUID uuid;
Integer compareYyyy;
Integer targetYyyy;
Double cdProb;
String classBeforeCd;
String classBeforeName;
Double classBeforeProb;
@@ -303,23 +305,29 @@ public class InferenceDetailDto {
String classAfterName;
Double classAfterProb;
Long mapSheetNum;
String mapSheetName;
@JsonIgnore String gemoStr;
@JsonIgnore String geomCenterStr;
JsonNode gemo;
JsonNode geomCenter;
public Geom(
UUID uuid,
Integer compareYyyy,
Integer targetYyyy,
Double cdProb,
String classBeforeCd,
Double classBeforeProb,
String classAfterCd,
Double classAfterProb,
Long mapSheetNum,
String mapSheetName,
String gemoStr,
String geomCenterStr) {
this.uuid = uuid;
this.compareYyyy = compareYyyy;
this.targetYyyy = targetYyyy;
this.cdProb = cdProb;
this.classBeforeCd = classBeforeCd;
this.classBeforeName = DetectionClassification.fromString(classBeforeCd).getDesc();
this.classBeforeProb = classBeforeProb;
@@ -327,6 +335,7 @@ public class InferenceDetailDto {
this.classAfterName = DetectionClassification.fromString(classAfterCd).getDesc();
this.classAfterProb = classAfterProb;
this.mapSheetNum = mapSheetNum;
this.mapSheetName = mapSheetName;
this.gemoStr = gemoStr;
this.geomCenterStr = geomCenterStr;
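The Geom DTO carries the raw ST_AsGeoJSON output in the @JsonIgnore string fields (gemoStr, geomCenterStr) and exposes parsed JsonNode fields (gemo, geomCenter) alongside them. The parsing step itself is outside this hunk; a minimal Jackson sketch of how it could be done is below — the helper class and where it would be called from are assumptions, not part of this change.

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical helper: converts the ST_AsGeoJSON string held in gemoStr /
// geomCenterStr into the JsonNode the API exposes as gemo / geomCenter.
public final class GeoJsonParser {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    private GeoJsonParser() {}

    public static JsonNode parse(String geoJson) {
        if (geoJson == null || geoJson.isBlank()) {
            return null;
        }
        try {
            return MAPPER.readTree(geoJson);
        } catch (JsonProcessingException e) {
            throw new IllegalArgumentException("Invalid GeoJSON: " + geoJson, e);
        }
    }
}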


@@ -10,7 +10,9 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResultInfo;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Detail;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Geom;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.SearchGeoReq;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.DetectOption;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
@@ -486,4 +488,12 @@ public class InferenceResultService {
public AnalResultInfo getInferenceResultInfo(String uuid) {
return inferenceResultCoreService.getInferenceResultInfo(uuid);
}
public List<Dashboard> getInferenceClassCountList(String uuid) {
return inferenceResultCoreService.getInferenceClassCountList(uuid);
}
public Page<Geom> getInferenceGeomList(String uuid, SearchGeoReq searchGeoReq) {
return inferenceResultCoreService.getInferenceGeomList(uuid, searchGeoReq);
}
}


@@ -5,8 +5,10 @@ import com.kamco.cd.kamcoback.common.utils.UserUtil;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResultInfo;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Geom;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.InferenceBatchSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.SearchGeoReq;
import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
@@ -388,4 +390,12 @@ public class InferenceResultCoreService {
public AnalResultInfo getInferenceResultInfo(String uuid) {
return mapSheetLearnRepository.getInferenceResultInfo(uuid);
}
public List<Dashboard> getInferenceClassCountList(String uuid) {
return mapSheetLearnRepository.getInferenceClassCountList(uuid);
}
public Page<Geom> getInferenceGeomList(String uuid, SearchGeoReq searchGeoReq) {
return mapSheetLearnRepository.getInferenceGeomList(uuid, searchGeoReq);
}
}


@@ -1,6 +1,9 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResultInfo;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Geom;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.SearchGeoReq;
import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
@@ -30,4 +33,8 @@ public interface MapSheetLearnRepositoryCustom {
Integer getLearnStage(Integer compareYear, Integer targetYear);
AnalResultInfo getInferenceResultInfo(String uuid);
List<Dashboard> getInferenceClassCountList(String uuid);
Page<Geom> getInferenceGeomList(String uuid, SearchGeoReq searchGeoReq);
}


@@ -1,24 +1,35 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import static com.kamco.cd.kamcoback.postgres.entity.QGpuMetricEntity.gpuMetricEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity.mapInkx5kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QSystemMetricEntity.systemMetricEntity;
import com.kamco.cd.kamcoback.common.utils.DateRange;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResultInfo;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Geom;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.SearchGeoReq;
import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
import com.kamco.cd.kamcoback.model.service.ModelMngService;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityNotFoundException;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
@@ -293,4 +304,149 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
.where(mapSheetLearnEntity.uuid.eq(UUID.fromString(uuid)))
.fetchOne();
}
@Override
public List<Dashboard> getInferenceClassCountList(String uuid) {
// Look up the analysis record by its uuid
MapSheetAnalInferenceEntity analEntity =
queryFactory
.selectFrom(mapSheetAnalInferenceEntity)
.where(mapSheetAnalInferenceEntity.uuid.eq(UUID.fromString(uuid)))
.fetchOne();
if (Objects.isNull(analEntity)) {
throw new EntityNotFoundException("MapSheetAnalInferenceEntity not found for uuid: " + uuid);
}
return queryFactory
.select(
Projections.constructor(
Dashboard.class,
mapSheetAnalSttcEntity.id.classAfterCd,
mapSheetAnalSttcEntity.classAfterCnt.sum()))
.from(mapSheetAnalSttcEntity)
.where(mapSheetAnalSttcEntity.id.analUid.eq(analEntity.getId()))
.groupBy(mapSheetAnalSttcEntity.id.classAfterCd)
.orderBy(mapSheetAnalSttcEntity.id.classAfterCd.asc())
.fetch();
}
/**
* Detailed list of analysis results (GeoJSON rows).
*
* @param uuid inference round uuid
* @param searchGeoReq filter, paging and sort conditions
* @return a page of Geom rows
*/
@Override
public Page<Geom> getInferenceGeomList(String uuid, SearchGeoReq searchGeoReq) {
Pageable pageable = searchGeoReq.toPageable();
BooleanBuilder builder = new BooleanBuilder();
// Look up the analysis record by its uuid
MapSheetAnalInferenceEntity analEntity =
queryFactory
.selectFrom(mapSheetAnalInferenceEntity)
.where(mapSheetAnalInferenceEntity.uuid.eq(UUID.fromString(uuid)))
.fetchOne();
if (Objects.isNull(analEntity)) {
throw new EntityNotFoundException("MapSheetAnalInferenceEntity not found for uuid: " + uuid);
}
// Inference result id
builder.and(mapSheetAnalInferenceEntity.id.eq(analEntity.getId()));
// Target-year classification
if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")) {
builder.and(
mapSheetAnalDataInferenceGeomEntity
.classAfterCd
.toLowerCase()
.eq(searchGeoReq.getTargetClass().toLowerCase()));
}
// Compare-year classification
if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")) {
builder.and(
mapSheetAnalDataInferenceGeomEntity
.classBeforeCd
.toLowerCase()
.eq(searchGeoReq.getCompareClass().toLowerCase()));
}
// Analysis map sheets
if (searchGeoReq.getMapSheetNum() != null && !searchGeoReq.getMapSheetNum().isEmpty()) {
List<Long> mapSheetNum = searchGeoReq.getMapSheetNum();
builder.and(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.in(mapSheetNum));
}
List<Geom> content =
queryFactory
.select(
Projections.constructor(
Geom.class,
mapSheetAnalDataInferenceGeomEntity.uuid,
mapSheetAnalDataInferenceGeomEntity.compareYyyy,
mapSheetAnalDataInferenceGeomEntity.targetYyyy,
mapSheetAnalDataInferenceGeomEntity.cdProb,
mapSheetAnalDataInferenceGeomEntity.classBeforeCd,
mapSheetAnalDataInferenceGeomEntity.classBeforeProb,
mapSheetAnalDataInferenceGeomEntity.classAfterCd,
mapSheetAnalDataInferenceGeomEntity.classAfterProb,
mapSheetAnalDataInferenceGeomEntity.mapSheetNum,
mapInkx5kEntity.mapidNm,
Expressions.stringTemplate(
"ST_AsGeoJSON({0})", mapSheetAnalDataInferenceGeomEntity.geom),
Expressions.stringTemplate(
"ST_AsGeoJSON({0})", mapSheetAnalDataInferenceGeomEntity.geomCenter)))
.from(mapSheetAnalInferenceEntity)
.join(mapSheetAnalDataInferenceGeomEntity)
.on(
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(
mapSheetAnalInferenceEntity.compareYyyy),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(
mapSheetAnalInferenceEntity.targetYyyy),
mapSheetAnalDataInferenceGeomEntity.stage.eq(mapSheetAnalInferenceEntity.stage))
.join(mapInkx5kEntity)
.on(
mapSheetAnalDataInferenceGeomEntity
.mapSheetNum
.stringValue()
.eq(mapInkx5kEntity.mapidcdNo))
.where(builder)
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.fetch();
long total =
queryFactory
.select(mapSheetAnalDataInferenceGeomEntity.geoUid)
.from(mapSheetAnalInferenceEntity)
.join(mapSheetAnalDataInferenceGeomEntity)
.on(
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(
mapSheetAnalInferenceEntity.compareYyyy),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(
mapSheetAnalInferenceEntity.targetYyyy),
mapSheetAnalDataInferenceGeomEntity.stage.eq(mapSheetAnalInferenceEntity.stage))
.join(mapInkx5kEntity)
.on(
mapSheetAnalDataInferenceGeomEntity
.mapSheetNum
.stringValue()
.eq(mapInkx5kEntity.mapidcdNo))
.where(builder)
.fetchCount();
return new PageImpl<>(content, pageable, total);
}
}
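One note on the paging above, offered as a sketch rather than part of this change: depending on the Querydsl version in use, fetchCount() may be deprecated. A common alternative is an explicit count() projection combined with Spring Data's PageableExecutionUtils, which skips the count query when the fetched page already shows there are no further rows. The fragment below reuses queryFactory, builder, pageable and content exactly as defined in getInferenceGeomList and assumes org.springframework.data.support.PageableExecutionUtils is on the classpath.

// Sketch only: same joins and filter as the content query, but with an explicit
// count() projection instead of fetchCount().
Long total =
    queryFactory
        .select(mapSheetAnalDataInferenceGeomEntity.geoUid.count())
        .from(mapSheetAnalInferenceEntity)
        .join(mapSheetAnalDataInferenceGeomEntity)
        .on(
            mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(
                mapSheetAnalInferenceEntity.compareYyyy),
            mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(
                mapSheetAnalInferenceEntity.targetYyyy),
            mapSheetAnalDataInferenceGeomEntity.stage.eq(mapSheetAnalInferenceEntity.stage))
        .join(mapInkx5kEntity)
        .on(
            mapSheetAnalDataInferenceGeomEntity
                .mapSheetNum
                .stringValue()
                .eq(mapInkx5kEntity.mapidcdNo))
        .where(builder)
        .fetchOne();
return PageableExecutionUtils.getPage(content, pageable, () -> total == null ? 0L : total);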