[KC-108] AI API batch work in progress
@@ -6,6 +6,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.InferenceBatchSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
@@ -24,6 +25,7 @@ import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx5kRepository;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.constraints.NotNull;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
@@ -63,11 +65,11 @@ public class InferenceResultCoreService {
   */
  public UUID saveInferenceInfo(InferenceResultDto.RegReq req) {
    String mapSheetName =
        req.getMapSheetNum().get(0).getMapSheetName() + " 외 " + req.getMapSheetNum().size() + "건";

    if (req.getMapSheetNum().size() == 1) {
      mapSheetName =
          req.getMapSheetNum().get(0).getMapSheetName() + " " + req.getMapSheetNum().size() + "건";
    }

    MapSheetLearnEntity mapSheetLearnEntity = new MapSheetLearnEntity();
@@ -115,22 +117,22 @@ public class InferenceResultCoreService {

    // Extract the chunk numbers to build the IN condition
    List<String> chunkNums =
        buffer.stream().map(e -> String.valueOf(e.getMapSheetNum())).distinct().toList();

    // Exclude sheets not enabled for inference
    List<MapInkx5kEntity> usedEntities =
        mapInkx5kRepository.findByMapSheetNumInAndUseInference(chunkNums, CommonUseStatus.USE);

    // TODO: if excluded from inference, also need to check whether the files exist
    // Collect only the sheet numbers from the query result into a Set
    Set<String> usedSet =
        usedEntities.stream()
            .map(MapInkx5kEntity::getMapidcdNo)
            .collect(java.util.stream.Collectors.toSet());

    // Save after filtering
    List<MapSheetLearn5kEntity> toSave =
        buffer.stream().filter(e -> usedSet.contains(String.valueOf(e.getMapSheetNum()))).toList();

    if (!toSave.isEmpty()) {
      mapSheetLearn5kRepository.saveAll(toSave);
@@ -150,9 +152,9 @@ public class InferenceResultCoreService {
   */
  public InferenceDetailDto.AnalResSummary getInferenceResultSummary(Long id) {
    InferenceDetailDto.AnalResSummary summary =
        mapSheetAnalDataRepository
            .getInferenceResultSummary(id)
            .orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
    return summary;
  }

@@ -173,7 +175,7 @@ public class InferenceResultCoreService {
   * @return
   */
  public Page<InferenceDetailDto.Geom> getInferenceResultGeomList(
      Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
    return mapSheetAnalDataRepository.getInferenceGeomList(id, searchGeoReq);
  }

@@ -185,16 +187,16 @@ public class InferenceResultCoreService {
   */
  @Transactional(readOnly = true)
  public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom(
      @NotNull Long analyId, InferenceDetailDto.SearchGeoReq searchReq) {
    // Fetch the dataIds that belong to the analysis ID.
    List<Long> dataIds =
        mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
            .mapToLong(MapSheetAnalDataInferenceEntity::getId)
            .boxed()
            .toList();
    // Fetch the polygon data for those records
    Page<MapSheetAnalDataInferenceGeomEntity> mapSheetAnalDataGeomEntities =
        mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
    return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataInferenceGeomEntity::toEntity);
  }

@@ -211,34 +213,64 @@ public class InferenceResultCoreService {
  @Transactional(readOnly = true)
  public List<MapSheet> listGetScenes5k(Long analyId) {
    List<String> sceneCodes =
        mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
            .mapToLong(MapSheetAnalDataInferenceEntity::getMapSheetNum)
            .mapToObj(String::valueOf)
            .toList();

    return mapInkx5kRepository.listGetScenes5k(sceneCodes).stream()
        .map(MapInkx5kEntity::toEntity)
        .toList();
  }

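  // Updates the inference record identified by the request UUID: stores the batch ID and the model
  // start/end times for the reported model type (M1/M2/M3), then applies any non-null top-level fields.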
  public void update(SaveInferenceAiDto request) {
    MapSheetLearnEntity entity =
        mapSheetLearnRepository
            .getInferenceResultByUuid(request.getUuid())
            .orElseThrow(() -> new EntityNotFoundException());

    if (request.getType().equals("M1")) {
      entity.setM1ModelBatchId(request.getBatchId());

      if (request.getModelStartDttm() != null) {
        entity.setM1ModelStartDttm(request.getModelStartDttm());
      }

      if (request.getModelEndDttm() != null) {
        entity.setM1ModelEndDttm(request.getModelEndDttm());
      }

    } else if (request.getType().equals("M2")) {
      entity.setM2ModelBatchId(request.getBatchId());

      if (request.getModelStartDttm() != null) {
        entity.setM2ModelStartDttm(request.getModelStartDttm());
      }

      if (request.getModelEndDttm() != null) {
        entity.setM2ModelEndDttm(request.getModelEndDttm());
      }

    } else if (request.getType().equals("M3")) {
      entity.setM3ModelBatchId(request.getBatchId());

      if (request.getModelStartDttm() != null) {
        entity.setM3ModelStartDttm(request.getModelStartDttm());
      }

      if (request.getModelEndDttm() != null) {
        entity.setM3ModelEndDttm(request.getModelEndDttm());
      }
    }

    if (request.getInferStartDttm() != null) {
      entity.setInferStartDttm(request.getInferStartDttm());
    }

    if (request.getInferEndDttm() != null) {
      entity.setInferEndDttm(request.getInferEndDttm());
    }

    if (request.getModelComparePath() != null) {
      entity.setModelComparePath(request.getModelComparePath());
    }
@@ -247,8 +279,24 @@ public class InferenceResultCoreService {
      entity.setModelTargetPath(request.getModelTargetPath());
    }

    entity.setRunningModelType(request.getType());
    entity.setStatus(request.getStatus());
    if (request.getDetectEndCnt() != null) {
      entity.setDetectEndCnt(request.getDetectEndCnt());
    }

    if (request.getType() != null) {
      entity.setRunningModelType(request.getType());
    }

    if (request.getStatus() != null) {
      entity.setStatus(request.getStatus());
    }

    if (request.getUpdateUid() != null) {
      entity.setUpdatedUid(request.getUpdateUid());
    }

    entity.setUpdatedDttm(ZonedDateTime.now());

  }

  public List<InferenceServerStatusDto> getInferenceServerStatusList() {
@@ -257,23 +305,26 @@ public class InferenceResultCoreService {

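  // Maps a stored inference record with the given status to an InferenceBatchSheet (batch IDs, model UUIDs, status).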
  public InferenceBatchSheet getInferenceResultByStatus(String status) {
    MapSheetLearnEntity entity =
        mapSheetLearnRepository
            .getInferenceResultByStatus(status)
            .orElseThrow(() -> new EntityNotFoundException(status));

    InferenceBatchSheet inferenceBatchSheet = new InferenceBatchSheet();
    inferenceBatchSheet.setId(entity.getId());
    inferenceBatchSheet.setUuid(entity.getUuid());
    inferenceBatchSheet.setM1BatchId(entity.getM1ModelBatchId());
    inferenceBatchSheet.setM2BatchId(entity.getM2ModelBatchId());
    inferenceBatchSheet.setM3BatchId(entity.getM3ModelBatchId());
    inferenceBatchSheet.setStatus(entity.getStatus());
    inferenceBatchSheet.setRunningModelType(entity.getRunningModelType());
    inferenceBatchSheet.setM1ModelUuid(entity.getM1ModelUuid());
    inferenceBatchSheet.setM2ModelUuid(entity.getM2ModelUuid());
    inferenceBatchSheet.setM3ModelUuid(entity.getM3ModelUuid());
    return inferenceBatchSheet;
  }

  public SaveInferenceAiDto getInferenceAiResultById(Long id) {

    return null;
  public InferenceProgressDto getInferenceAiResultById(Long id, String type, UUID modelUuid) {
    return mapSheetLearnRepository.getInferenceAiResultById(id, modelUuid);
  }

  public InferenceStatusDetailDto getInferenceStatus(UUID uuid) {

@@ -25,9 +25,9 @@ public class MapSheetLearnEntity {
  @Id
  @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_learn_id_gen")
  @SequenceGenerator(
      name = "tb_map_sheet_learn_id_gen",
      sequenceName = "tb_map_sheet_learn_uid",
      allocationSize = 1)
  @Column(name = "id", nullable = false)
  private Long id;

@@ -124,17 +124,30 @@ public class MapSheetLearnEntity {
  @Column(name = "model_target_path")
  private String modelTargetPath;

  @Column(name = "m1_model_start_dttm")
  private ZonedDateTime m1ModelStartDttm;
  @Column(name = "m2_model_start_dttm")
  private ZonedDateTime m2ModelStartDttm;
  @Column(name = "m3_model_start_dttm")
  private ZonedDateTime m3ModelStartDttm;
  @Column(name = "m1_model_end_dttm")
  private ZonedDateTime m1ModelEndDttm;
  @Column(name = "m2_model_end_dttm")
  private ZonedDateTime m2ModelEndDttm;
  @Column(name = "m3_model_end_dttm")
  private ZonedDateTime m3ModelEndDttm;

  public InferenceResultDto.ResultList toDto() {
    return new InferenceResultDto.ResultList(
        this.uuid,
        this.title,
        this.status,
        this.mapSheetCnt,
        this.detectingCnt,
        this.inferStartDttm,
        this.inferEndDttm,
        this.elapsedTime,
        this.applyYn,
        this.applyDttm);
  }
}

@@ -20,7 +20,7 @@ public interface MapSheetLearnRepositoryCustom {

  Optional<MapSheetLearnEntity> getInferenceResultByStatus(String status);

  Optional<InferenceProgressDto> getInferenceAiResultById(Long id);
  InferenceProgressDto getInferenceAiResultById(Long id, UUID modelUuid);

  public InferenceStatusDetailDto getInferenceStatus(UUID uuid);
}

@@ -40,7 +40,7 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCustom {
    BooleanBuilder builder = new BooleanBuilder();

    NumberExpression<Integer> statusOrder =
        new CaseBuilder().when(mapSheetLearnEntity.status.eq("Y")).then(0).otherwise(1);

    // Whether the result has been applied (applyYn)
    if (StringUtils.isNotBlank(req.getApplyYn())) {
@@ -54,10 +54,10 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCustom {
    // Applied date range (applyDttm)
    if (req.getStrtDttm() != null && req.getEndDttm() != null) {
      builder.and(
          mapSheetLearnEntity
              .applyDttm
              .goe(DateRange.start(req.getStrtDttm()))
              .and(mapSheetLearnEntity.applyDttm.lt(DateRange.end(req.getEndDttm()))));
    }

    // Title
@@ -66,21 +66,21 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCustom {
    }

    List<MapSheetLearnEntity> content =
        queryFactory
            .select(mapSheetLearnEntity)
            .from(mapSheetLearnEntity)
            .where(builder)
            .offset(pageable.getOffset())
            .limit(pageable.getPageSize())
            .orderBy(statusOrder.asc())
            .fetch();

    Long total =
        queryFactory
            .select(mapSheetLearnEntity.count())
            .from(mapSheetLearnEntity)
            .where(builder)
            .fetchOne();

    return new PageImpl<>(content, pageable, total == null ? 0L : total);
  }
@@ -88,10 +88,10 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCustom {
  @Override
  public Optional<MapSheetLearnEntity> getInferenceResultByUuid(UUID uuid) {
    return Optional.ofNullable(
        queryFactory
            .selectFrom(mapSheetLearnEntity)
            .where(mapSheetLearnEntity.uuid.eq(uuid))
            .fetchOne());
  }

  @Override
@@ -100,41 +100,41 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCustom {
    BooleanBuilder builder = new BooleanBuilder();

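    // Latest metric row per server: max id1 grouped by serverName, for both system and GPU metrics.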
    List<Integer> latestIds =
        queryFactory
            .select(systemMetricEntity.id1.max())
            .from(systemMetricEntity)
            .groupBy(systemMetricEntity.serverName)
            .fetch();

    List<Integer> latestGpuIds =
        queryFactory
            .select(gpuMetricEntity.id1.max())
            .from(gpuMetricEntity)
            .groupBy(gpuMetricEntity.serverName)
            .fetch();

    List<InferenceServerStatusDto> foundContent =
        queryFactory
            .select(
                Projections.constructor(
                    InferenceServerStatusDto.class,
                    systemMetricEntity.serverName,
                    systemMetricEntity.cpuUser,
                    systemMetricEntity.cpuSystem,
                    systemMetricEntity.memused,
                    systemMetricEntity.kbmemused,
                    gpuMetricEntity.gpuUtil))
            .from(systemMetricEntity)
            .leftJoin(gpuMetricEntity)
            .on(
                gpuMetricEntity
                    .id1
                    .in(latestGpuIds)
                    .and(gpuMetricEntity.serverName.eq(systemMetricEntity.serverName)))
            .where(systemMetricEntity.id1.in(latestIds)) // uses an IN clause
            .orderBy(systemMetricEntity.serverName.asc())
            .limit(4)
            .fetch();

    return foundContent;
  }
@@ -142,11 +142,11 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCustom {
  @Override
  public Optional<MapSheetLearnEntity> getInferenceResultByStatus(String status) {
    return Optional.ofNullable(
        queryFactory
            .selectFrom(mapSheetLearnEntity)
            .where(mapSheetLearnEntity.status.eq(status))
            .limit(1)
            .fetchOne());
  }

  @Override
@@ -159,61 +159,64 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCustom {
    QModelMngEntity m3Model = new QModelMngEntity("m3Model");

    InferenceStatusDetailDto foundContent =
        queryFactory
            .select(
                Projections.constructor(
                    InferenceStatusDetailDto.class,
                    mapSheetLearnEntity.title,
                    mapSheetLearnEntity.compareYyyy,
                    mapSheetLearnEntity.targetYyyy,
                    mapSheetLearnEntity.detectOption,
                    mapSheetLearnEntity.mapSheetScope,
                    mapSheetLearnEntity.inferStartDttm,
                    mapSheetLearnEntity.inferEndDttm,
                    mapSheetLearnEntity.detectingCnt,
                    m1Model.modelVer.as("model1Ver"),
                    m2Model.modelVer.as("model2Ver"),
                    m3Model.modelVer.as("model3Ver")))
            .from(mapSheetLearnEntity)
            .leftJoin(m1Model)
            .on(m1Model.uuid.eq(mapSheetLearnEntity.m1ModelUuid))
            .leftJoin(m2Model)
            .on(m2Model.uuid.eq(mapSheetLearnEntity.m2ModelUuid))
            .leftJoin(m3Model)
            .on(m3Model.uuid.eq(mapSheetLearnEntity.m3ModelUuid))
            .where(mapSheetLearnEntity.uuid.eq(uuid))
            .fetchOne();

    return foundContent;
  }

  @Override
  public Optional<InferenceProgressDto> getInferenceAiResultById(Long id) {
    // InferenceProgressDto dto =
    //     queryFactory
    //         .select(
    //             Projections.constructor(
    //                 InferenceProgressDto.class,
    //                 Projections.constructor(
    //                     InferenceProgressDto.pred_requests_areas.class,
    //                     mapSheetLearnEntity.compareYyyy,
    //                     mapSheetLearnEntity.targetYyyy,
    //                     mapSheetLearnEntity.modelComparePath,
    //                     mapSheetLearnEntity.modelTargetPath
    //                 ),
    //                 modelMngEntity.uuid.eq(mapSheetLearnEntity.m1ModelUuid).as("m1ModelUuid"),
    //                 modelMngEntity.uuid.eq(mapSheetLearnEntity.m2ModelUuid).as("m2ModelUuid"),
    //                 mapSheetLearnEntity.cdModelPath,
    //                 mapSheetLearnEntity.cdModelConfig,
    //                 mapSheetLearnEntity.clsModelPath,
    //                 mapSheetLearnEntity.clsModelVersion,
    //                 mapSheetLearnEntity.cdModelType,
    //                 mapSheetLearnEntity.priority
    //             )
    //         )
    //         .from(mapSheetLearnEntity)
    //         .where(mapSheetLearnEntity.id.eq(id))
    //         .fetchOne();
    return Optional.empty();
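  // Builds the InferenceProgressDto for the given record id, joining the model row referenced by
  // modelUuid to pick up model paths, file names, and versions.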
  public InferenceProgressDto getInferenceAiResultById(Long id, UUID modelUuid) {

    QModelMngEntity model = new QModelMngEntity("model");

    InferenceProgressDto dto =
        queryFactory
            .select(
                Projections.constructor(
                    InferenceProgressDto.class,
                    Projections.constructor(
                        InferenceProgressDto.pred_requests_areas.class,
                        mapSheetLearnEntity.compareYyyy,
                        mapSheetLearnEntity.targetYyyy,
                        mapSheetLearnEntity.modelComparePath,
                        mapSheetLearnEntity.modelTargetPath),
                    model.modelVer.as("modelVer"),
                    model.cdModelPath.as("cdModelPath"),
                    model.cdModelFileName.as("cdModelFileName"),
                    model.cdModelConfigPath.as("cdModelConfigPath"),
                    model.cdModelConfigFileName.as("cdModelConfigFileName"),
                    model.clsModelPath,
                    model.clsModelFileName,
                    model.clsModelVersion))
            .from(mapSheetLearnEntity)
            .join(model)
            .on(model.uuid.eq(modelUuid))
            .where(mapSheetLearnEntity.id.eq(id))
            .fetchOne();
    return dto;
  }
}