Remove round-number saving when running inference

2026-01-16 17:03:28 +09:00
parent b77ae9ed68
commit d4bad6485e
3 changed files with 68 additions and 67 deletions

InferenceResultCoreService.java

@@ -174,9 +174,9 @@ public class InferenceResultCoreService {
   */
  public InferenceDetailDto.AnalResSummary getInferenceResultSummary(Long id) {
    InferenceDetailDto.AnalResSummary summary =
-            mapSheetAnalDataRepository
-                .getInferenceResultSummary(id)
-                .orElseThrow(() -> new EntityNotFoundException("Summary information not found. " + id));
+        mapSheetAnalDataRepository
+            .getInferenceResultSummary(id)
+            .orElseThrow(() -> new EntityNotFoundException("Summary information not found. " + id));
    return summary;
  }
@@ -197,7 +197,7 @@ public class InferenceResultCoreService {
   * @return
   */
  public Page<InferenceDetailDto.Geom> getInferenceResultGeomList(
-          Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
+      Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
    return mapSheetAnalDataRepository.getInferenceGeomList(id, searchGeoReq);
  }
@@ -209,16 +209,16 @@ public class InferenceResultCoreService {
   */
  @Transactional(readOnly = true)
  public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom(
-          @NotNull Long analyId, InferenceDetailDto.SearchGeoReq searchReq) {
+      @NotNull Long analyId, InferenceDetailDto.SearchGeoReq searchReq) {
    // Fetch the dataIds that belong to the analysis ID.
    List<Long> dataIds =
-            mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
-                .mapToLong(MapSheetAnalDataInferenceEntity::getId)
-                .boxed()
-                .toList();
+        mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
+            .mapToLong(MapSheetAnalDataInferenceEntity::getId)
+            .boxed()
+            .toList();
    // Fetch the polygon data for those records.
    Page<MapSheetAnalDataInferenceGeomEntity> mapSheetAnalDataGeomEntities =
-            mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
+        mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
    return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataInferenceGeomEntity::toEntity);
  }
@@ -235,14 +235,14 @@ public class InferenceResultCoreService {
  @Transactional(readOnly = true)
  public List<MapSheet> listGetScenes5k(Long analyId) {
    List<String> sceneCodes =
-            mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
-                .mapToLong(MapSheetAnalDataInferenceEntity::getMapSheetNum)
-                .mapToObj(String::valueOf)
-                .toList();
+        mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
+            .mapToLong(MapSheetAnalDataInferenceEntity::getMapSheetNum)
+            .mapToObj(String::valueOf)
+            .toList();
    return mapInkx5kRepository.listGetScenes5k(sceneCodes).stream()
-            .map(MapInkx5kEntity::toEntity)
-            .toList();
+        .map(MapInkx5kEntity::toEntity)
+        .toList();
  }

  /**
@@ -253,9 +253,9 @@ public class InferenceResultCoreService {
  @Transactional
  public void update(SaveInferenceAiDto request) {
    MapSheetLearnEntity entity =
-            mapSheetLearnRepository
-                .getInferenceResultByUuid(request.getUuid())
-                .orElseThrow(EntityNotFoundException::new);
+        mapSheetLearnRepository
+            .getInferenceResultByUuid(request.getUuid())
+            .orElseThrow(EntityNotFoundException::new);

    // Update the M1/M2/M3 fields
    if (request.getType() != null) {
@@ -277,46 +277,49 @@ public class InferenceResultCoreService {
  private void applyModelUpdate(MapSheetLearnEntity entity, SaveInferenceAiDto request) {
    switch (request.getType()) {
-      case "M1" -> applyModelFields(
-          request,
-          entity::setM1ModelBatchId,
-          entity::setM1ModelStartDttm,
-          entity::setM1ModelEndDttm,
-          entity::setM1PendingJobs,
-          entity::setM1RunningJobs,
-          entity::setM1CompletedJobs,
-          entity::setM1FailedJobs);
-      case "M2" -> applyModelFields(
-          request,
-          entity::setM2ModelBatchId,
-          entity::setM2ModelStartDttm,
-          entity::setM2ModelEndDttm,
-          entity::setM2PendingJobs,
-          entity::setM2RunningJobs,
-          entity::setM2CompletedJobs,
-          entity::setM2FailedJobs);
-      case "M3" -> applyModelFields(
-          request,
-          entity::setM3ModelBatchId,
-          entity::setM3ModelStartDttm,
-          entity::setM3ModelEndDttm,
-          entity::setM3PendingJobs,
-          entity::setM3RunningJobs,
-          entity::setM3CompletedJobs,
-          entity::setM3FailedJobs);
+      case "M1" ->
+          applyModelFields(
+              request,
+              entity::setM1ModelBatchId,
+              entity::setM1ModelStartDttm,
+              entity::setM1ModelEndDttm,
+              entity::setM1PendingJobs,
+              entity::setM1RunningJobs,
+              entity::setM1CompletedJobs,
+              entity::setM1FailedJobs);
+      case "M2" ->
+          applyModelFields(
+              request,
+              entity::setM2ModelBatchId,
+              entity::setM2ModelStartDttm,
+              entity::setM2ModelEndDttm,
+              entity::setM2PendingJobs,
+              entity::setM2RunningJobs,
+              entity::setM2CompletedJobs,
+              entity::setM2FailedJobs);
+      case "M3" ->
+          applyModelFields(
+              request,
+              entity::setM3ModelBatchId,
+              entity::setM3ModelStartDttm,
+              entity::setM3ModelEndDttm,
+              entity::setM3PendingJobs,
+              entity::setM3RunningJobs,
+              entity::setM3CompletedJobs,
+              entity::setM3FailedJobs);
      default -> throw new IllegalArgumentException("Unknown type: " + request.getType());
    }
  }

  private void applyModelFields(
-          SaveInferenceAiDto request,
-          Consumer<Long> setBatchId,
-          Consumer<ZonedDateTime> setStart,
-          Consumer<ZonedDateTime> setEnd,
-          Consumer<Integer> setPending,
-          Consumer<Integer> setRunning,
-          Consumer<Integer> setCompleted,
-          Consumer<Integer> setFailed) {
+      SaveInferenceAiDto request,
+      Consumer<Long> setBatchId,
+      Consumer<ZonedDateTime> setStart,
+      Consumer<ZonedDateTime> setEnd,
+      Consumer<Integer> setPending,
+      Consumer<Integer> setRunning,
+      Consumer<Integer> setCompleted,
+      Consumer<Integer> setFailed) {
    applyIfNotNull(request.getBatchId(), setBatchId);
    applyIfNotNull(request.getModelStartDttm(), setStart);
    applyIfNotNull(request.getModelEndDttm(), setEnd);
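The applyIfNotNull helper called here sits outside this hunk; a minimal sketch, assuming it is a private generic null-guard in the same class:

  // Sketch only; the actual helper body is not part of this diff.
  private <T> void applyIfNotNull(T value, Consumer<T> setter) {
    // Push the DTO value into the entity setter only when the request supplies it,
    // so a partial update does not overwrite existing columns with null.
    if (value != null) {
      setter.accept(value);
    }
  }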
@@ -343,7 +346,7 @@ public class InferenceResultCoreService {
  public InferenceBatchSheet getInferenceResultByStatus(String status) {
    MapSheetLearnEntity entity =
-            mapSheetLearnRepository.getInferenceResultByStatus(status).orElse(null);
+        mapSheetLearnRepository.getInferenceResultByStatus(status).orElse(null);

    if (entity == null) {
      return null;
@@ -433,9 +436,9 @@ public class InferenceResultCoreService {
  /**
   * Saves per-model failure status for each map sheet
   *
-   * @param uuid       learn table uuid
+   * @param uuid learn table uuid
   * @param failMapIds list of failed map sheets
-   * @param type       model type
+   * @param type model type
   */
  @Transactional
  public void saveFail5k(UUID uuid, List<Long> failMapIds, String type) {
@@ -454,7 +457,7 @@ public class InferenceResultCoreService {
   */
  public List<InferenceResultsTestingDto.ShpDto> getInferenceResults(List<Long> batchIds) {
    List<InferenceResultsTestingEntity> list =
-            inferenceResultsTetingRepository.getInferenceResultList(batchIds);
+        inferenceResultsTetingRepository.getInferenceResultList(batchIds);
    return list.stream().map(InferenceResultsTestingDto.ShpDto::fromEntity).toList();
  }
}

InferenceResultsTestingRepository.java

@@ -4,7 +4,5 @@ import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
import org.springframework.data.jpa.repository.JpaRepository;

public interface InferenceResultsTestingRepository
-        extends JpaRepository<InferenceResultsTestingEntity, Long>,
-            InferenceResultsTestingRepositoryCustom {
-}
+    extends JpaRepository<InferenceResultsTestingEntity, Long>,
+        InferenceResultsTestingRepositoryCustom {}
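The custom fragment InferenceResultsTestingRepositoryCustom is not touched by this commit; judging from the @Override in the implementation below, it presumably declares the single finder (a sketch, imports assumed):

import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
import java.util.List;

public interface InferenceResultsTestingRepositoryCustom {
  // Implemented with QueryDSL in InferenceResultsTestingRepositoryImpl.
  List<InferenceResultsTestingEntity> getInferenceResultList(List<Long> batchIds);
}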

InferenceResultsTestingRepositoryImpl.java

@@ -11,16 +11,16 @@ import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class InferenceResultsTestingRepositoryImpl
-        implements InferenceResultsTestingRepositoryCustom {
+    implements InferenceResultsTestingRepositoryCustom {

  private final JPAQueryFactory queryFactory;

  @Override
  public List<InferenceResultsTestingEntity> getInferenceResultList(List<Long> batchIds) {
    return queryFactory
-            .select(inferenceResultsTestingEntity)
-            .from(inferenceResultsTestingEntity)
-            .where(inferenceResultsTestingEntity.batchId.in(batchIds))
-            .fetch();
+        .select(inferenceResultsTestingEntity)
+        .from(inferenceResultsTestingEntity)
+        .where(inferenceResultsTestingEntity.batchId.in(batchIds))
+        .fetch();
  }
}
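The unqualified inferenceResultsTestingEntity in the query is the generated QueryDSL Q-type instance; the file presumably carries a static import along these lines (package assumed to match the entity's, which is where QueryDSL generates Q-classes by default):

// Assumed static import; the import section is outside this hunk.
import static com.kamco.cd.kamcoback.postgres.entity.QInferenceResultsTestingEntity.inferenceResultsTestingEntity;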