[KC-108] ai api batch work in progress

2026-01-12 22:46:36 +09:00
parent 3dc5129655
commit f54304fbdb
8 changed files with 205 additions and 225 deletions

View File

@@ -29,22 +29,20 @@ public class InferenceDetailDto {
private String dataName;
private Long mapSheepNum;
private Long detectingCnt;
@JsonFormatDttm
private ZonedDateTime analStrtDttm;
@JsonFormatDttm
private ZonedDateTime analEndDttm;
@JsonFormatDttm private ZonedDateTime analStrtDttm;
@JsonFormatDttm private ZonedDateTime analEndDttm;
private Long analSec;
private String analState;
public Basic(
Long id,
String dataName,
Long mapSheepNum,
Long detectingCnt,
ZonedDateTime analStrtDttm,
ZonedDateTime analEndDttm,
Long analSec,
String analState) {
Long id,
String dataName,
Long mapSheepNum,
Long detectingCnt,
ZonedDateTime analStrtDttm,
ZonedDateTime analEndDttm,
Long analSec,
String analState) {
this.id = id;
this.dataName = dataName;
this.mapSheepNum = mapSheepNum;
@@ -63,10 +61,8 @@ public class InferenceDetailDto {
private Long id;
private String analTitle;
private Long detectingCnt;
@JsonFormatDttm
private ZonedDateTime analStrtDttm;
@JsonFormatDttm
private ZonedDateTime analEndDttm;
@JsonFormatDttm private ZonedDateTime analStrtDttm;
@JsonFormatDttm private ZonedDateTime analEndDttm;
private Long analSec;
private Long analPredSec;
private String analState;
@@ -74,16 +70,16 @@ public class InferenceDetailDto {
private String gukyuinUsed;
public AnalResList(
Long id,
String analTitle,
Long detectingCnt,
ZonedDateTime analStrtDttm,
ZonedDateTime analEndDttm,
Long analSec,
Long analPredSec,
String analState,
String analStateNm,
String gukyuinUsed) {
Long id,
String analTitle,
Long detectingCnt,
ZonedDateTime analStrtDttm,
ZonedDateTime analEndDttm,
Long analSec,
Long analPredSec,
String analState,
String analStateNm,
String gukyuinUsed) {
this.id = id;
this.analTitle = analTitle;
this.detectingCnt = detectingCnt;
@@ -106,10 +102,8 @@ public class InferenceDetailDto {
private String modelInfo;
private Integer targetYyyy;
private Integer compareYyyy;
@JsonFormatDttm
private ZonedDateTime analStrtDttm;
@JsonFormatDttm
private ZonedDateTime analEndDttm;
@JsonFormatDttm private ZonedDateTime analStrtDttm;
@JsonFormatDttm private ZonedDateTime analEndDttm;
private Long analSec;
private Long analPredSec;
private String resultUrl;
@@ -119,20 +113,20 @@ public class InferenceDetailDto {
private String analStateNm;
public AnalResSummary(
Long id,
String analTitle,
String modelInfo,
Integer targetYyyy,
Integer compareYyyy,
ZonedDateTime analStrtDttm,
ZonedDateTime analEndDttm,
Long analSec,
Long analPredSec,
String resultUrl,
Long detectingCnt,
Double accuracy,
String analState,
String analStateNm) {
Long id,
String analTitle,
String modelInfo,
Integer targetYyyy,
Integer compareYyyy,
ZonedDateTime analStrtDttm,
ZonedDateTime analEndDttm,
Long analSec,
Long analPredSec,
String resultUrl,
Long detectingCnt,
Double accuracy,
String analState,
String analStateNm) {
this.id = id;
this.analTitle = analTitle;
this.modelInfo = modelInfo;
@@ -189,17 +183,16 @@ public class InferenceDetailDto {
private Clazzes target;
private MapSheet mapSheet;
private Coordinate center;
@JsonFormatDttm
private ZonedDateTime updatedDttm;
@JsonFormatDttm private ZonedDateTime updatedDttm;
public DetailListEntity(
UUID uuid,
Double detectionScore,
Clazzes compare,
Clazzes target,
MapSheet mapSheet,
Coordinate center,
ZonedDateTime updatedDttm) {
UUID uuid,
Double detectionScore,
Clazzes compare,
Clazzes target,
MapSheet mapSheet,
Coordinate center,
ZonedDateTime updatedDttm) {
this.code = new Uid(uuid);
this.detectionScore = detectionScore;
this.compare = compare;
@@ -240,8 +233,7 @@ public class InferenceDetailDto {
private String code;
private String name;
@JsonIgnore
private Double score;
@JsonIgnore private Double score;
public Clazz(String code, Double score) {
this.code = code;
@@ -308,23 +300,21 @@ public class InferenceDetailDto {
String classAfterName;
Double classAfterProb;
Long mapSheetNum;
@JsonIgnore
String gemoStr;
@JsonIgnore
String geomCenterStr;
@JsonIgnore String gemoStr;
@JsonIgnore String geomCenterStr;
JsonNode gemo;
JsonNode geomCenter;
public Geom(
Integer compareYyyy,
Integer targetYyyy,
String classBeforeCd,
Double classBeforeProb,
String classAfterCd,
Double classAfterProb,
Long mapSheetNum,
String gemoStr,
String geomCenterStr) {
Integer compareYyyy,
Integer targetYyyy,
String classBeforeCd,
Double classBeforeProb,
String classAfterCd,
Double classAfterProb,
Long mapSheetNum,
String gemoStr,
String geomCenterStr) {
this.compareYyyy = compareYyyy;
this.targetYyyy = targetYyyy;
this.classBeforeCd = classBeforeCd;
@@ -395,7 +385,7 @@ public class InferenceDetailDto {
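// The sort parameter is expected in "property,direction" form (e.g. "id,desc"); direction falls back to ASC when absent.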
String[] sortParams = sort.split(",");
String property = sortParams[0];
Sort.Direction direction =
sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC;
sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC;
return PageRequest.of(page, size, Sort.by(direction, property));
}
return PageRequest.of(page, size);

View File

@@ -22,16 +22,16 @@ public class InferenceProgressDto {
private Integer priority;
public InferenceProgressDto(
InferenceProgressDto.pred_requests_areas pred_requests_areas,
String modelVersion,
String cdModelPath,
String cdModelFileName,
String cdModelConfigPath,
String cdModelConfigFileName,
String cdModelClsPath,
String cdModelClsFileName,
String clsModelVersion,
Integer priority) {
InferenceProgressDto.pred_requests_areas pred_requests_areas,
String modelVersion,
String cdModelPath,
String cdModelFileName,
String cdModelConfigPath,
String cdModelConfigFileName,
String cdModelClsPath,
String cdModelClsFileName,
String clsModelVersion,
Integer priority) {
this.pred_requests_areas = pred_requests_areas;
this.modelVersion = modelVersion;
this.cdModelPath = cdModelPath;

View File

@@ -23,9 +23,7 @@ import org.springframework.data.domain.Pageable;
public class InferenceResultDto {
/**
* List query DTO
*/
/** List query DTO */
@Getter
@Setter
@AllArgsConstructor
@@ -37,15 +35,11 @@ public class InferenceResultDto {
private String status;
private String mapSheetCnt;
private Long detectingCnt;
@JsonFormatDttm
private ZonedDateTime startTime;
@JsonFormatDttm
private ZonedDateTime endTime;
@JsonFormatDttm
private ZonedDateTime elapsedTime;
@JsonFormatDttm private ZonedDateTime startTime;
@JsonFormatDttm private ZonedDateTime endTime;
@JsonFormatDttm private ZonedDateTime elapsedTime;
private Boolean applyYn;
@JsonFormatDttm
private ZonedDateTime applyDttm;
@JsonFormatDttm private ZonedDateTime applyDttm;
@JsonProperty("statusName")
public String statusName() {
@@ -53,9 +47,7 @@ public class InferenceResultDto {
}
}
/**
* List query search condition DTO
*/
/** List query search condition DTO */
@Getter
@Setter
@NoArgsConstructor
@@ -269,8 +261,7 @@ public class InferenceResultDto {
this.model2Ver = model2Ver;
this.model3Ver = model3Ver;
if( this.detectingEndCnt == null )this.detectingEndCnt = 0L;
if (this.detectingEndCnt == null) this.detectingEndCnt = 0L;
}
private String usedServerName;
@@ -296,7 +287,7 @@ public class InferenceResultDto {
return 0.0;
}
if( this.detectingEndCnt == null )this.detectingEndCnt = 0L;
if (this.detectingEndCnt == null) this.detectingEndCnt = 0L;
return ((double) this.detectingEndCnt / this.detectingCnt) * 100.0;
}
@@ -336,7 +327,6 @@ public class InferenceResultDto {
return String.format("%02d:%02d:%02d", hours, minutes, seconds);
}
}
@Getter
@@ -346,12 +336,9 @@ public class InferenceResultDto {
public static class InferenceServerStatusDto {
private String serverName;
@JsonIgnore
private float cpu_user;
@JsonIgnore
private float cpu_system;
@JsonIgnore
private float memused;
@JsonIgnore private float cpu_user;
@JsonIgnore private float cpu_system;
@JsonIgnore private float memused;
private Long kbmemused;
private float gpuUtil;

View File

@@ -96,7 +96,7 @@ public class InferenceResultService {
// Query the target year
List<MngListDto> targetList =
mapSheetMngCoreService.getHstMapSheetList(req.getTargetYyyy(), mapTargetIds);
mapSheetMngCoreService.getHstMapSheetList(req.getTargetYyyy(), mapTargetIds);
req.setMapSheetNum(this.createdMngDto(req, targetList));
}
@@ -109,19 +109,19 @@ public class InferenceResultService {
// Query comparison-year detection data per option and apply it to req
private List<MapSheetNumDto> createdMngDto(
InferenceResultDto.RegReq req, List<MngListDto> targetList) {
InferenceResultDto.RegReq req, List<MngListDto> targetList) {
List<String> mapTargetIds = new ArrayList<>();
targetList.forEach(
hstMapSheet -> {
// Build parameters: the comparison year is fetched based on the target year
mapTargetIds.add(hstMapSheet.getMapSheetNum());
});
hstMapSheet -> {
// Build parameters: the comparison year is fetched based on the target year
mapTargetIds.add(hstMapSheet.getMapSheetNum());
});
// Query the comparison year
List<String> mapCompareIds = new ArrayList<>();
List<MngListCompareDto> compareList =
mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds);
mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds);
for (MngListCompareDto dto : compareList) {
// When excluded from inference, skip if the previous-year file is missing
@@ -136,35 +136,35 @@ public class InferenceResultService {
}
Set<String> compareSet =
mapCompareIds.stream()
.filter(Objects::nonNull)
.map(String::trim) // guard against whitespace/newlines
.collect(Collectors.toSet());
mapCompareIds.stream()
.filter(Objects::nonNull)
.map(String::trim) // guard against whitespace/newlines
.collect(Collectors.toSet());
// Compare against target and keep only entries present in both
List<String> commonIds =
mapTargetIds.stream()
.filter(Objects::nonNull)
.map(String::trim)
.filter(compareSet::contains)
.toList();
mapTargetIds.stream()
.filter(Objects::nonNull)
.map(String::trim)
.filter(compareSet::contains)
.toList();
Set<String> commonIdSet =
commonIds.stream().filter(Objects::nonNull).map(String::trim).collect(Collectors.toSet());
commonIds.stream().filter(Objects::nonNull).map(String::trim).collect(Collectors.toSet());
// Rebuild the parameters for saving
List<MapSheetNumDto> mapSheetNum =
targetList.stream()
.filter(dto -> dto.getMapSheetNum() != null)
.filter(dto -> commonIdSet.contains(dto.getMapSheetNum().trim()))
.map(
dto -> {
MapSheetNumDto mapSheetNumDto = new MapSheetNumDto();
mapSheetNumDto.setMapSheetNum(dto.getMapSheetNum());
mapSheetNumDto.setMapSheetName(dto.getMapSheetName());
return mapSheetNumDto;
})
.toList();
targetList.stream()
.filter(dto -> dto.getMapSheetNum() != null)
.filter(dto -> commonIdSet.contains(dto.getMapSheetNum().trim()))
.map(
dto -> {
MapSheetNumDto mapSheetNumDto = new MapSheetNumDto();
mapSheetNumDto.setMapSheetNum(dto.getMapSheetNum());
mapSheetNumDto.setMapSheetName(dto.getMapSheetName());
return mapSheetNumDto;
})
.toList();
return mapSheetNum;
}
@@ -184,9 +184,9 @@ public class InferenceResultService {
}
String modelComparePath =
this.getSceneInference(String.valueOf(req.getCompareYyyy()), mapSheetNumList);
this.getSceneInference(String.valueOf(req.getCompareYyyy()), mapSheetNumList);
String modelTargetPath =
this.getSceneInference(String.valueOf(req.getTargetYyyy()), mapSheetNumList);
this.getSceneInference(String.valueOf(req.getTargetYyyy()), mapSheetNumList);
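// Assemble the prediction request payload with the compare/target years and their scene paths.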
pred_requests_areas predRequestsAreas = new pred_requests_areas();
predRequestsAreas.setInput1_year(req.getCompareYyyy());
@@ -241,7 +241,7 @@ public class InferenceResultService {
}
ExternalCallResult<String> result =
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
int status = result.statusCode();
String body = result.body();
@@ -254,8 +254,7 @@ public class InferenceResultService {
try {
List<Map<String, Object>> list =
om.readValue(body, new TypeReference<List<Map<String, Object>>>() {
});
om.readValue(body, new TypeReference<List<Map<String, Object>>>() {});
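// The inference API response appears to be a JSON array whose first element carries "batch_id".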
Integer batchIdInt = (Integer) list.get(0).get("batch_id");
batchId = batchIdInt.longValue();
@@ -354,7 +353,7 @@ public class InferenceResultService {
* @return
*/
public Page<InferenceDetailDto.Geom> getInferenceResultGeomList(
Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
return inferenceResultCoreService.getInferenceResultGeomList(id, searchGeoReq);
}
@@ -365,7 +364,7 @@ public class InferenceResultService {
* @return
*/
public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom(
@NotNull Long id, InferenceDetailDto.SearchGeoReq searchReq) {
@NotNull Long id, InferenceDetailDto.SearchGeoReq searchReq) {
return inferenceResultCoreService.listInferenceResultWithGeom(id, searchReq);
}
@@ -410,7 +409,7 @@ public class InferenceResultService {
public InferenceStatusDetailDto getInferenceStatus(UUID uuid) {
List<InferenceServerStatusDto> servers =
inferenceResultCoreService.getInferenceServerStatusList();
inferenceResultCoreService.getInferenceServerStatusList();
String serverNames = "";
for (InferenceServerStatusDto server : servers) {

View File

@@ -65,11 +65,11 @@ public class InferenceResultCoreService {
*/
public UUID saveInferenceInfo(InferenceResultDto.RegReq req) {
String mapSheetName =
req.getMapSheetNum().get(0).getMapSheetName() + "" + req.getMapSheetNum().size() + "";
req.getMapSheetNum().get(0).getMapSheetName() + "" + req.getMapSheetNum().size() + "";
if (req.getMapSheetNum().size() == 1) {
mapSheetName =
req.getMapSheetNum().get(0).getMapSheetName() + " " + req.getMapSheetNum().size() + "";
req.getMapSheetNum().get(0).getMapSheetName() + " " + req.getMapSheetNum().size() + "";
}
MapSheetLearnEntity mapSheetLearnEntity = new MapSheetLearnEntity();
@@ -117,22 +117,22 @@ public class InferenceResultCoreService {
// Extract chunk numbers to build the IN condition
List<String> chunkNums =
buffer.stream().map(e -> String.valueOf(e.getMapSheetNum())).distinct().toList();
buffer.stream().map(e -> String.valueOf(e.getMapSheetNum())).distinct().toList();
// Exclude from inference
List<MapInkx5kEntity> usedEntities =
mapInkx5kRepository.findByMapSheetNumInAndUseInference(chunkNums, CommonUseStatus.USE);
mapInkx5kRepository.findByMapSheetNumInAndUseInference(chunkNums, CommonUseStatus.USE);
// TODO: if excluded from inference, also check whether the file exists
// Collect only the numbers from the query result into a Set
Set<String> usedSet =
usedEntities.stream()
.map(MapInkx5kEntity::getMapidcdNo)
.collect(java.util.stream.Collectors.toSet());
usedEntities.stream()
.map(MapInkx5kEntity::getMapidcdNo)
.collect(java.util.stream.Collectors.toSet());
// Save after filtering
List<MapSheetLearn5kEntity> toSave =
buffer.stream().filter(e -> usedSet.contains(String.valueOf(e.getMapSheetNum()))).toList();
buffer.stream().filter(e -> usedSet.contains(String.valueOf(e.getMapSheetNum()))).toList();
if (!toSave.isEmpty()) {
mapSheetLearn5kRepository.saveAll(toSave);
@@ -152,9 +152,9 @@ public class InferenceResultCoreService {
*/
public InferenceDetailDto.AnalResSummary getInferenceResultSummary(Long id) {
InferenceDetailDto.AnalResSummary summary =
mapSheetAnalDataRepository
.getInferenceResultSummary(id)
.orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
mapSheetAnalDataRepository
.getInferenceResultSummary(id)
.orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
return summary;
}
@@ -175,7 +175,7 @@ public class InferenceResultCoreService {
* @return
*/
public Page<InferenceDetailDto.Geom> getInferenceResultGeomList(
Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
return mapSheetAnalDataRepository.getInferenceGeomList(id, searchGeoReq);
}
@@ -187,16 +187,16 @@ public class InferenceResultCoreService {
*/
@Transactional(readOnly = true)
public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom(
@NotNull Long analyId, InferenceDetailDto.SearchGeoReq searchReq) {
@NotNull Long analyId, InferenceDetailDto.SearchGeoReq searchReq) {
// Fetch the dataIds corresponding to the analysis ID
List<Long> dataIds =
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataInferenceEntity::getId)
.boxed()
.toList();
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataInferenceEntity::getId)
.boxed()
.toList();
// Fetch the polygon data for those records
Page<MapSheetAnalDataInferenceGeomEntity> mapSheetAnalDataGeomEntities =
mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataInferenceGeomEntity::toEntity);
}
@@ -213,21 +213,21 @@ public class InferenceResultCoreService {
@Transactional(readOnly = true)
public List<MapSheet> listGetScenes5k(Long analyId) {
List<String> sceneCodes =
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataInferenceEntity::getMapSheetNum)
.mapToObj(String::valueOf)
.toList();
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataInferenceEntity::getMapSheetNum)
.mapToObj(String::valueOf)
.toList();
return mapInkx5kRepository.listGetScenes5k(sceneCodes).stream()
.map(MapInkx5kEntity::toEntity)
.toList();
.map(MapInkx5kEntity::toEntity)
.toList();
}
public void update(SaveInferenceAiDto request) {
MapSheetLearnEntity entity =
mapSheetLearnRepository
.getInferenceResultByUuid(request.getUuid())
.orElseThrow(() -> new EntityNotFoundException());
mapSheetLearnRepository
.getInferenceResultByUuid(request.getUuid())
.orElseThrow(() -> new EntityNotFoundException());
if (request.getType().equals("M1")) {
entity.setM1ModelBatchId(request.getBatchId());
@@ -296,7 +296,6 @@ public class InferenceResultCoreService {
}
entity.setUpdatedDttm(ZonedDateTime.now());
}
public List<InferenceServerStatusDto> getInferenceServerStatusList() {
@@ -305,9 +304,9 @@ public class InferenceResultCoreService {
public InferenceBatchSheet getInferenceResultByStatus(String status) {
MapSheetLearnEntity entity =
mapSheetLearnRepository
.getInferenceResultByStatus(status)
.orElseThrow(() -> new EntityNotFoundException(status));
mapSheetLearnRepository
.getInferenceResultByStatus(status)
.orElseThrow(() -> new EntityNotFoundException(status));
InferenceBatchSheet inferenceBatchSheet = new InferenceBatchSheet();
inferenceBatchSheet.setId(entity.getId());

View File

@@ -25,9 +25,9 @@ public class MapSheetLearnEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_learn_id_gen")
@SequenceGenerator(
name = "tb_map_sheet_learn_id_gen",
sequenceName = "tb_map_sheet_learn_uid",
allocationSize = 1)
name = "tb_map_sheet_learn_id_gen",
sequenceName = "tb_map_sheet_learn_uid",
allocationSize = 1)
@Column(name = "id", nullable = false)
private Long id;
@@ -126,28 +126,33 @@ public class MapSheetLearnEntity {
@Column(name = "m1_model_start_dttm")
private ZonedDateTime m1ModelStartDttm;
@Column(name = "m2_model_start_dttm")
private ZonedDateTime m2ModelStartDttm;
@Column(name = "m3_model_start_dttm")
private ZonedDateTime m3ModelStartDttm;
@Column(name = "m1_model_end_dttm")
private ZonedDateTime m1ModelEndDttm;
@Column(name = "m2_model_end_dttm")
private ZonedDateTime m2ModelEndDttm;
@Column(name = "m3_model_end_dttm")
private ZonedDateTime m3ModelEndDttm;
public InferenceResultDto.ResultList toDto() {
return new InferenceResultDto.ResultList(
this.uuid,
this.title,
this.status,
this.mapSheetCnt,
this.detectingCnt,
this.inferStartDttm,
this.inferEndDttm,
this.elapsedTime,
this.applyYn,
this.applyDttm);
this.uuid,
this.title,
this.status,
this.mapSheetCnt,
this.detectingCnt,
this.inferStartDttm,
this.inferEndDttm,
this.elapsedTime,
this.applyYn,
this.applyDttm);
}
}

View File

@@ -172,7 +172,6 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
mapSheetLearnEntity.inferEndDttm,
mapSheetLearnEntity.detectingCnt,
mapSheetLearnEntity.detectEndCnt,
m1Model.modelVer.as("model1Ver"),
m2Model.modelVer.as("model2Ver"),
m3Model.modelVer.as("model3Ver")))
@@ -195,30 +194,29 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
QModelMngEntity model = new QModelMngEntity("model");
InferenceProgressDto dto =
queryFactory
.select(
Projections.constructor(
InferenceProgressDto.class,
Projections.constructor(
InferenceProgressDto.pred_requests_areas.class,
mapSheetLearnEntity.compareYyyy,
mapSheetLearnEntity.targetYyyy,
mapSheetLearnEntity.modelComparePath,
mapSheetLearnEntity.modelTargetPath),
model.modelVer.as("modelVer"),
model.cdModelPath.as("cdModelPath"),
model.cdModelFileName.as("cdModelFileName"),
model.cdModelConfigPath.as("cdModelConfigPath"),
model.cdModelConfigFileName.as("cdModelConfigFileName"),
model.clsModelPath,
model.clsModelFileName,
model.clsModelVersion
))
.from(mapSheetLearnEntity)
.join(model)
.on(model.uuid.eq(modelUuid))
.where(mapSheetLearnEntity.id.eq(id))
.fetchOne();
queryFactory
.select(
Projections.constructor(
InferenceProgressDto.class,
Projections.constructor(
InferenceProgressDto.pred_requests_areas.class,
mapSheetLearnEntity.compareYyyy,
mapSheetLearnEntity.targetYyyy,
mapSheetLearnEntity.modelComparePath,
mapSheetLearnEntity.modelTargetPath),
model.modelVer.as("modelVer"),
model.cdModelPath.as("cdModelPath"),
model.cdModelFileName.as("cdModelFileName"),
model.cdModelConfigPath.as("cdModelConfigPath"),
model.cdModelConfigFileName.as("cdModelConfigFileName"),
model.clsModelPath,
model.clsModelFileName,
model.clsModelVersion))
.from(mapSheetLearnEntity)
.join(model)
.on(model.uuid.eq(modelUuid))
.where(mapSheetLearnEntity.id.eq(id))
.fetchOne();
return dto;
}
}

View File

@@ -48,16 +48,14 @@ public class MapSheetInferenceJobService {
@Value("${inference.url}")
private String inferenceUrl;
/** Inference progress batch, runs every 1 minute */
@Scheduled(fixedDelay = 60_000)
@Transactional
public void runBatch() {
log.info("1분 배치 시작");
try {
// TODO: write the batch logic
InferenceBatchSheet batchSheet =
inferenceResultCoreService.getInferenceResultByStatus(Status.IN_PROGRESS.getId());
inferenceResultCoreService.getInferenceResultByStatus(Status.IN_PROGRESS.getId());
if (batchSheet == null) {
return;
@@ -84,7 +82,7 @@ public class MapSheetInferenceJobService {
String url = batchUrl + "/" + batchId;
ExternalCallResult<String> result =
externalHttpClient.call(url, HttpMethod.GET, null, headers, String.class);
externalHttpClient.call(url, HttpMethod.GET, null, headers, String.class);
int status = result.statusCode();
if (status < 200 || status >= 300) {
@@ -99,12 +97,14 @@ public class MapSheetInferenceJobService {
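// Model-stage chaining: when one stage's batch finishes, start the next (M1 -> M2 -> M3) and record the finished stage's end time.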
if (type.equals("M1")) {
// If M1 is complete, run M2
this.startInference(batchSheet.getId(), batchSheet.getUuid(), "M2", batchSheet.getM2ModelUuid());
this.startInference(
batchSheet.getId(), batchSheet.getUuid(), "M2", batchSheet.getM2ModelUuid());
// Record end time
this.updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M1");
} else if (type.equals("M2")) {
// If M2 is complete, run M3
this.startInference(batchSheet.getId(), batchSheet.getUuid(), "M3", batchSheet.getM3ModelUuid());
this.startInference(
batchSheet.getId(), batchSheet.getUuid(), "M3", batchSheet.getM3ModelUuid());
// Record end time
this.updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M2");
} else if (type.equals("M3")) {
@@ -129,25 +129,28 @@ public class MapSheetInferenceJobService {
Thread.currentThread().interrupt();
log.error("배치 중 인터럽트 발생", e);
}
log.info("1분 배치 종료");
}
private void startInference(Long id, UUID uuid, String type, UUID modelUuid) {
InferenceProgressDto progressDto = inferenceResultCoreService.getInferenceAiResultById(id, type, modelUuid);
InferenceProgressDto progressDto =
inferenceResultCoreService.getInferenceAiResultById(id, type, modelUuid);
pred_requests_areas predRequestsAreas = new pred_requests_areas();
predRequestsAreas.setInput1_year(progressDto.getPred_requests_areas().getInput1_year());
predRequestsAreas.setInput2_year(progressDto.getPred_requests_areas().getInput2_year());
predRequestsAreas.setInput1_scene_path(progressDto.getPred_requests_areas().getInput1_scene_path());
predRequestsAreas.setInput2_scene_path(progressDto.getPred_requests_areas().getInput2_scene_path());
predRequestsAreas.setInput1_scene_path(
progressDto.getPred_requests_areas().getInput1_scene_path());
predRequestsAreas.setInput2_scene_path(
progressDto.getPred_requests_areas().getInput2_scene_path());
InferenceSendDto m = new InferenceSendDto();
m.setModel_version(progressDto.getModelVersion());
m.setCd_model_path(progressDto.getCdModelPath() + "/" + progressDto.getCdModelFileName());
m.setCd_model_config(progressDto.getCdModelConfigPath() + "/" + progressDto.getCdModelConfigFileName());
m.setCls_model_path(progressDto.getCdModelClsPath() + "/" + progressDto.getCdModelClsFileName());
m.setCd_model_config(
progressDto.getCdModelConfigPath() + "/" + progressDto.getCdModelConfigFileName());
m.setCls_model_path(
progressDto.getCdModelClsPath() + "/" + progressDto.getCdModelClsFileName());
m.setCls_model_version(progressDto.getClsModelVersion());
m.setCd_model_type(type);
m.setPriority(progressDto.getPriority());
@@ -184,7 +187,7 @@ public class MapSheetInferenceJobService {
}
ExternalCallResult<String> result =
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
int status = result.statusCode();
String body = result.body();
@@ -197,8 +200,7 @@ public class MapSheetInferenceJobService {
try {
List<Map<String, Object>> list =
om.readValue(body, new TypeReference<List<Map<String, Object>>>() {
});
om.readValue(body, new TypeReference<List<Map<String, Object>>>() {});
Integer batchIdInt = (Integer) list.get(0).get("batch_id");
batchId = batchIdInt.longValue();