Merge remote-tracking branch 'origin/feat/infer_dev_260107' into feat/infer_dev_260107
# Conflicts:
#	src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetLearnRepositoryCustom.java
#	src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetLearnRepositoryImpl.java
@@ -391,4 +391,17 @@ public class InferenceDetailDto {
       return PageRequest.of(page, size);
     }
   }
+
+  @Getter
+  @Setter
+  @AllArgsConstructor
+  @NoArgsConstructor
+  public static class InferenceBatchSheet {
+
+    private Long m1BatchId;
+    private Long m2BatchId;
+    private Long m3BatchId;
+    private String status;
+    private String runningModelType;
+  }
 }
@@ -129,10 +129,7 @@ public class InferenceResultDto {
     }
 
     public static Status fromCode(String code) {
-      return Arrays.stream(values())
-          .filter(v -> v.name().equals(code))
-          .findFirst()
-          .orElseThrow(() -> new IllegalArgumentException("알 수 없는 상태 코드: " + code));
+      return Arrays.stream(values()).filter(v -> v.name().equals(code)).findFirst().orElse(null);
     }
 
     public static String getDescByCode(String code) {
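Note on the hunk above: fromCode() now resolves an unknown code to null via orElse(null) instead of throwing IllegalArgumentException, so callers must null-check the result. A self-contained sketch of the new behaviour; the enum constants other than IN_PROGRESS are placeholders, not taken from this commit:

import java.util.Arrays;

enum StatusSketch {
  IN_PROGRESS, COMPLETED, FAILED; // placeholder constants for illustration

  // Mirrors the new fromCode(): unknown codes yield null rather than an exception.
  static StatusSketch fromCode(String code) {
    return Arrays.stream(values()).filter(v -> v.name().equals(code)).findFirst().orElse(null);
  }
}

class FromCodeDemo {
  public static void main(String[] args) {
    System.out.println(StatusSketch.fromCode("IN_PROGRESS")); // IN_PROGRESS
    System.out.println(StatusSketch.fromCode("bogus"));       // null – callers must handle this
  }
}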
@@ -271,75 +268,81 @@ public class InferenceResultDto {
   public static class InferenceServerStatusDto {
 
     private String serverName;
-    @JsonIgnore
-    private float cpu_user;
+    @JsonIgnore private float cpu_user;
     @JsonIgnore private float cpu_system;
     @JsonIgnore private float memused;
     private Long kbmemused;
     private float gpuUtil;
 
-    public float getCpuUseRate()
-    {
-      return this.cpu_user+this.cpu_system;
+    // private String cpuStatusName;
+    // private String memStatusName;
+    // private String gpuStatusName;
+    // private float cpu_use_rate;
+    // private float gpu_use_rate;
+    // private float mem_use_rate;
+
+    public float getCpuUseRate() {
+      return this.cpu_user + this.cpu_system;
     }
 
-    public String getServerStatus()
-    {
+    public String getServerStatus() {
       String enumId = "SAFETY";
-      //if( this.cpu_user+this.cpu_system >= 80 )enumId = "CAUTION";
+      // if( this.cpu_user+this.cpu_system >= 80 )enumId = "CAUTION";
       return enumId;
     }
 
-    public String getServerStatusName()
-    {
-      //String enumId = "SAFETY";
-      //if( this.cpu_user+this.cpu_system >= 80 )enumId = "CAUTION";
+    public String getServerStatusName() {
+      // String enumId = "SAFETY";
+      // if( this.cpu_user+this.cpu_system >= 80 )enumId = "CAUTION";
       return ServerStatus.SAFETY.getText();
     }
 
-    public String getCpuStatus()
-    {
+    public String getCpuStatus() {
       String enumId = "SAFETY";
-      if( this.cpu_user+this.cpu_system >= 80 )enumId = "CAUTION";
+      if (this.cpu_user + this.cpu_system >= 80) {
+        enumId = "CAUTION";
+      }
       return enumId;
     }
 
-    public String getGpuStatus()
-    {
+    public String getGpuStatus() {
       String enumId = "SAFETY";
-      if( this.gpuUtil >= 80 )enumId = "CAUTION";
+      if (this.gpuUtil >= 80) {
+        enumId = "CAUTION";
+      }
       return enumId;
     }
 
-    public String getMemStatus()
-    {
+    public String getMemStatus() {
       String enumId = "SAFETY";
-      if( this.memused >= 80 )enumId = "CAUTION";
+      if (this.memused >= 80) {
+        enumId = "CAUTION";
+      }
       return enumId;
     }
 
-    public String getCpuStatusName()
-    {
-      if( this.cpu_user+this.cpu_system >= 80 )return ServerStatus.CAUTION.getText();
+    public String getCpuStatusName() {
+      if (this.cpu_user + this.cpu_system >= 80) {
+        return ServerStatus.CAUTION.getText();
+      }
       return ServerStatus.SAFETY.getText();
     }
 
-    public String getGpuStatusName()
-    {
-      if( this.gpuUtil >= 80 )return ServerStatus.CAUTION.getText();
+    public String getGpuStatusName() {
+      if (this.gpuUtil >= 80) {
+        return ServerStatus.CAUTION.getText();
+      }
       return ServerStatus.SAFETY.getText();
     }
 
-    public String getMemStatusName()
-    {
-      if( this.memused >= 80 )return ServerStatus.CAUTION.getText();
+    public String getMemStatusName() {
+      if (this.memused >= 80) {
+        return ServerStatus.CAUTION.getText();
+      }
       return ServerStatus.SAFETY.getText();
     }
 
   }
 
   @Getter
   @AllArgsConstructor
   public enum ServerStatus implements EnumType {
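The status getters above all apply the same rule: a usage figure at or above 80 percent maps to CAUTION, otherwise SAFETY. A standalone illustration of that rule; the names here are hypothetical, only the threshold mirrors the diff:

public final class ThresholdRuleSketch {

  // Same 80% rule used by getCpuStatus()/getGpuStatus()/getMemStatus() above.
  static String statusFor(float usagePercent) {
    return usagePercent >= 80 ? "CAUTION" : "SAFETY";
  }

  public static void main(String[] args) {
    System.out.println(statusFor(42.5f)); // SAFETY
    System.out.println(statusFor(91.0f)); // CAUTION
  }
}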
@@ -360,4 +363,17 @@ public class InferenceResultDto {
       return desc;
     }
   }
+
+  @Getter
+  @Setter
+  @AllArgsConstructor
+  @NoArgsConstructor
+  public static class SaveInferenceAiDto {
+
+    private UUID uuid;
+    private Long batchId;
+    private String status;
+    private String type;
+    private ZonedDateTime inferStartDttm;
+  }
 }
@@ -16,6 +16,8 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDe
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetNumDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
+import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
+import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
 import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto.pred_requests_areas;
 import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListCompareDto;
@@ -25,6 +27,7 @@ import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
 import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService;
 import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
 import jakarta.validation.constraints.NotNull;
+import java.time.ZonedDateTime;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -200,7 +203,14 @@ public class InferenceResultService {
     m3.setPred_requests_areas(predRequestsAreas);
 
     Long batchId = this.ensureAccepted(m1);
-    inferenceResultCoreService.update(uuid, batchId, "IN_PROGRESS");
+
+    SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
+    saveInferenceAiDto.setUuid(uuid);
+    saveInferenceAiDto.setBatchId(batchId);
+    saveInferenceAiDto.setStatus(Status.IN_PROGRESS.getId());
+    saveInferenceAiDto.setType("M1");
+    saveInferenceAiDto.setInferStartDttm(ZonedDateTime.now());
+    inferenceResultCoreService.update(saveInferenceAiDto);
   }
 
   /**
@@ -4,12 +4,14 @@ import com.kamco.cd.kamcoback.common.enums.CommonUseStatus;
 import com.kamco.cd.kamcoback.common.utils.UserUtil;
 import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
+import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.InferenceBatchSheet;
 import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetNumDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
+import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
 import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
@@ -219,17 +221,43 @@ public class InferenceResultCoreService {
         .toList();
   }
 
-  public void update(UUID uuid, Long batchId, String status) {
+  public void update(SaveInferenceAiDto request) {
     MapSheetLearnEntity entity =
         mapSheetLearnRepository
-            .getInferenceResultByUuid(uuid)
-            .orElseThrow(() -> new EntityNotFoundException(uuid.toString()));
+            .getInferenceResultByUuid(request.getUuid())
+            .orElseThrow(() -> new EntityNotFoundException());
 
-    entity.setBatchId(batchId);
-    entity.setStatus(status);
+    if (request.getType().equals("M1")) {
+      entity.setM1ModelBatchId(request.getBatchId());
+    } else if (request.getType().equals("M2")) {
+      entity.setM2ModelBatchId(request.getBatchId());
+    } else if (request.getType().equals("M3")) {
+      entity.setM3ModelBatchId(request.getBatchId());
    }
 
-  public List<InferenceServerStatusDto> getInferenceServerStatusList(){return mapSheetLearnRepository.getInferenceServerStatusList();
+    entity.setStatus(request.getStatus());
+    if (request.getInferStartDttm() != null) {
+      entity.setInferStartDttm(request.getInferStartDttm());
+    }
+  }
+
+  public List<InferenceServerStatusDto> getInferenceServerStatusList() {
+    return mapSheetLearnRepository.getInferenceServerStatusList();
+  }
+
+  public InferenceBatchSheet getInferenceResultByStatus(String status) {
+    MapSheetLearnEntity entity =
+        mapSheetLearnRepository
+            .getInferenceResultByStatus(status)
+            .orElseThrow(() -> new EntityNotFoundException(status));
+
+    InferenceBatchSheet inferenceBatchSheet = new InferenceBatchSheet();
+    inferenceBatchSheet.setM1BatchId(entity.getM1ModelBatchId());
+    inferenceBatchSheet.setM2BatchId(entity.getM2ModelBatchId());
+    inferenceBatchSheet.setM3BatchId(entity.getM3ModelBatchId());
+    inferenceBatchSheet.setStatus(entity.getStatus());
+    inferenceBatchSheet.setRunningModelType(entity.getRunningModelType());
+    return inferenceBatchSheet;
   }
 
   public InferenceStatusDetailDto getInferenceStatus(UUID uuid){
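The reworked core service above relies on three repository methods: getInferenceResultByUuid, getInferenceResultByStatus and getInferenceServerStatusList. Their actual declarations live in MapSheetLearnRepositoryCustom/Impl (the two files flagged as merge conflicts) and are not shown in this diff; a sketch of the shape this code assumes, not the project's real interface:

import java.util.List;
import java.util.Optional;
import java.util.UUID;

// Assumed shape only – the real interface is MapSheetLearnRepositoryCustom and may differ.
public interface MapSheetLearnRepositoryCustomSketch {
  Optional<MapSheetLearnEntity> getInferenceResultByUuid(UUID uuid);
  Optional<MapSheetLearnEntity> getInferenceResultByStatus(String status);
  List<InferenceResultDto.InferenceServerStatusDto> getInferenceServerStatusList();
}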
@@ -44,5 +44,4 @@ public class GpuMetricEntity {
 
   @Column(name = "gpu_mem_total")
   private Float gpuMemTotal;
-
 }
@@ -103,8 +103,20 @@ public class MapSheetLearnEntity {
   @Column(name = "updated_uid")
   private Long updatedUid;
 
-  @Column(name = "batch_id")
-  private Long batchId;
+  @Column(name = "running_model_type")
+  private String runningModelType;
+
+  @Column(name = "m1_model_batch_id")
+  private Long m1ModelBatchId;
+
+  @Column(name = "m2_model_batch_id")
+  private Long m2ModelBatchId;
+
+  @Column(name = "m3_model_batch_id")
+  private Long m3ModelBatchId;
+
+  @Column(name = "detect_end_cnt")
+  private Long detectEndCnt;
 
   public InferenceResultDto.ResultList toDto() {
     return new InferenceResultDto.ResultList(
@@ -55,5 +55,4 @@ public class SystemMetricEntity {
 
   @Column(name = "memused")
   private Float memused;
-
 }
@@ -0,0 +1,52 @@
+package com.kamco.cd.kamcoback.scheduler.dto;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.time.ZonedDateTime;
+import java.util.List;
+import lombok.Data;
+
+@Data
+public class JobStatusDto {
+
+  private Long id;
+
+  @JsonProperty("created_at")
+  private ZonedDateTime createdAt;
+
+  @JsonProperty("input1_year")
+  private Integer input1Year;
+
+  @JsonProperty("input2_year")
+  private Integer input2Year;
+
+  @JsonProperty("total_jobs")
+  private Integer totalJobs;
+
+  @JsonProperty("pending_jobs")
+  private Integer pendingJobs;
+
+  @JsonProperty("running_jobs")
+  private Integer runningJobs;
+
+  @JsonProperty("completed_jobs")
+  private Integer completedJobs;
+
+  @JsonProperty("failed_jobs")
+  private Integer failedJobs;
+
+  private String status;
+
+  private List<Object> jobs;
+
+  @JsonProperty("completed_ids")
+  private List<String> completedIds;
+
+  @JsonProperty("processing_ids")
+  private List<String> processingIds;
+
+  @JsonProperty("queued_ids")
+  private List<String> queuedIds;
+
+  @JsonProperty("failed_ids")
+  private List<String> failedIds;
+}
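The new JobStatusDto maps the batch endpoint's snake_case fields via @JsonProperty. A self-contained check of that mapping; the sample payload values are invented for illustration, and the ZonedDateTime field needs the JSR-310 Jackson module, which Spring Boot registers automatically:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto;

public class JobStatusParseSketch {
  public static void main(String[] args) throws Exception {
    // Invented sample payload; field names follow the @JsonProperty mappings above.
    String json = """
        {"id": 7, "created_at": "2026-01-07T09:00:00+09:00",
         "input1_year": 2023, "input2_year": 2024,
         "total_jobs": 10, "pending_jobs": 0, "running_jobs": 2,
         "completed_jobs": 8, "failed_jobs": 0,
         "status": "COMPLETED", "jobs": [],
         "completed_ids": ["a1"], "processing_ids": [],
         "queued_ids": [], "failed_ids": []}
        """;

    ObjectMapper mapper = new ObjectMapper();
    mapper.findAndRegisterModules(); // picks up jackson-datatype-jsr310 for ZonedDateTime

    JobStatusDto dto = mapper.readValue(json, JobStatusDto.class);
    System.out.println(dto.getStatus());        // COMPLETED
    System.out.println(dto.getCompletedJobs()); // 8
  }
}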
@@ -1,8 +1,20 @@
 package com.kamco.cd.kamcoback.scheduler.service;
 
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient;
+import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient.ExternalCallResult;
+import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.InferenceBatchSheet;
+import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
 import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
+import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto;
+import java.util.List;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.log4j.Log4j2;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.MediaType;
 import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Service;
 
@@ -12,6 +24,12 @@ import org.springframework.stereotype.Service;
 public class MapSheetInferenceJobService {
 
   private final InferenceResultCoreService inferenceResultCoreService;
+  private final ExternalHttpClient externalHttpClient;
+
+  private final ObjectMapper objectMapper;
+
+  @Value("${inference.batch-url}")
+  private String batchUrl;
 
   @Scheduled(fixedDelay = 60_000)
   public void runBatch() {
@@ -19,8 +37,52 @@ public class MapSheetInferenceJobService {
 
     try {
       // TODO: 배치 로직 작성
+
+      InferenceBatchSheet batchSheet =
+          inferenceResultCoreService.getInferenceResultByStatus(Status.IN_PROGRESS.getId());
+
+      if (batchSheet == null) {
+        return;
+      }
+
+      HttpHeaders headers = new HttpHeaders();
+      headers.setContentType(MediaType.APPLICATION_JSON);
+      headers.setAccept(List.of(MediaType.APPLICATION_JSON));
+
+      Long batchId = 0L;
+
+      if (batchSheet.getM3BatchId() != null) {
+        batchId = batchSheet.getM3BatchId();
+      } else if (batchSheet.getM2BatchId() != null) {
+        batchId = batchSheet.getM2BatchId();
+      } else if (batchSheet.getM1BatchId() != null) {
+        batchId = batchSheet.getM1BatchId();
+      }
+
+      String url = batchUrl + "/" + batchId;
+
+      ExternalCallResult<String> result =
+          externalHttpClient.call(url, HttpMethod.GET, null, headers, String.class);
+
+      int status = result.statusCode();
+      if (status < 200 || status >= 300) {
+        return;
+      }
+
+      String json = result.body();
+      JobStatusDto dto = objectMapper.readValue(json, JobStatusDto.class);
+
+      if ("COMPLETED".equals(dto.getStatus())) {
+        String type = batchSheet.getRunningModelType();
+
+        // if(type.equals("M3"))
+      }
+
+      System.out.println(dto);
+
       Thread.sleep(3000); // 예시: 처리 시간 3초
-    } catch (InterruptedException e) {
+
+    } catch (InterruptedException | JsonProcessingException e) {
       Thread.currentThread().interrupt();
       log.error("배치 중 인터럽트 발생", e);
     }
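runBatch() above assumes ExternalHttpClient.call(...) returns a result object exposing statusCode() and body(). That class is not part of this diff; below is a minimal record with the shape the scheduler relies on, an assumption for illustration rather than the project's actual definition:

// Assumed shape only – the real type is ExternalHttpClient.ExternalCallResult.
public record ExternalCallResultSketch<T>(int statusCode, T body) {

  public static void main(String[] args) {
    ExternalCallResultSketch<String> result =
        new ExternalCallResultSketch<>(200, "{\"status\":\"COMPLETED\"}");
    if (result.statusCode() >= 200 && result.statusCode() < 300) {
      System.out.println(result.body()); // the scheduler parses this body with ObjectMapper
    }
  }
}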
@@ -97,4 +97,5 @@ file:
 
 inference:
   url: http://10.100.0.11:8000/jobs
+  batch-url: http://10.100.0.11:8000/batches
   geojson-dir: /kamco-nfs/requests/
@@ -83,4 +83,5 @@ file:
 
 inference:
   url: http://10.100.0.11:8000/jobs
+  batch-url: http://10.100.0.11:8000/batches
   geojson-dir: /kamco-nfs/requests/
@@ -58,5 +58,6 @@ file:
 
 inference:
   url: http://10.100.0.11:8000/jobs
+  batch-url: http://10.100.0.11:8000/batches
   geojson-dir: /kamco-nfs/requests/
 