Fix shp file generation (shp파일 생성 수정)

2026-01-23 14:08:05 +09:00
parent 3f4cf070cd
commit 406009e700
5 changed files with 30 additions and 20 deletions

InferenceResultShpApiController.java

@@ -2,18 +2,18 @@ package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto.CreateShpRequest;
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@@ -46,10 +46,10 @@ public class InferenceResultShpApiController {
}
@Operation(summary = "추론결과 shp 생성", description = "추론결과 shp 생성")
@PostMapping("/shp/{uid}")
@PostMapping("/shp/{uuid}")
public ApiResponseDto<Void> createShp(
@PathVariable String uid, @RequestBody CreateShpRequest req) {
inferenceResultShpService.createShp(uid, req);
@Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae") @PathVariable UUID uuid) {
inferenceResultShpService.createShp(uuid);
return ApiResponseDto.createOK(null);
}
}
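
For reference, a minimal client-side sketch of the endpoint after this change: shp creation is now requested with only the inference UUID as a path variable and no request body. The host and the controller's class-level @RequestMapping prefix are not visible in this diff, so the /api/inference prefix below is a placeholder assumption.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.UUID;

public class CreateShpClientSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder base URL and path prefix; substitute the controller's real mapping.
        UUID uuid = UUID.fromString("feb2ec0b-a0f7-49ca-95e4-98b2231bdaae");
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/inference/shp/" + uuid))
                // No request body any more: batch ids are resolved server-side from the UUID.
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}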

InferenceResultDto.java

@@ -538,15 +538,18 @@ public class InferenceResultDto {
public String getServerStatus() {
String enumId = "SAFETY";
if (this.cpu_user + this.cpu_system + this.gpuUtil + this.kbmemused == 0) enumId = "FAILUR";
if (this.cpu_user + this.cpu_system + this.gpuUtil + this.kbmemused == 0) {
enumId = "FAILUR";
}
// if( this.cpu_user+this.cpu_system >= 80 )enumId = "CAUTION";
return enumId;
}
public String getServerStatusName() {
String enumStr = ServerStatus.SAFETY.getText();
if (this.cpu_user + this.cpu_system + this.gpuUtil + this.kbmemused == 0)
if (this.cpu_user + this.cpu_system + this.gpuUtil + this.kbmemused == 0) {
enumStr = ServerStatus.FAILUR.getText();
}
return enumStr;
}
@@ -648,5 +651,8 @@ public class InferenceResultDto {
public static class InferenceLearnDto {
private String uid;
private Long m1ModelBatchId;
private Long m2ModelBatchId;
private Long m3ModelBatchId;
}
}

InferenceResultShpService.java

@@ -1,12 +1,11 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceLearnDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto.CreateShpRequest;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService;
import com.kamco.cd.kamcoback.scheduler.service.ShpPipelineService;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@@ -39,18 +38,20 @@ public class InferenceResultShpService {
/**
* shp 파일 수동생성
*
* @param uid
* @param req
* @param uuid
*/
public void createShp(String uid, CreateShpRequest req) {
List<Long> batchIds = new ArrayList<>();
batchIds.add(req.getM1BatchId());
batchIds.add(req.getM2BatchId());
batchIds.add(req.getM3BatchId());
public void createShp(UUID uuid) {
String batchId = req.getM1BatchId() + "," + req.getM2BatchId() + "," + req.getM3BatchId();
InferenceLearnDto dto = inferenceResultCoreService.getInferenceUid(uuid);
if (dto == null) {
return;
}
Long m1BatchId = dto.getM1ModelBatchId();
Long m2BatchId = dto.getM2ModelBatchId();
Long m3BatchId = dto.getM3ModelBatchId();
String batchId = m1BatchId + "," + m2BatchId + "," + m3BatchId;
// shp 파일 비동기 생성
shpPipelineService.runPipeline(jarPath, datasetDir, batchId, uid);
shpPipelineService.runPipeline(jarPath, datasetDir, batchId, dto.getUid());
}
}
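
A standalone sketch, not part of the commit, of how the comma-joined batch id string behaves now that plain string concatenation replaces the previous List<Long>. The values are illustrative; in the service they come from InferenceLearnDto.

public class BatchIdJoinSketch {
    public static void main(String[] args) {
        Long m1BatchId = 101L;
        Long m2BatchId = 102L;
        Long m3BatchId = 103L;
        String batchId = m1BatchId + "," + m2BatchId + "," + m3BatchId;
        System.out.println(batchId); // prints 101,102,103

        // Java string concatenation renders a null Long as the text "null", so a
        // missing model batch id would reach the pipeline as e.g. "101,null,103".
        Long missing = null;
        System.out.println(m1BatchId + "," + missing + "," + m3BatchId); // prints 101,null,103
    }
}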

InferenceResultCoreService.java

@@ -498,6 +498,9 @@ public class InferenceResultCoreService {
.orElseThrow(() -> new CustomApiException("NOT_FOUND_DATA", HttpStatus.NOT_FOUND));
InferenceLearnDto dto = new InferenceLearnDto();
dto.setUid(entity.getUid());
dto.setM1ModelBatchId(entity.getM1ModelBatchId());
dto.setM2ModelBatchId(entity.getM2ModelBatchId());
dto.setM3ModelBatchId(entity.getM3ModelBatchId());
return dto;
}

InferenceResultRepositoryCustom.java

@@ -16,5 +16,5 @@ public interface InferenceResultRepositoryCustom {
Long getInferenceLearnIdByUuid(UUID uuid);
public Optional<MapSheetLearnEntity> getInferenceUid(UUID uuid);
Optional<MapSheetLearnEntity> getInferenceUid(UUID uuid);
}