16 Commits

Author SHA1 Message Date
4f742edf8b dev 토큰 유효시간 변경, 인증 Log 추가한거 삭제 2026-03-17 13:55:30 +09:00
0aa415cf3a Merge pull request '토큰 생성 로그 추가' (#152) from feat/infer_dev_260211 into develop
Reviewed-on: #152
2026-03-17 13:33:34 +09:00
6861f6b8b6 Merge pull request '메뉴 권한 Log 추가' (#151) from feat/infer_dev_260211 into develop
Reviewed-on: #151
2026-03-17 13:22:06 +09:00
9297d19e24 Merge pull request 'refresh 토큰 인증 로그 추가' (#150) from feat/infer_dev_260211 into develop
Reviewed-on: #150
2026-03-17 12:46:12 +09:00
24dca652f0 Merge pull request 'refresh 토큰 재발급 로그 추가' (#149) from feat/infer_dev_260211 into develop
Reviewed-on: #149
2026-03-17 12:26:31 +09:00
dean
0efde4e5bb access-token 2026-03-17 07:49:13 +09:00
dean
548d82da12 access-token 2026-03-17 07:20:08 +09:00
536ff8fc65 추론 종료 async 수정 2026-03-13 15:52:20 +09:00
1dc1ce741e 추론 종료 비동기 호출 2026-03-13 15:38:11 +09:00
d21ed61666 hello 2026-03-12 08:22:39 +09:00
c9a1007c21 hello 2026-03-12 08:08:59 +09:00
413afb0b7c shp파일 바꾸는로직정리 2026-03-12 08:08:11 +09:00
e69eccc82b shp파일 바꾸는로직정리 2026-03-12 07:57:14 +09:00
828a4c5dca shp파일 바꾸는로직정리 2026-03-12 07:31:54 +09:00
5d417d85ff Merge pull request '국유인연동 알럿 문구 변경' (#148) from feat/infer_dev_260211 into develop
Reviewed-on: #148
2026-03-09 10:52:25 +09:00
8d45e91982 Merge pull request '국유인연동 : 라벨링 진행중 회차 uuid 추가' (#147) from feat/infer_dev_260211 into develop
Reviewed-on: #147
2026-03-09 10:10:52 +09:00
17 changed files with 279 additions and 31 deletions

View File

@@ -16,7 +16,6 @@ import org.springframework.util.AntPathMatcher;
import org.springframework.web.filter.OncePerRequestFilter; import org.springframework.web.filter.OncePerRequestFilter;
@Component @Component
@Log4j2
@RequiredArgsConstructor @RequiredArgsConstructor
public class JwtAuthenticationFilter extends OncePerRequestFilter { public class JwtAuthenticationFilter extends OncePerRequestFilter {
@@ -33,24 +32,16 @@ public class JwtAuthenticationFilter extends OncePerRequestFilter {
HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) HttpServletRequest request, HttpServletResponse response, FilterChain filterChain)
throws ServletException, IOException { throws ServletException, IOException {
log.info("JWT FILTER START uri={}", request.getRequestURI());
// HTTP 요청 헤더에서 JWT 토큰 꺼내기 // HTTP 요청 헤더에서 JWT 토큰 꺼내기
String token = resolveToken(request); String token = resolveToken(request);
log.info("JWT TOKEN = {}", token);
// JWT 토큰을 검증하고, 인증된 사용자로 SecurityContext에 등록 // JWT 토큰을 검증하고, 인증된 사용자로 SecurityContext에 등록
if (token != null && jwtTokenProvider.isValidToken(token)) { if (token != null && jwtTokenProvider.isValidToken(token)) {
log.info("JWT TOKEN VALID");
String username = jwtTokenProvider.getSubject(token); String username = jwtTokenProvider.getSubject(token);
log.info("JWT USERNAME = {}", username);
UserDetails userDetails = userDetailsService.loadUserByUsername(username); UserDetails userDetails = userDetailsService.loadUserByUsername(username);
log.info("JWT AUTHORITIES = {}", userDetails.getAuthorities());
UsernamePasswordAuthenticationToken authentication = UsernamePasswordAuthenticationToken authentication =
new UsernamePasswordAuthenticationToken(userDetails, null, userDetails.getAuthorities()); new UsernamePasswordAuthenticationToken(userDetails, null, userDetails.getAuthorities());
SecurityContextHolder.getContext().setAuthentication(authentication); SecurityContextHolder.getContext().setAuthentication(authentication);
log.info("JWT SECURITY CONTEXT SET");
} }
filterChain.doFilter(request, response); filterChain.doFilter(request, response);

View File

@@ -21,7 +21,6 @@ import org.springframework.stereotype.Component;
* <p>- Redis 사용 안 함 - ADMIN 예외 없음 (DB 매핑 기준) - 한 계정 = role 1개 - menu_url(prefix) 기반 API 접근 제어 * <p>- Redis 사용 안 함 - ADMIN 예외 없음 (DB 매핑 기준) - 한 계정 = role 1개 - menu_url(prefix) 기반 API 접근 제어
*/ */
@Component @Component
@Log4j2
@RequiredArgsConstructor @RequiredArgsConstructor
public class MenuAuthorizationManager implements AuthorizationManager<RequestAuthorizationContext> { public class MenuAuthorizationManager implements AuthorizationManager<RequestAuthorizationContext> {
@@ -60,15 +59,12 @@ public class MenuAuthorizationManager implements AuthorizationManager<RequestAut
for (MenuEntity menu : allowedMenus) { for (MenuEntity menu : allowedMenus) {
String baseUri = menu.getMenuUrl(); String baseUri = menu.getMenuUrl();
log.info("MenuAuthorizationManager REQUEST = {}", requestPath);
log.info("MenuAuthorizationManager BASE URI = {}", baseUri);
if (baseUri == null || baseUri.isBlank()) { if (baseUri == null || baseUri.isBlank()) {
continue; continue;
} }
if (matchUri(baseUri, requestPath)) { if (matchUri(baseUri, requestPath)) {
log.info("MenuAuthorizationManager MATCH SUCCESS");
return new AuthorizationDecision(true); return new AuthorizationDecision(true);
} }
} }

View File

@@ -1,23 +1,45 @@
package com.kamco.cd.kamcoback.common.service; package com.kamco.cd.kamcoback.common.service;
import com.kamco.cd.kamcoback.config.InferenceProperties;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2; import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@Log4j2 @Log4j2
// 0312
@RequiredArgsConstructor
@Component @Component
public class ExternalJarRunner { public class ExternalJarRunner {
@Value("${spring.profiles.active}") @Value("${spring.profiles.active}")
private String profile; private String profile;
// 0312
private final InferenceProperties inferenceProperties;
private static final long TIMEOUT_MINUTES = TimeUnit.DAYS.toMinutes(3); private static final long TIMEOUT_MINUTES = TimeUnit.DAYS.toMinutes(3);
// Example invocation of the exporter jar:
// java -jar build/libs/shp-exporter.jar --batch --geoserver.enabled=true
//   --converter.inference-id=qq99999 --converter.batch-ids[0]=111
/**
 * Generates SHP files in batch mode via the v2 exporter jar for one inference run.
 *
 * @param inferenceLearningId inference run id, passed as {@code --converter.inference-id}
 * @param batchIds batch ids, passed as indexed {@code --converter.batch-ids[i]} arguments
 */
public void run(String inferenceLearningId, List<Long> batchIds) {
    // Path of the v2 shapefile-exporter jar (from inference.jar-path-v2).
    String jarPathV2 = inferenceProperties.getJarPathV2();

    List<String> args = new ArrayList<>();
    // No leading spaces: each element is handed to the child process as a single
    // argument, so " --batch" (with a space) would not be recognized by the
    // exporter's argument parser.
    args.add("--spring.profiles.active=" + profile);
    args.add("--batch");
    args.add("--geoserver.enabled=true");
    args.add("--converter.inference-id=" + inferenceLearningId);
    // Index from 0: the previous args.size()-based index started at 4, emitting
    // --converter.batch-ids[4] for the first id instead of --converter.batch-ids[0].
    for (int i = 0; i < batchIds.size(); i++) {
        args.add("--converter.batch-ids[" + i + "]=" + batchIds.get(i));
    }

    execJar(jarPathV2, args);
}
/** /**
* shp 파일 생성 * shp 파일 생성
* *
@@ -74,7 +96,8 @@ public class ExternalJarRunner {
cmd.add("-jar"); cmd.add("-jar");
cmd.add(jarPath); cmd.add(jarPath);
cmd.addAll(args); cmd.addAll(args);
// 0312
log.info("exec jar command: {}", cmd);
ProcessBuilder pb = new ProcessBuilder(cmd); ProcessBuilder pb = new ProcessBuilder(cmd);
pb.redirectErrorStream(true); pb.redirectErrorStream(true);

View File

@@ -16,5 +16,7 @@ public class InferenceProperties {
private String batchUrl; private String batchUrl;
private String geojsonDir; private String geojsonDir;
private String jarPath; private String jarPath;
// 0312
private String jarPathV2;
private String inferenceServerName; private String inferenceServerName;
} }

View File

@@ -71,6 +71,10 @@ public class SecurityConfig {
.requestMatchers("/api/test/review") .requestMatchers("/api/test/review")
.hasAnyRole("ADMIN", "REVIEWER") .hasAnyRole("ADMIN", "REVIEWER")
// shapefile 생성 테스트 API - 인증 없이 접근 가능
.requestMatchers("/api/test/make-shapefile")
.permitAll()
// ASYNC/ERROR 재디스패치는 막지 않기 (다운로드/스트리밍에서 필수) // ASYNC/ERROR 재디스패치는 막지 않기 (다운로드/스트리밍에서 필수)
.dispatcherTypeMatchers(DispatcherType.ASYNC, DispatcherType.ERROR) .dispatcherTypeMatchers(DispatcherType.ASYNC, DispatcherType.ERROR)
.permitAll() .permitAll()

View File

@@ -8,6 +8,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
import com.kamco.cd.kamcoback.inference.service.InferenceAsyncService;
import com.kamco.cd.kamcoback.inference.service.InferenceResultService; import com.kamco.cd.kamcoback.inference.service.InferenceResultService;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto; import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto.DownloadReq; import com.kamco.cd.kamcoback.log.dto.AuditLogDto.DownloadReq;
@@ -55,6 +56,7 @@ import org.springframework.web.bind.annotation.RestController;
public class InferenceResultApiController { public class InferenceResultApiController {
private final InferenceResultService inferenceResultService; private final InferenceResultService inferenceResultService;
private final InferenceAsyncService inferenceAsyncService;
private final MapSheetMngService mapSheetMngService; private final MapSheetMngService mapSheetMngService;
private final ModelMngService modelMngService; private final ModelMngService modelMngService;
private final RangeDownloadResponder rangeDownloadResponder; private final RangeDownloadResponder rangeDownloadResponder;
@@ -176,7 +178,8 @@ public class InferenceResultApiController {
}) })
@DeleteMapping("/end") @DeleteMapping("/end")
public ApiResponseDto<UUID> getInferenceGeomList() { public ApiResponseDto<UUID> getInferenceGeomList() {
UUID uuid = inferenceResultService.deleteInferenceEnd(); // UUID uuid = inferenceResultService.deleteInferenceEnd();
UUID uuid = inferenceAsyncService.asyncInferenceEnd();
return ApiResponseDto.ok(uuid); return ApiResponseDto.ok(uuid);
} }

View File

@@ -87,6 +87,7 @@ public class InferenceResultDto {
READY("대기"), READY("대기"),
IN_PROGRESS("진행중"), IN_PROGRESS("진행중"),
END("완료"), END("완료"),
END_FAIL("종료실패"),
FORCED_END("강제종료"); FORCED_END("강제종료");
private final String desc; private final String desc;
@@ -683,6 +684,7 @@ public class InferenceResultDto {
@NoArgsConstructor @NoArgsConstructor
@AllArgsConstructor @AllArgsConstructor
public static class MapSheetFallbackYearDto { public static class MapSheetFallbackYearDto {
private String mapSheetNum; private String mapSheetNum;
private Integer mngYyyy; private Integer mngYyyy;
} }

View File

@@ -0,0 +1,117 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.common.inference.service.InferenceCommonService;
import com.kamco.cd.kamcoback.common.utils.UserUtil;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient.ExternalCallResult;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
import com.kamco.cd.kamcoback.postgres.core.AuditLogCoreService;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService;
import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/** Inference management: asynchronous termination of the currently running inference job. */
@Service
@Slf4j
@RequiredArgsConstructor
@Transactional(readOnly = true)
public class InferenceAsyncService {

    private final InferenceResultCoreService inferenceResultCoreService;
    // NOTE(review): the four dependencies below are not referenced anywhere in this
    // class yet; kept as-is because removing them changes the Lombok-generated
    // constructor used for dependency injection.
    private final MapSheetMngCoreService mapSheetMngCoreService;
    private final ModelMngCoreService modelMngCoreService;
    private final AuditLogCoreService auditLogCoreService;
    private final InferenceCommonService inferenceCommonService;
    private final ExternalHttpClient externalHttpClient;
    private final UserUtil userUtil;

    // Base URL of the external inference batch API (DELETE {batchUrl}/{batchId} stops a batch).
    @Value("${inference.batch-url}")
    private String batchUrl;

    // The properties below are injected but currently unused in this class.
    @Value("${inference.inference-server-name}")
    private String inferenceServerName;

    @Value("${file.dataset-dir}")
    private String datasetDir;

    @Value("${spring.profiles.active}")
    private String activeEnv;

    @Value("${inference.geojson-dir}")
    private String inferenceDir;

    /**
     * Requests termination of the inference that is currently processing.
     *
     * @return UUID of the inference whose termination was requested
     * @throws CustomApiException with 404 when no inference is currently processing
     */
    @Transactional
    public UUID asyncInferenceEnd() {
        SaveInferenceAiDto dto = inferenceResultCoreService.getProcessing();
        if (dto == null) {
            throw new CustomApiException("NOT_FOUND", HttpStatus.NOT_FOUND);
        }
        // NOTE(review): this is a self-invocation, so the Spring proxy is bypassed and
        // @Async/@Transactional on deleteInferenceEndAsync do NOT apply — the call runs
        // synchronously on this thread despite the "async" naming. Move the async method
        // to a separate bean (or inject the proxy, e.g. @Lazy self-reference) to make it
        // truly asynchronous.
        this.deleteInferenceEndAsync(dto); // intended asynchronous termination call
        return dto.getUuid();
    }

    /**
     * Calls the external batch API to stop the inference, then records the outcome:
     * FORCED_END (+ geometry upsert) on success, END_FAIL on a failed API call.
     * Exceptions are logged and swallowed — an async worker has no caller to
     * propagate to.
     *
     * @param dto the processing inference (uuid + batchId are read from it)
     */
    @Async("inferenceEndExecutor")
    @Transactional
    public void deleteInferenceEndAsync(SaveInferenceAiDto dto) {
        Long batchId = dto.getBatchId();
        String url = batchUrl + "/" + batchId;

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        headers.setAccept(List.of(MediaType.APPLICATION_JSON));

        try {
            log.info("[추론 종료 비동기 시작] uuid={}, batchId={}", dto.getUuid(), batchId);

            ExternalCallResult<String> result =
                externalHttpClient.callLong(url, HttpMethod.DELETE, dto, headers, String.class);

            if (!result.success()) {
                log.error("[추론 종료 실패] 외부 API 호출 실패. uuid={}, batchId={}", dto.getUuid(), batchId);
                // TODO: confirm END_FAIL is the right terminal status for a failed stop request.
                updateInferenceStatus(dto.getUuid(), Status.END_FAIL);
                return;
            }

            updateInferenceStatus(dto.getUuid(), Status.FORCED_END);

            Long learnId = inferenceResultCoreService.getInferenceLearnIdByUuid(dto.getUuid());
            inferenceResultCoreService.upsertGeomData(learnId);

            log.info("[추론 종료 비동기 완료] uuid={}, batchId={}", dto.getUuid(), batchId);
        } catch (Exception e) {
            // NOTE(review): on this path the inference row keeps its previous status;
            // consider marking it END_FAIL here as well so it does not stay "processing".
            log.error("[추론 종료 비동기 예외] uuid={}, batchId={}", dto.getUuid(), batchId, e);
        }
    }

    /**
     * Persists a terminal status for the inference identified by {@code uuid},
     * stamping the current user and the end timestamp.
     *
     * <p>NOTE(review): userUtil.getId() runs on the async executor thread where the
     * SecurityContext may not be propagated — verify it resolves the expected user.
     */
    private void updateInferenceStatus(UUID uuid, Status status) {
        SaveInferenceAiDto request = new SaveInferenceAiDto();
        request.setUuid(uuid);
        request.setStatus(status.getId());
        request.setUpdateUid(userUtil.getId());
        request.setInferEndDttm(ZonedDateTime.now());
        inferenceResultCoreService.update(request);
    }
}

View File

@@ -36,7 +36,6 @@ import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController; import org.springframework.web.bind.annotation.RestController;
@Tag(name = "인증(Auth)", description = "로그인, 토큰 재발급, 로그아웃 API") @Tag(name = "인증(Auth)", description = "로그인, 토큰 재발급, 로그아웃 API")
@Log4j2
@RestController @RestController
@RequestMapping("/api/auth") @RequestMapping("/api/auth")
@RequiredArgsConstructor @RequiredArgsConstructor
@@ -172,9 +171,6 @@ public class AuthController {
throw new AccessDeniedException("만료되었거나 유효하지 않은 리프레시 토큰 입니다."); throw new AccessDeniedException("만료되었거나 유효하지 않은 리프레시 토큰 입니다.");
} }
log.info("refresh ========");
log.info("refreshToken ======== {}", refreshToken);
String username = jwtTokenProvider.getSubject(refreshToken); String username = jwtTokenProvider.getSubject(refreshToken);
// Redis에 저장된 RefreshToken과 일치하는지 확인 // Redis에 저장된 RefreshToken과 일치하는지 확인

View File

@@ -21,6 +21,17 @@ public class AsyncConfig {
return ex; return ex;
} }
// Dedicated thread pool backing @Async("makeShapeFile") shapefile generation.
@Bean(name = "makeShapeFile")
public Executor makeShapeFileExecutor() {
    ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
    executor.setCorePoolSize(2);
    executor.setMaxPoolSize(4);
    executor.setQueueCapacity(50);
    executor.setThreadNamePrefix("makeShapeFile-");
    executor.initialize();
    return executor;
}
@Bean(name = "auditLogExecutor") @Bean(name = "auditLogExecutor")
public Executor auditLogExecutor() { public Executor auditLogExecutor() {
ThreadPoolTaskExecutor exec = new ThreadPoolTaskExecutor(); ThreadPoolTaskExecutor exec = new ThreadPoolTaskExecutor();
@@ -31,4 +42,16 @@ public class AsyncConfig {
exec.initialize(); exec.initialize();
return exec; return exec;
} }
// Thread pool backing @Async("inferenceEndExecutor") inference-termination calls.
@Bean(name = "inferenceEndExecutor")
public Executor inferenceEndExecutor() {
    ThreadPoolTaskExecutor ex = new ThreadPoolTaskExecutor();
    ex.setCorePoolSize(5);
    ex.setMaxPoolSize(10);
    ex.setQueueCapacity(100);
    ex.setThreadNamePrefix("inference-async-");
    ex.initialize();
    return ex;
}
} }

View File

@@ -255,8 +255,9 @@ public class MapSheetInferenceJobService {
// 추론 종료일때 shp 파일 생성 // 추론 종료일때 shp 파일 생성
String batchIdStr = batchIds.stream().map(String::valueOf).collect(Collectors.joining(",")); String batchIdStr = batchIds.stream().map(String::valueOf).collect(Collectors.joining(","));
// shp 파일 비동기 생성 // 0312 shp 파일 비동기 생성 (바꿔주세요)
shpPipelineService.runPipeline(jarPath, datasetDir, batchIdStr, sheet.getUid()); shpPipelineService.makeShapeFile(sheet.getUid(), batchIds);
// shpPipelineService.runPipeline(jarPath, datasetDir, batchIdStr, sheet.getUid());
} }
/** /**

View File

@@ -3,7 +3,7 @@ package com.kamco.cd.kamcoback.scheduler.service;
import com.kamco.cd.kamcoback.common.service.ExternalJarRunner; import com.kamco.cd.kamcoback.common.service.ExternalJarRunner;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService; import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.scheduler.config.ShpKeyLock; import com.kamco.cd.kamcoback.scheduler.config.ShpKeyLock;
import java.nio.file.Paths; import java.util.List;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2; import lombok.extern.log4j.Log4j2;
import org.springframework.scheduling.annotation.Async; import org.springframework.scheduling.annotation.Async;
@@ -18,6 +18,30 @@ public class ShpPipelineService {
private final ExternalJarRunner externalJarRunner; private final ExternalJarRunner externalJarRunner;
private final ShpKeyLock shpKeyLock; private final ShpKeyLock shpKeyLock;
/**
 * Asynchronously generates SHP files for one inference run via the v2 exporter jar.
 * A per-inference lock guarantees at most one pipeline per inferenceId at a time.
 *
 * <p>Fixes the original lock logic, which was inverted: when tryLock FAILED it
 * logged "already running" but ran the pipeline anyway and unlocked a lock it
 * never acquired, and when tryLock SUCCEEDED it did nothing (leaking the lock).
 *
 * @param inferenceId inference run identifier (also the lock key)
 * @param batchIds batch ids forwarded to the exporter jar
 */
@Async("makeShapeFile")
public void makeShapeFile(String inferenceId, List<Long> batchIds) {
    // Another pipeline already holds the lock for this id: skip this request.
    if (!shpKeyLock.tryLock(inferenceId)) {
        log.info("SHP pipeline already running. inferenceId={}", inferenceId);
        return;
    }
    try {
        log.info("SHP pipeline started. inferenceId={}", inferenceId);
        externalJarRunner.run(inferenceId, batchIds);
    } catch (Exception e) {
        log.error("SHP pipeline failed. inferenceId={}", inferenceId, e);
        // TODO 실패 상태 업데이트 로직 추가
    } finally {
        // Always release the lock we acquired above, success or failure.
        log.info("SHP pipeline DONE. inferenceId={}", inferenceId);
        shpKeyLock.unlock(inferenceId);
    }
}
/** /**
* shp 파일 생성 1. merge 생성 2. 생성된 merge shp 파일로 geoserver 등록, 3.도엽별로 shp 생성 * shp 파일 생성 1. merge 생성 2. 생성된 merge shp 파일로 geoserver 등록, 3.도엽별로 shp 생성
* *
@@ -36,24 +60,33 @@ public class ShpPipelineService {
} }
try { try {
log.info("");
log.info("============================================================");
log.info("SHP pipeline started. inferenceId={}", inferenceId);
log.info("============================================================");
// uid 기준 merge shp, geojson 파일 생성 // uid 기준 merge shp, geojson 파일 생성
externalJarRunner.run(jarPath, batchIds, inferenceId, "", "MERGED"); externalJarRunner.run(jarPath, batchIds, inferenceId, "", "MERGED");
// uid 기준 shp 파일 geoserver 등록 // uid 기준 shp 파일 geoserver 등록
String register = // String register =
Paths.get(datasetDir, inferenceId, "merge", inferenceId + ".shp").toString(); // Paths.get(datasetDir, inferenceId, "merge", inferenceId + ".shp").toString();
log.info("register={}", register); // log.info("register={}", register);
externalJarRunner.run(jarPath, register, inferenceId); // externalJarRunner.run(jarPath, register, inferenceId);
//
// uid 기준 도엽별 shp, geojson 파일 생성 // // uid 기준 도엽별 shp, geojson 파일 생성
externalJarRunner.run(jarPath, batchIds, inferenceId, "", "RESOLVE"); // externalJarRunner.run(jarPath, batchIds, inferenceId, "", "RESOLVE");
//
log.info("SHP pipeline finished. inferenceId={}", inferenceId); // log.info("SHP pipeline finished. inferenceId={}", inferenceId);
} catch (Exception e) { } catch (Exception e) {
log.error("SHP pipeline failed. inferenceId={}", inferenceId, e); log.error("SHP pipeline failed. inferenceId={}", inferenceId, e);
// TODO 실패 상태 업데이트 로직 추가 // TODO 실패 상태 업데이트 로직 추가
} finally { } finally {
log.info("============================================================");
log.info("SHP pipeline DONE. inferenceId={}", inferenceId);
log.info("============================================================");
shpKeyLock.unlock(inferenceId); shpKeyLock.unlock(inferenceId);
} }
} }

View File

@@ -0,0 +1,50 @@
package com.kamco.cd.kamcoback.test;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.scheduler.service.ShpPipelineService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.web.ErrorResponse;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@Tag(name = "test shape api", description = "test shape api")
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/test")
public class TestShapeApiController {

    private final ShpPipelineService shpPipelineService;

    /**
     * Test endpoint that kicks off shapefile generation for the given inference run.
     * The pipeline itself runs asynchronously; this call returns immediately.
     */
    @Operation(
        summary = "shapefile 생성 테스트",
        description = "지정된 inference ID와 batch ID 목록으로 shapefile을 생성합니다.")
    @ApiResponses({
        @ApiResponse(
            responseCode = "200",
            description = "shapefile 생성 요청 성공",
            content = @Content(schema = @Schema(implementation = String.class))),
        @ApiResponse(
            responseCode = "400",
            description = "잘못된 요청 데이터",
            content = @Content(schema = @Schema(implementation = ErrorResponse.class))),
        @ApiResponse(
            responseCode = "500",
            description = "서버 오류",
            content = @Content(schema = @Schema(implementation = ErrorResponse.class)))
    })
    @GetMapping("/make-shapefile")
    public ApiResponseDto<String> makeShapeFile(
        @RequestParam String inferenceId, @RequestParam List<Long> batchIds) {
        // Fire-and-forget: generation continues on the async executor after we respond.
        shpPipelineService.makeShapeFile(inferenceId, batchIds);
        String message = "Shapefile 생성이 시작되었습니다. inferenceId: " + inferenceId;
        return ApiResponseDto.ok(message);
    }
}

View File

@@ -100,6 +100,7 @@ inference:
url: http://192.168.2.183:8000/jobs url: http://192.168.2.183:8000/jobs
batch-url: http://192.168.2.183:8000/batches batch-url: http://192.168.2.183:8000/batches
jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
jar-path-v2: ${inference.nfs}/repo/jar/shp-exporter-v2.jar
inference-server-name: server1,server2,server3,server4 inference-server-name: server1,server2,server3,server4
output-dir: ${inference.nfs}/model_output/export output-dir: ${inference.nfs}/model_output/export

View File

@@ -78,6 +78,7 @@ inference:
url: http://10.100.0.11:8000/jobs url: http://10.100.0.11:8000/jobs
batch-url: http://10.100.0.11:8000/batches batch-url: http://10.100.0.11:8000/batches
jar-path: jar/shp-exporter.jar jar-path: jar/shp-exporter.jar
jar-path-v2: jar/shp-exporter-v2.jar
inference-server-name: server1,server2,server3,server4 inference-server-name: server1,server2,server3,server4
output-dir: ${inference.nfs}/model_output/export output-dir: ${inference.nfs}/model_output/export

View File

@@ -95,6 +95,8 @@ inference:
url: http://172.16.4.56:8000/jobs url: http://172.16.4.56:8000/jobs
batch-url: http://172.16.4.56:8000/batches batch-url: http://172.16.4.56:8000/batches
jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
# //0312
jar-path-v2: ${inference.nfs}/repo/jar/shp-exporter-v2.jar
inference-server-name: server1,server2,server3,server4 inference-server-name: server1,server2,server3,server4
output-dir: ${inference.nfs}/model_output/export output-dir: ${inference.nfs}/model_output/export

View File

@@ -88,3 +88,6 @@ inference:
nfs: /kamco-nfs nfs: /kamco-nfs
geojson-dir: ${inference.nfs}/requests/ # 추론실행을 위한 파일생성경로 geojson-dir: ${inference.nfs}/requests/ # 추론실행을 위한 파일생성경로
jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
# //0312
jar-path-v2: ${inference.nfs}/repo/jar/shp-exporter-v2.jar