105 Commits

Author SHA1 Message Date
0425a6486d Merge pull request 'shp 테스트' (#160) from feat/infer_dev_260211 into develop
Reviewed-on: #160
2026-03-25 14:07:21 +09:00
241c7222d1 shp 테스트 2026-03-25 14:07:06 +09:00
71e4ab14bd Merge pull request 'shp 테스트' (#159) from feat/infer_dev_260211 into develop
Reviewed-on: #159
2026-03-25 14:01:26 +09:00
52da4dafc3 shp 테스트 2026-03-25 14:01:02 +09:00
24d615174d Merge pull request '토근 정보 변경' (#158) from feat/infer_dev_260211 into develop
Reviewed-on: #158
2026-03-19 13:48:56 +09:00
12b0f0867d 토근 정보 변경 2026-03-19 13:48:14 +09:00
283d906da6 Merge pull request '토큰시간 변경' (#157) from feat/infer_dev_260211 into develop
Reviewed-on: #157
2026-03-19 10:31:05 +09:00
360b451c38 토큰시간 변경 2026-03-19 10:30:42 +09:00
80e281cb99 Merge pull request '개발서버 토큰 시간 변경' (#156) from feat/infer_dev_260211 into develop
Reviewed-on: #156
2026-03-18 14:02:29 +09:00
b07bc38ee8 개발서버 토큰 시간 변경 2026-03-18 14:01:56 +09:00
e4c1c76b2b Merge pull request '재할당 transactional 추가' (#155) from feat/infer_dev_260211 into develop
Reviewed-on: #155
2026-03-17 17:47:43 +09:00
01b64eeca7 재할당 transactional 추가 2026-03-17 17:47:15 +09:00
516f949a37 Merge pull request 'spotless 적용' (#154) from feat/infer_dev_260211 into develop
Reviewed-on: #154
2026-03-17 14:00:22 +09:00
b6ed3b57ef spotless 적용 2026-03-17 14:00:04 +09:00
9eebf23774 Merge pull request 'dev 토큰 유효시간 변경, 인증 Log 추가한거 삭제' (#153) from feat/infer_dev_260211 into develop
Reviewed-on: #153
2026-03-17 13:55:53 +09:00
4f742edf8b dev 토큰 유효시간 변경, 인증 Log 추가한거 삭제 2026-03-17 13:55:30 +09:00
0aa415cf3a Merge pull request '토큰 생성 로그 추가' (#152) from feat/infer_dev_260211 into develop
Reviewed-on: #152
2026-03-17 13:33:34 +09:00
884b635585 토큰 생성 로그 추가 2026-03-17 13:33:17 +09:00
6861f6b8b6 Merge pull request '메뉴 권한 Log 추가' (#151) from feat/infer_dev_260211 into develop
Reviewed-on: #151
2026-03-17 13:22:06 +09:00
a97af0d4dd 메뉴 권한 Log 추가 2026-03-17 13:21:46 +09:00
9297d19e24 Merge pull request 'refresh 토큰 인증 로그 추가' (#150) from feat/infer_dev_260211 into develop
Reviewed-on: #150
2026-03-17 12:46:12 +09:00
65c38b3083 refresh 토큰 인증 로그 추가 2026-03-17 12:45:56 +09:00
24dca652f0 Merge pull request 'refresh 토큰 재발급 로그 추가' (#149) from feat/infer_dev_260211 into develop
Reviewed-on: #149
2026-03-17 12:26:31 +09:00
193cd449a8 refresh 토큰 재발급 로그 추가 2026-03-17 12:23:45 +09:00
dean
0efde4e5bb access-token 2026-03-17 07:49:13 +09:00
dean
548d82da12 access-token 2026-03-17 07:20:08 +09:00
536ff8fc65 추론 종료 async 수정 2026-03-13 15:52:20 +09:00
1dc1ce741e 추론 종료 비동기 호출 2026-03-13 15:38:11 +09:00
d21ed61666 hello 2026-03-12 08:22:39 +09:00
c9a1007c21 hello 2026-03-12 08:08:59 +09:00
413afb0b7c shp파일 바꾸는로직정리 2026-03-12 08:08:11 +09:00
e69eccc82b shp파일 바꾸는로직정리 2026-03-12 07:57:14 +09:00
828a4c5dca shp파일 바꾸는로직정리 2026-03-12 07:31:54 +09:00
5d417d85ff Merge pull request '국유인연동 알럿 문구 변경' (#148) from feat/infer_dev_260211 into develop
Reviewed-on: #148
2026-03-09 10:52:25 +09:00
614d6da695 국유인연동 알럿 문구 변경 2026-03-09 10:51:57 +09:00
8d45e91982 Merge pull request '국유인연동 : 라벨링 진행중 회차 uuid 추가' (#147) from feat/infer_dev_260211 into develop
Reviewed-on: #147
2026-03-09 10:10:52 +09:00
e1febf5863 국유인연동 : 라벨링 진행중 회차 uuid 추가 2026-03-09 10:10:08 +09:00
97fb659f15 enum @notBlank제거 2026-03-07 01:43:07 +09:00
ebb48c3f57 enum @notBlank제거 2026-03-07 01:41:34 +09:00
a35b4b8f59 enum @notBlank제거 2026-03-07 01:24:55 +09:00
0a53e186dc enum @notBlank제거 2026-03-07 01:21:30 +09:00
f3c7c5e8e8 enum @notBlank제거 2026-03-07 01:13:28 +09:00
c505e9b740 @enumvalid제거 2026-03-07 01:11:56 +09:00
8c2f2eff1b enum @notBlank제거 2026-03-07 01:08:21 +09:00
ade8bfa76a Merge pull request 'feat/dean/polishing_2' (#146) from feat/dean/polishing_2 into develop
Reviewed-on: #146
2026-03-07 01:02:58 +09:00
3752b83292 인퍼런스실행 정리 2026-03-07 00:56:51 +09:00
9f31f661fc 원복 2026-03-06 22:12:54 +09:00
b477928261 Merge pull request 'testing 테이블 조회 조건 추가' (#145) from feat/infer_dev_260211 into develop
Reviewed-on: #145
2026-03-06 19:51:41 +09:00
f4f75f353c testing 테이블 조회 조건 추가 2026-03-06 19:51:08 +09:00
f977e4be7c Merge pull request 'shp 파일 생성중 오류 발생시 중지되게 수정' (#144) from feat/infer_dev_260211 into develop
Reviewed-on: #144
2026-03-06 19:29:27 +09:00
573da5b53a shp 파일 생성중 오류 발생시 중지되게 수정 2026-03-06 19:21:08 +09:00
bd72852556 Merge pull request '추론실행 오류 수정' (#143) from feat/infer_dev_260211 into develop
Reviewed-on: #143
2026-03-06 18:54:16 +09:00
e4b904606f 추론실행 오류 수정 2026-03-06 18:53:38 +09:00
0d14dafecc Merge pull request 'feat/infer_dev_260211' (#142) from feat/infer_dev_260211 into develop
Reviewed-on: #142
2026-03-06 18:05:00 +09:00
37f534abff spotless 2026-03-06 18:04:38 +09:00
3521a5fd3d trim, foundUnique.add 추가 2026-03-06 18:04:23 +09:00
cbae052338 Merge branch 'develop' into feat/infer_dev_260211
# Conflicts:
#	src/main/java/com/kamco/cd/kamcoback/common/inference/utils/GeoJsonValidator.java
2026-03-06 18:02:44 +09:00
b2c9c36d4c trim, log 추가 2026-03-06 17:58:23 +09:00
114088469e 추론실행 테스트 2026-03-06 17:53:01 +09:00
7d6dca8b24 Merge pull request '추론실행 테스트' (#138) from feat/infer_dev_260211 into develop
Reviewed-on: #138
2026-03-06 17:42:58 +09:00
2e7ad26528 추론실행 테스트 2026-03-06 17:42:40 +09:00
0353e172ed Merge pull request 'feat/infer_dev_260211' (#137) from feat/infer_dev_260211 into develop
Reviewed-on: #137
2026-03-06 17:15:20 +09:00
1d5b1343a9 file del 추가 2026-03-06 17:14:44 +09:00
65f9026922 학습서버배포정보 수정 2026-03-06 16:02:18 +09:00
9b79f31d7b 줄맞춤 2026-03-06 16:01:28 +09:00
de45bf47c5 review-to-geojson 로직 수정, 미사용 주석 추가 2026-03-06 13:16:08 +09:00
a413de4b93 학습데이터 상세 > 폴리곤 수 로직 수정 2026-03-06 13:12:33 +09:00
815675f112 Merge pull request 'inference_results 결과 저장 중복방지' (#136) from feat/infer_dev_260211 into develop
Reviewed-on: #136
2026-03-06 12:43:56 +09:00
b9f7e36175 inference_results 결과 저장 중복방지 2026-03-06 12:43:36 +09:00
855aca6e5a Merge pull request 'feat/infer_dev_260211' (#135) from feat/infer_dev_260211 into develop
Reviewed-on: #135
2026-03-06 12:25:04 +09:00
206dba6ff9 추론결과 저장 result 조회 조건 수정 2026-03-06 12:24:52 +09:00
5db9127f0c 추론결과 저장 result 조회 저장 테스트 2026-03-06 12:23:16 +09:00
132bad8c33 Merge pull request '추론결과 저장 result 조회 저장 테스트' (#134) from feat/infer_dev_260211 into develop
Reviewed-on: #134
2026-03-06 12:00:01 +09:00
6dde4cd891 추론결과 저장 result 조회 저장 테스트 2026-03-06 11:59:38 +09:00
ac248c2f30 Merge pull request '추론결과 저장 result 조회 조건 변경' (#133) from feat/infer_dev_260211 into develop
Reviewed-on: #133
2026-03-06 11:10:04 +09:00
15d082af0e 추론결과 저장 result 조회 조건 변경 2026-03-06 11:09:34 +09:00
3be536424a Merge pull request 'feat/infer_dev_260211' (#132) from feat/infer_dev_260211 into develop
Reviewed-on: #132
2026-03-05 11:33:24 +09:00
a3b2fd0c73 status 체크 후 pnu cnt update 로 수정 2026-03-05 11:32:43 +09:00
9b504396bc status 체크 후 pnu cnt update 추가 2026-03-05 09:39:00 +09:00
2cc490012e Merge pull request 'feat/infer_dev_260211' (#131) from feat/infer_dev_260211 into develop
Reviewed-on: #131
2026-03-04 23:01:14 +09:00
f99144eccc pnu cnt update 수동호출 추가 2026-03-04 23:00:46 +09:00
66b78022a9 변화탐지 selected polygon, point 조건 수정 2026-03-04 22:59:59 +09:00
199e302068 Merge pull request '실태조사 적합여부 값 수정' (#130) from feat/infer_dev_260211 into develop
Reviewed-on: #130
2026-03-04 21:01:52 +09:00
5a3370d5fb 실태조사 적합여부 값 수정 2026-03-04 21:01:21 +09:00
5d7cb18fb8 줄맞춤 2026-03-04 20:05:00 +09:00
57baf4d911 국유인의타입추가 2026-03-04 19:58:02 +09:00
2fee2143ba Merge pull request 'pnu 로 polygon, point 조회 API 수정' (#129) from feat/infer_dev_260211 into develop
Reviewed-on: #129
2026-03-04 17:34:46 +09:00
f4a410ba49 pnu 로 polygon, point 조회 API 수정 2026-03-04 17:34:12 +09:00
deba2b84f0 Merge pull request '추론결과 상세 조회 api 시간 log 추가' (#128) from feat/infer_dev_260211 into develop
Reviewed-on: #128
2026-03-04 17:04:25 +09:00
85d8efc493 추론결과 상세 조회 api 시간 log 추가
- 추론결과 기본정보
- 분류별 탐지건수 조회
- 추론관리 분석결과 geom 상세 목록
2026-03-04 17:03:50 +09:00
5a82a9507a Merge pull request 'unfit 업데이트 수정' (#127) from feat/infer_dev_260211 into develop
Reviewed-on: #127
2026-03-04 13:21:37 +09:00
b9a268d43a unfit 업데이트 수정 2026-03-04 13:20:55 +09:00
4d1ea53d8f 변화탐지 pnu로도 조회 가능하게 하기 2026-03-04 08:04:04 +09:00
278d5d20e8 restTemplate read timeout, connect timeout 늘리기 2026-03-03 17:07:19 +09:00
4d9fa2d6a5 좌표계 5179 추가 2026-03-03 15:03:44 +09:00
e28e5ba371 국유인 반영 파일 경로 수정 2026-03-03 14:23:24 +09:00
22cc996399 Merge pull request 'feat/infer_dev_260211' (#126) from feat/infer_dev_260211 into develop
Reviewed-on: #126
2026-02-28 00:34:35 +09:00
e32f5e33d9 추론 기본 정보에 상태 추가 2026-02-28 00:34:03 +09:00
f95b82bf7f 스케줄링에 log 추가 완료 2026-02-28 00:23:48 +09:00
060a0db126 Merge pull request 'feat/infer_dev_260211' (#125) from feat/infer_dev_260211 into develop
Reviewed-on: #125
2026-02-27 22:53:07 +09:00
0692456b5b 학습데이터제작 > 작업현황 > 폴리곤 수 추가 2026-02-27 22:52:45 +09:00
8fb9f89c8f 국유인 실태조사 적합여부 임의로 업데이트 API 2026-02-27 22:15:31 +09:00
1cfe6e923f Merge remote-tracking branch 'origin/feat/infer_dev_260211' into feat/infer_dev_260211 2026-02-27 21:04:24 +09:00
af2721949c shp 생성 컨트롤러 이동, 주석추가 2026-02-27 21:04:18 +09:00
e9b9075c72 국유인 실태조사 job 에 log 찍기 추가 2026-02-27 20:37:56 +09:00
69 changed files with 1454 additions and 392 deletions

6
.gitignore vendored
View File

@@ -60,6 +60,7 @@ Thumbs.db
.env.*.local
application-local.yml
application-secret.yml
metrics-collector/.env
### Docker (local testing) ###
.dockerignore
@@ -72,3 +73,8 @@ docker-compose.override.yml
*.swo
*~
!/CLAUDE.md
### Metrics Collector ###
metrics-collector/venv/
metrics-collector/*.pid
metrics-collector/wheels/

47
inference-table-index.sh Normal file
View File

@@ -0,0 +1,47 @@
#!/bin/bash
#############################################
# PostgreSQL INDEX CREATE SCRIPT
# 에러 발생해도 계속 진행
#############################################
# set -e 제거 (중단 안함)
# ===== 환경변수 체크 =====
if [ -z "$DB_HOST" ] || [ -z "$DB_PORT" ] || [ -z "$DB_NAME" ] || [ -z "$DB_USER" ]; then
echo "DB 환경변수가 설정되지 않았습니다."
exit 1
fi
echo "========================================"
echo "START INDEX CREATE"
echo "TIME: $(date)"
echo "========================================"
run_index() {
echo "----------------------------------------"
echo "Running: $1"
psql -h $DB_HOST -p $DB_PORT -U $DB_USER -d $DB_NAME -c "$1"
echo "----------------------------------------"
}
run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_files_uid_ext_state
ON tb_map_sheet_mng_files (hst_uid, file_ext, file_state);"
run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_mng_files_hstuid_ext
ON tb_map_sheet_mng_files (hst_uid, file_ext);"
run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_inkx_no_use
ON tb_map_inkx_5k (mapidcd_no, use_inference);"
run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_inkx5k_mapidcd
ON tb_map_inkx_5k (mapidcd_no);"
run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_hst_exec_sheetnum_yyyy_desc
ON tb_map_sheet_mng_hst (map_sheet_num, mng_yyyy DESC);"
echo "========================================"
echo "END INDEX CREATE"
echo "TIME: $(date)"
echo "========================================"
echo "모든 인덱스 시도 완료"

View File

@@ -37,7 +37,6 @@ public class JwtAuthenticationFilter extends OncePerRequestFilter {
// JWT 토큰을 검증하고, 인증된 사용자로 SecurityContext에 등록
if (token != null && jwtTokenProvider.isValidToken(token)) {
String username = jwtTokenProvider.getSubject(token);
UserDetails userDetails = userDetailsService.loadUserByUsername(username);
UsernamePasswordAuthenticationToken authentication =
new UsernamePasswordAuthenticationToken(userDetails, null, userDetails.getAuthorities());

View File

@@ -8,11 +8,13 @@ import jakarta.annotation.PostConstruct;
import java.nio.charset.StandardCharsets;
import java.util.Date;
import javax.crypto.SecretKey;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
/** 토큰 생성 */
@Component
@Log4j2
public class JwtTokenProvider {
@Value("${jwt.secret}")
@@ -34,11 +36,13 @@ public class JwtTokenProvider {
// Access Token 생성
public String createAccessToken(String subject) {
log.info("TOKEN VALIDITY = {}", accessTokenValidityInMs);
return createToken(subject, accessTokenValidityInMs);
}
// Refresh Token 생성
public String createRefreshToken(String subject) {
log.info("REFRESH TOKEN VALIDITY = {}", refreshTokenValidityInMs);
return createToken(subject, refreshTokenValidityInMs);
}

View File

@@ -58,6 +58,7 @@ public class MenuAuthorizationManager implements AuthorizationManager<RequestAut
for (MenuEntity menu : allowedMenus) {
String baseUri = menu.getMenuUrl();
if (baseUri == null || baseUri.isBlank()) {
continue;
}

View File

@@ -117,16 +117,18 @@ public class ChangeDetectionApiController {
@Parameter(description = "도곽/일반(MAPSHEET/ADDRESS) 검색 타입", required = true)
@RequestParam(defaultValue = "MAPSHEET")
DetectSearchType type,
@Parameter(description = "5k/50k 구분(SCALE_5K/SCALE_50K))", required = true)
@RequestParam(defaultValue = "SCALE_50K")
@Parameter(description = "5k/50k 구분(SCALE_5K/SCALE_50K))")
@RequestParam(defaultValue = "SCALE_50K", required = false)
MapScaleType scale,
@Parameter(
description = "변화탐지 년도(차수) /year-list 의 uuid",
example = "8584e8d4-53b3-4582-bde2-28a81495a626")
UUID uuid,
@Parameter(description = "도엽번호", example = "34607") @RequestParam String mapSheetNum) {
@Parameter(description = "도엽번호", example = "34607") @RequestParam(required = false)
String mapSheetNum,
@Parameter(description = "pnu") @RequestParam(required = false) String pnu) {
return ApiResponseDto.ok(
changeDetectionService.getChangeDetectionPolygonList(type, scale, uuid, mapSheetNum));
changeDetectionService.getChangeDetectionPolygonList(type, scale, uuid, mapSheetNum, pnu));
}
@Operation(summary = "변화탐지 결과 Point", description = "변화탐지 결과 Point")
@@ -135,16 +137,18 @@ public class ChangeDetectionApiController {
@Parameter(description = "도곽/일반(MAPSHEET/ADDRESS) 검색 타입", required = true)
@RequestParam(defaultValue = "MAPSHEET")
DetectSearchType type,
@Parameter(description = "5k/50k 구분(SCALE_5K/SCALE_50K))", required = true)
@RequestParam(defaultValue = "SCALE_50K")
@Parameter(description = "5k/50k 구분(SCALE_5K/SCALE_50K))")
@RequestParam(defaultValue = "SCALE_50K", required = false)
MapScaleType scale,
@Parameter(
description = "변화탐지 년도(차수) /year-list 의 uuid",
example = "8584e8d4-53b3-4582-bde2-28a81495a626")
UUID uuid,
@Parameter(description = "도엽번호", example = "34607") @RequestParam String mapSheetNum) {
@Parameter(description = "도엽번호", example = "34607") @RequestParam(required = false)
String mapSheetNum,
@Parameter(description = "pnu") @RequestParam(required = false) String pnu) {
return ApiResponseDto.ok(
changeDetectionService.getChangeDetectionPointList(type, scale, uuid, mapSheetNum));
changeDetectionService.getChangeDetectionPointList(type, scale, uuid, mapSheetNum, pnu));
}
@Operation(summary = "선택 변화탐지 결과 uuid 조회", description = "선택 변화탐지 결과 uuid 조회")
@@ -163,13 +167,13 @@ public class ChangeDetectionApiController {
@RequestParam
String chnDtctId,
@Parameter(description = "polygon 32자 uid", example = "3B1A7E5F895A4D9698489540EE1BBE1E")
@RequestParam
@RequestParam(required = false)
String cdObjectId,
@Parameter(
description = "polygon 32자 uids",
example =
"3B1A7E5F895A4D9698489540EE1BBE1E,3B221A2AF9614647A0903A972D56C574,3B22686A7ACE44FC9CB20F1B4FA6DEFD,3B376D94A183479BB5FBE3D7166E6E1A")
@RequestParam
@RequestParam(required = false)
List<String> cdObjectIds,
@Parameter(description = "pnu") @RequestParam(required = false) String pnu) {
return ApiResponseDto.ok(
@@ -183,13 +187,13 @@ public class ChangeDetectionApiController {
@RequestParam
String chnDtctId,
@Parameter(description = "polygon 32자 uid", example = "3B1A7E5F895A4D9698489540EE1BBE1E")
@RequestParam
@RequestParam(required = false)
String cdObjectId,
@Parameter(
description = "polygon 32자 uids",
example =
"3B1A7E5F895A4D9698489540EE1BBE1E,3B221A2AF9614647A0903A972D56C574,3B22686A7ACE44FC9CB20F1B4FA6DEFD,3B376D94A183479BB5FBE3D7166E6E1A")
@RequestParam
@RequestParam(required = false)
List<String> cdObjectIds,
@Parameter(description = "pnu") @RequestParam(required = false) String pnu) {
return ApiResponseDto.ok(

View File

@@ -46,13 +46,13 @@ public class ChangeDetectionService {
}
public ChangeDetectionDto.PolygonFeatureList getChangeDetectionPolygonList(
DetectSearchType type, MapScaleType scale, UUID uuid, String mapSheetNum) {
DetectSearchType type, MapScaleType scale, UUID uuid, String mapSheetNum, String pnu) {
switch (type) {
case MAPSHEET -> {
return changeDetectionCoreService.getChangeDetectionPolygonList(scale, uuid, mapSheetNum);
}
case ADDRESS -> {
return new ChangeDetectionDto.PolygonFeatureList(); // TODO: 일반 주소 검색 로직 확인 후 작업 필요
return changeDetectionCoreService.getChangeDetectionPnuPolygonList(uuid, pnu);
}
default -> throw new IllegalArgumentException("Unsupported type: " + type);
}
@@ -60,14 +60,14 @@ public class ChangeDetectionService {
// Geometry 객체 순환 참조 문제로 캐싱 불가
public ChangeDetectionDto.PointFeatureList getChangeDetectionPointList(
DetectSearchType type, MapScaleType scale, UUID uuid, String mapSheetNum) {
DetectSearchType type, MapScaleType scale, UUID uuid, String mapSheetNum, String pnu) {
switch (type) {
case MAPSHEET -> {
return changeDetectionCoreService.getChangeDetectionPointList(scale, uuid, mapSheetNum);
}
case ADDRESS -> {
return new ChangeDetectionDto.PointFeatureList(); // TODO: 일반 주소 검색 로직 확인 후 작업 필요
return changeDetectionCoreService.getChangeDetectionPnuPointList(uuid, pnu);
}
default -> throw new IllegalArgumentException("Unsupported type: " + type);
}
@@ -101,7 +101,7 @@ public class ChangeDetectionService {
*/
public ChangeDetectionDto.PolygonFeatureList getPolygonListByCd(
String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu) {
return changeDetectionCoreService.getPolygonListByCd(chnDtctId, cdObjectId, cdObjectIds);
return changeDetectionCoreService.getPolygonListByCd(chnDtctId, cdObjectId, cdObjectIds, pnu);
}
/**
@@ -115,7 +115,7 @@ public class ChangeDetectionService {
*/
public ChangeDetectionDto.PointFeatureList getPointListByCd(
String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu) {
return changeDetectionCoreService.getPointListByCd(chnDtctId, cdObjectId, cdObjectIds);
return changeDetectionCoreService.getPointListByCd(chnDtctId, cdObjectId, cdObjectIds, pnu);
}
/**

View File

@@ -11,7 +11,8 @@ import lombok.Getter;
public enum CrsType implements EnumType {
EPSG_3857("Web Mercator, 웹지도 미터(EPSG:900913 동일)"),
EPSG_4326("WGS84 위경도, GeoJSON/OSM 기본"),
EPSG_5186("Korea 2000 중부 TM, 한국 SHP");
EPSG_5186("5186::Korea 2000 중부 TM, 한국 SHP"),
EPSG_5179("5179::Korea 2000 중부 TM, 한국 SHP");
private final String desc;

View File

@@ -13,7 +13,9 @@ public enum LayerType implements EnumType {
TILE("배경지도"),
GEOJSON("객체데이터"),
WMTS("타일레이어"),
WMS("지적도");
WMS("지적도"),
KAMCO_WMS("국유인WMS"),
KAMCO_WMTS("국유인WMTS");
private final String desc;

View File

@@ -5,7 +5,11 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -170,7 +174,9 @@ public class GeoJsonValidator {
// properties가 있고 scene_id가 null이 아니면 텍스트로 읽음
// 없으면 null 처리
String sceneId =
(props != null && props.hasNonNull("scene_id")) ? props.get("scene_id").asText() : null;
(props != null && props.hasNonNull("scene_id"))
? props.get("scene_id").asText().trim()
: null;
// scene_id가 없거나 빈값이면 "정상적으로 도엽번호가 들어오지 않은 feature"로 카운트
if (sceneId == null || sceneId.isBlank()) {
@@ -209,17 +215,17 @@ public class GeoJsonValidator {
// =========================================================
log.info(
"""
===== GeoJSON Validation =====
file: {}
features(total): {}
requested(unique): {}
found(unique scene_id): {}
scene_id null/blank: {}
duplicates(scene_id): {}
missing(requested - found): {}
extra(found - requested): {}
==============================
""",
===== GeoJSON Validation =====
file: {}
features(total): {}
requested(unique): {}
found(unique scene_id): {}
scene_id null/blank: {}
duplicates(scene_id): {}
missing(requested - found): {}
extra(found - requested): {}
==============================
""",
geojsonPath,
featureCount, // 중복 포함한 전체 feature 수
requested.size(), // 요청 도엽 유니크 수
@@ -230,12 +236,16 @@ public class GeoJsonValidator {
extra.size()); // 요청하지 않았는데 들어온 도엽 수
// 중복/누락/추가 항목은 전체를 다 찍으면 로그 폭발하므로 샘플만
if (!duplicates.isEmpty())
log.warn("duplicates sample: {}", duplicates.stream().limit(20).toList());
// if (!duplicates.isEmpty())
// log.warn("duplicates sample: {}", duplicates.stream().limit(20).toList());
if (!missing.isEmpty()) log.warn("missing sample: {}", missing.stream().limit(50).toList());
if (!missing.isEmpty()) {
log.warn("missing sample: {}", missing.stream().limit(50).toList());
}
if (!extra.isEmpty()) log.warn("extra sample: {}", extra.stream().limit(50).toList());
if (!extra.isEmpty()) {
log.warn("extra sample: {}", extra.stream().limit(50).toList());
}
// =========================================================
// 6) 실패 조건 판정

View File

@@ -1,23 +1,45 @@
package com.kamco.cd.kamcoback.common.service;
import com.kamco.cd.kamcoback.config.InferenceProperties;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
@Log4j2
// 0312
@RequiredArgsConstructor
@Component
public class ExternalJarRunner {
@Value("${spring.profiles.active}")
private String profile;
// 0312
private final InferenceProperties inferenceProperties;
private static final long TIMEOUT_MINUTES = TimeUnit.DAYS.toMinutes(3);
// java -jar build/libs/shp-exporter.jar --batch --geoserver.enabled=true
// --converter.inference-id=qq99999 --converter.batch-ids[0]=111
// 0312 shp 파일 배치를 통해 생성
public void run(String inferenceLearningId, List<Long> batchIds) {
// JAR 경로 (shape파일 생성용)
String jarPathV2 = inferenceProperties.getJarPathV2();
List<String> args = new ArrayList<>();
args.add(" --spring.profiles.active=" + profile);
args.add(" --batch");
args.add(" --geoserver.enabled=true");
args.add(" --converter.inference-id=" + inferenceLearningId);
batchIds.forEach(batchId -> args.add(" --converter.batch-ids[" + args.size() + "]=" + batchId));
execJar(jarPathV2, args);
}
/**
* shp 파일 생성
*
@@ -28,7 +50,8 @@ public class ExternalJarRunner {
* @param mode
* <p>MERGED - batch-ids 에 해당하는 **모든 데이터를 하나의 Shapefile로 병합 생성,
* <p>MAP_IDS - 명시적으로 전달한 map-ids만 대상으로 Shapefile 생성,
* <p>RESOLVE - batch-ids 기준으로 **JAR 내부에서 map_ids를 조회**한 뒤 Shapefile 생성
* <p>RESOLVE - batch-ids 기준으로 **JAR 내부에서 map_ids를 조회**한 뒤 Shapefile 생성 java -jar
* build/libs/shp-exporter.jar --spring.profiles.active=prod
*/
public void run(String jarPath, String batchIds, String inferenceId, String mapIds, String mode) {
List<String> args = new ArrayList<>();
@@ -73,7 +96,8 @@ public class ExternalJarRunner {
cmd.add("-jar");
cmd.add(jarPath);
cmd.addAll(args);
// 0312
log.info("exec jar command: {}", cmd);
ProcessBuilder pb = new ProcessBuilder(cmd);
pb.redirectErrorStream(true);
@@ -103,6 +127,7 @@ public class ExternalJarRunner {
} catch (Exception e) {
log.error("jar execution error. output=\n{}", out, e);
throw new RuntimeException("jar execution error\n" + out, e);
}
}

View File

@@ -16,5 +16,7 @@ public class InferenceProperties {
private String batchUrl;
private String geojsonDir;
private String jarPath;
// 0312
private String jarPathV2;
private String inferenceServerName;
}

View File

@@ -71,6 +71,10 @@ public class SecurityConfig {
.requestMatchers("/api/test/review")
.hasAnyRole("ADMIN", "REVIEWER")
// shapefile 생성 테스트 API - 인증 없이 접근 가능
.requestMatchers("/api/test/make-shapefile")
.permitAll()
// ASYNC/ERROR 재디스패치는 막지 않기 (다운로드/스트리밍에서 필수)
.dispatcherTypeMatchers(DispatcherType.ASYNC, DispatcherType.ERROR)
.permitAll()

View File

@@ -16,12 +16,12 @@ public class RestTemplateConfig {
@Bean
@Primary
public RestTemplate restTemplate(RestTemplateBuilder builder) {
return build(builder, 2000, 3000);
return build(builder, 20000, 60000);
}
@Bean("restTemplateLong")
public RestTemplate restTemplateLong(RestTemplateBuilder builder) {
return build(builder, 2000, 60000);
return build(builder, 20000, 60000);
}
private RestTemplate build(RestTemplateBuilder builder, int connectTimeoutMs, int readTimeoutMs) {

View File

@@ -14,9 +14,9 @@ public class GukYuinDto {
public enum GukYuinLinkFailCode implements EnumType {
OK("연동 가능"),
NOT_FOUND("대상 회차가 없습니다."),
SCOPE_PART_NOT_ALLOWED("부분 도엽은 연동 불가능 합니다."),
HAS_RUNNING_INFERENCE("라벨링 진행 중 회차가 있습니다."),
OTHER_GUKYUIN_IN_PROGRESS("국유in 연동 진행 중 회차가 있습니다.");
SCOPE_PART_NOT_ALLOWED("부분 도엽 추론 결과는 연동 할 수 없습니다."),
HAS_RUNNING_INFERENCE("라벨링 진행중 회차가 있습니다.\n진행중인 라벨링 작업을 종료하신 후 다시 연동해주세요."),
OTHER_GUKYUIN_IN_PROGRESS("국유in 연동 진행중입니다. 선행 연동 작업이 종료된 후 진행할 수 있습니다.");
private final String desc;
@@ -36,8 +36,9 @@ public class GukYuinDto {
public static class GukYuinLinkableRes {
private boolean linkable;
// private GukYuinLinkFailCode code;
private GukYuinLinkFailCode code;
private String message;
private UUID inferenceUuid;
}
// Repository가 반환할 Fact(조회 결과)
@@ -45,7 +46,8 @@ public class GukYuinDto {
boolean existsLearn,
boolean isPartScope,
boolean hasRunningInference,
boolean hasOtherUnfinishedGukYuin) {}
boolean hasOtherUnfinishedGukYuin,
UUID inferenceUuid) {}
@Getter
@Setter

View File

@@ -69,7 +69,7 @@ public class GukYuinApiService {
@Value("${file.nfs}")
private String nfs;
@Value("${file.output-dir}")
@Value("${file.output-dir}") // 국유인 반영 파일 경로
private String outputDir;
@Value("${file.dataset-dir}")
@@ -237,9 +237,12 @@ public class GukYuinApiService {
GukYuinLinkFailCode code = decideCode(f);
GukYuinLinkableRes res = new GukYuinLinkableRes();
// res.setCode(code);
res.setCode(code);
res.setLinkable(code == GukYuinLinkFailCode.OK);
res.setMessage(code.getDesc());
if (code == GukYuinLinkFailCode.HAS_RUNNING_INFERENCE) {
res.setInferenceUuid(f.inferenceUuid());
}
return res;
}
@@ -281,6 +284,7 @@ public class GukYuinApiService {
+ "&reqEpno="
+ ("Y".equals(batchYn) ? "BATCH" : userUtil.getEmployeeNo());
log.info("##### API 호출 URL : {}", url);
ExternalCallResult<ChngDetectContDto.ResultContDto> result =
externalHttpClient.call(
url,
@@ -289,6 +293,7 @@ public class GukYuinApiService {
netUtils.jsonHeaders(),
ChngDetectContDto.ResultContDto.class);
log.info("##### API 호출 완료 : {}", result.toString());
List<ContBasic> contList = result.body().getResult();
if (contList == null || contList.isEmpty()) {
return new ResultContDto(
@@ -348,6 +353,7 @@ public class GukYuinApiService {
info.setReqIp(myip);
info.setReqEpno("Y".equals(batchYn) ? "BATCH" : userUtil.getEmployeeNo());
log.info("##### API 호출 URL : {}", url);
ExternalCallResult<ChngDetectContDto.ResultLabelDto> result =
externalHttpClient.call(
url,
@@ -355,6 +361,7 @@ public class GukYuinApiService {
info,
netUtils.jsonHeaders(),
ChngDetectContDto.ResultLabelDto.class);
log.info("##### API 호출 완료 : {}", result.toString());
this.insertGukyuinAuditLog(
EventType.MODIFIED.getId(),
@@ -408,10 +415,12 @@ public class GukYuinApiService {
+ "&reqEpno="
+ ("Y".equals(batchYn) ? "BATCH" : userUtil.getEmployeeNo());
log.info("##### API 호출 URL : {}", url);
ExternalCallResult<ChngDetectMastDto.ResultDto> result =
externalHttpClient.call(
url, HttpMethod.GET, null, netUtils.jsonHeaders(), ChngDetectMastDto.ResultDto.class);
log.info("##### API 호출 완료 : {}", result.toString());
this.insertGukyuinAuditLog(
EventType.DETAIL.getId(),
netUtils.getLocalIP(),
@@ -562,10 +571,12 @@ public class GukYuinApiService {
+ "&yyyymmdd="
+ yyyymmdd;
log.info("##### API 호출 URL : {}", url);
ExternalCallResult<ChngDetectMastDto.RlbDtctDto> result =
externalHttpClient.call(
url, HttpMethod.GET, null, netUtils.jsonHeaders(), ChngDetectMastDto.RlbDtctDto.class);
log.info("##### API 호출 완료 : {}", result.toString());
this.insertGukyuinAuditLog(
EventType.LIST.getId(),
netUtils.getLocalIP(),
@@ -636,4 +647,8 @@ public class GukYuinApiService {
public List<String> findStbltObjectIds(String uid, String mapSheetNum) {
return gukyuinCoreService.findStbltObjectIds(uid, mapSheetNum);
}
public Integer updateStbltRandomData(String uid, int updateCnt) {
return gukyuinCoreService.updateStbltRandomData(uid, updateCnt);
}
}

View File

@@ -8,6 +8,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
import com.kamco.cd.kamcoback.inference.service.InferenceAsyncService;
import com.kamco.cd.kamcoback.inference.service.InferenceResultService;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto.DownloadReq;
@@ -27,6 +28,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.validation.Valid;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.LocalDate;
import java.util.List;
@@ -35,6 +37,7 @@ import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.data.domain.Page;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
@@ -53,6 +56,7 @@ import org.springframework.web.bind.annotation.RestController;
public class InferenceResultApiController {
private final InferenceResultService inferenceResultService;
private final InferenceAsyncService inferenceAsyncService;
private final MapSheetMngService mapSheetMngService;
private final ModelMngService modelMngService;
private final RangeDownloadResponder rangeDownloadResponder;
@@ -174,7 +178,8 @@ public class InferenceResultApiController {
})
@DeleteMapping("/end")
public ApiResponseDto<UUID> getInferenceGeomList() {
UUID uuid = inferenceResultService.deleteInferenceEnd();
// UUID uuid = inferenceResultService.deleteInferenceEnd();
UUID uuid = inferenceAsyncService.asyncInferenceEnd();
return ApiResponseDto.ok(uuid);
}
@@ -373,6 +378,9 @@ public class InferenceResultApiController {
}
Path zipPath = Path.of(path);
if (!Files.isRegularFile(zipPath)) {
return ResponseEntity.status(HttpStatus.NOT_FOUND).body("다운로드 받을 파일이 없습니다.");
}
return rangeDownloadResponder.buildZipResponse(zipPath, uid + ".zip", request);
}

View File

@@ -1,60 +0,0 @@
package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST API for persisting inference-result data and generating SHP files.
 *
 * <p>All endpoints are rooted at {@code /api/inference/shp} and delegate to
 * {@link InferenceResultShpService}.
 */
@Tag(name = "추론결과 데이터 생성", description = "추론결과 데이터 생성 API")
@Log4j2
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/inference/shp")
public class InferenceResultShpApiController {

    private final InferenceResultShpService inferenceResultShpService;

    // Sample JSON payload of 5k map-sheet ids. Not referenced by any endpoint
    // in this controller — presumably kept for manual testing; TODO confirm
    // whether it can be removed.
    public static final String MAP_ID =
            "{ \"mapIds\": [\"37716096\",\"37716095\",\"37716094\",\"37716091\",\"37716086\",\"37716085\",\"37716084\",\"37716083\",\"37716076\",\"37716066\",\"37716065\",\"37716064\",\"37716063\",\"37716061\",\"37716051\",\"37716011\"] }";

    /**
     * Persists inference-result data for the given learning run.
     *
     * @param learnId id of the learning run whose inference results are saved
     * @return row counts written, wrapped in the standard API envelope
     */
    @Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장")
    @ApiResponses(
            value = {
                @ApiResponse(
                        responseCode = "201",
                        description = "데이터 저장 성공",
                        content =
                                @Content(
                                        mediaType = "application/json",
                                        schema =
                                                @Schema(implementation = InferenceResultShpDto.InferenceCntDto.class))),
                @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
                @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
            })
    @PostMapping("/save/{learnId}")
    public ApiResponseDto<InferenceResultShpDto.InferenceCntDto> saveInferenceData(
            @PathVariable Long learnId) {
        return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(learnId));
    }

    /**
     * Manually triggers SHP file generation for the given inference run.
     *
     * <p>NOTE(review): combined with the class-level mapping, this endpoint
     * resolves to {@code /api/inference/shp/shp/{uuid}} — the doubled "shp"
     * segment looks unintentional; confirm the intended path.
     *
     * @param uuid uuid of the inference run
     * @return empty OK response
     */
    @Operation(summary = "추론결과 shp 생성", description = "추론결과 shp 생성")
    @PostMapping("/shp/{uuid}")
    public ApiResponseDto<Void> createShp(
            @Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae") @PathVariable UUID uuid) {
        // Manually generate the SHP file for this run.
        inferenceResultShpService.createShp(uuid);
        return ApiResponseDto.createOK(null);
    }
}

View File

@@ -458,6 +458,7 @@ public class InferenceDetailDto {
private String bboxGeom;
private String bboxCenterPoint;
private UUID inferenceUuid;
private String status;
public AnalResultInfo(
String analTitle,
@@ -474,7 +475,8 @@ public class InferenceDetailDto {
String subUid,
Boolean applyYn,
ZonedDateTime applyDttm,
UUID inferenceUuid) {
UUID inferenceUuid,
String status) {
this.analTitle = analTitle;
this.modelVer1 = modelVer1;
this.modelVer2 = modelVer2;
@@ -489,6 +491,7 @@ public class InferenceDetailDto {
this.subUid = subUid;
this.applyYn = applyYn;
this.applyDttm = applyDttm;
this.status = status;
Duration elapsed =
(inferStartDttm != null && inferEndDttm != null)
? Duration.between(inferStartDttm, inferEndDttm)
@@ -538,6 +541,10 @@ public class InferenceDetailDto {
public Boolean getApplyYn() {
return this.applyYn != null && this.applyYn;
}
public String getStatusNm() {
return InferenceResultDto.Status.getDescByCode(this.status);
}
}
@Getter

View File

@@ -18,6 +18,7 @@ import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.ToString;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
@@ -86,6 +87,7 @@ public class InferenceResultDto {
READY("대기"),
IN_PROGRESS("진행중"),
END("완료"),
END_FAIL("종료실패"),
FORCED_END("강제종료");
private final String desc;
@@ -240,6 +242,7 @@ public class InferenceResultDto {
@Setter
@NoArgsConstructor
@AllArgsConstructor
@ToString
public static class RegReq {
@Schema(description = "제목", example = "2023-2024 변화탐지 테스트")
@@ -272,11 +275,10 @@ public class InferenceResultDto {
private String mapSheetScope;
@Schema(description = "탐지 데이터 옵션 - 추론제외(EXCL), 이전 년도 도엽 사용(PREV)", example = "EXCL")
@NotBlank
@EnumValid(
enumClass = DetectOption.class,
message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.")
private String detectOption;
// @EnumValid(
// enumClass = DetectOption.class,
// message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.")
private DetectOption detectOption;
@Schema(description = "5k 도협 번호 목록", example = "[33605,33606, 33610, 34802, 35603, 35611]")
@NotNull
@@ -682,6 +684,7 @@ public class InferenceResultDto {
@NoArgsConstructor
@AllArgsConstructor
public static class MapSheetFallbackYearDto {
private String mapSheetNum;
private Integer mngYyyy;
}

View File

@@ -71,14 +71,16 @@ public class InferenceResultShpDto {
@NoArgsConstructor
public static class InferenceCntDto {
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 목록 저장 터이터 건수", example = "120")
@Schema(
description = "추론 결과(inference_results_testing)를 기준으로 데이터 목록 저장 터이터 건수",
example = "120")
int sheetAnalDataCnt;
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 데이터 건수", example = "120")
int inferenceCnt;
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 Geom 데이터 건수", example = "120")
@Schema(description = "추론 결과(inference_results_testing)를 기준으로 저장 Geom 데이터 건수", example = "120")
int inferenceGeomCnt;
@Schema(description = "추론 결과(inference_results_testing)를 기준으로 저장 집계 데이터 건수", example = "120")
int inferenceSttcnt;
}
@Setter

View File

@@ -0,0 +1,117 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.common.inference.service.InferenceCommonService;
import com.kamco.cd.kamcoback.common.utils.UserUtil;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient.ExternalCallResult;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
import com.kamco.cd.kamcoback.postgres.core.AuditLogCoreService;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService;
import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
 * Asynchronous inference management.
 *
 * <p>Terminates the inference batch that is currently in progress by calling
 * the external inference server, then records the resulting status
 * ({@code FORCED_END} on success, {@code END_FAIL} on failure) on the
 * inference row.
 */
@Service
@Slf4j
@RequiredArgsConstructor
@Transactional(readOnly = true)
public class InferenceAsyncService {

    private final InferenceResultCoreService inferenceResultCoreService;
    private final MapSheetMngCoreService mapSheetMngCoreService;
    private final ModelMngCoreService modelMngCoreService;
    private final AuditLogCoreService auditLogCoreService;
    private final InferenceCommonService inferenceCommonService;
    private final ExternalHttpClient externalHttpClient;
    private final UserUtil userUtil;

    // Base URL of the external batch API; the batch id is appended per call.
    @Value("${inference.batch-url}")
    private String batchUrl;

    // NOTE(review): the four properties below are not referenced anywhere in
    // this class — confirm whether they are needed or can be removed.
    @Value("${inference.inference-server-name}")
    private String inferenceServerName;

    @Value("${file.dataset-dir}")
    private String datasetDir;

    @Value("${spring.profiles.active}")
    private String activeEnv;

    @Value("${inference.geojson-dir}")
    private String inferenceDir;

    /**
     * Requests termination of the inference that is currently in progress.
     *
     * @return uuid of the inference run whose termination was requested
     * @throws CustomApiException with NOT_FOUND when no inference is in progress
     */
    @Transactional
    public UUID asyncInferenceEnd() {
        SaveInferenceAiDto dto = inferenceResultCoreService.getProcessing();
        if (dto == null) {
            throw new CustomApiException("NOT_FOUND", HttpStatus.NOT_FOUND);
        }
        // NOTE(review): this is a same-class call, so Spring's proxy-based
        // @Async (and the method-level @Transactional) advice on
        // deleteInferenceEndAsync is bypassed and the call runs synchronously
        // on this thread — confirm whether it should go through the proxy
        // (e.g. a self-injected reference or a separate bean) instead.
        this.deleteInferenceEndAsync(dto); // intended to be an asynchronous termination call
        return dto.getUuid();
    }

    /**
     * Calls the external batch API (DELETE {batchUrl}/{batchId}) to terminate
     * the given inference run, then updates the run's status.
     *
     * <p>On a failed external call the run is marked {@code END_FAIL}; on
     * success it is marked {@code FORCED_END} and the aggregated geometry
     * tables are refreshed via {@code upsertGeomData}.
     *
     * @param dto the in-progress inference run to terminate
     */
    @Async("inferenceEndExecutor")
    @Transactional
    public void deleteInferenceEndAsync(SaveInferenceAiDto dto) {
        Long batchId = dto.getBatchId();
        String url = batchUrl + "/" + batchId;

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        headers.setAccept(List.of(MediaType.APPLICATION_JSON));

        try {
            log.info("[추론 종료 비동기 시작] uuid={}, batchId={}", dto.getUuid(), batchId);

            ExternalCallResult<String> result =
                    externalHttpClient.callLong(url, HttpMethod.DELETE, dto, headers, String.class);

            if (!result.success()) {
                log.error("[추론 종료 실패] 외부 API 호출 실패. uuid={}, batchId={}", dto.getUuid(), batchId);

                // Record the failed termination attempt on the inference row.
                SaveInferenceAiDto failRequest = new SaveInferenceAiDto();
                failRequest.setUuid(dto.getUuid());
                failRequest.setStatus(Status.END_FAIL.getId()); // TODO: confirm a dedicated "end failed" status is the right modeling
                failRequest.setUpdateUid(userUtil.getId());
                failRequest.setInferEndDttm(ZonedDateTime.now());
                inferenceResultCoreService.update(failRequest);
                return;
            }

            // External termination succeeded: mark the run as forcibly ended.
            SaveInferenceAiDto request = new SaveInferenceAiDto();
            request.setStatus(Status.FORCED_END.getId());
            request.setUuid(dto.getUuid());
            request.setUpdateUid(userUtil.getId());
            request.setInferEndDttm(ZonedDateTime.now());
            inferenceResultCoreService.update(request);

            // Refresh the aggregated geometry tables for this run.
            Long learnId = inferenceResultCoreService.getInferenceLearnIdByUuid(dto.getUuid());
            inferenceResultCoreService.upsertGeomData(learnId);

            log.info("[추론 종료 비동기 완료] uuid={}, batchId={}", dto.getUuid(), batchId);
        } catch (Exception e) {
            // NOTE(review): exceptions are only logged; the inference row keeps
            // its previous (in-progress) status in that case — confirm this is
            // the intended behavior.
            log.error("[추론 종료 비동기 예외] uuid={}, batchId={}", dto.getUuid(), batchId, e);
        }
    }
}

View File

@@ -56,7 +56,7 @@ import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.http.HttpHeaders;
@@ -68,7 +68,7 @@ import org.springframework.transaction.annotation.Transactional;
/** 추론 관리 */
@Service
@Log4j2
@Slf4j
@RequiredArgsConstructor
@Transactional(readOnly = true)
public class InferenceResultService {
@@ -128,47 +128,40 @@ public class InferenceResultService {
*/
@Transactional
public UUID run(InferenceResultDto.RegReq req) {
if (req.getDetectOption().equals(DetectOption.EXCL.getId())) {
log.info("inference start request = {}", req);
DetectOption detectOption = req.getDetectOption();
if (detectOption == DetectOption.EXCL) {
// 추론 제외 일때 EXCL
return runExcl(req);
}
// 이전연도 도엽 사용 일때 PREV
return runPrev(req);
}
/**
* 변화탐지 옵션 추론제외 실행
* 변화탐지 [옵션 추론제외 실행]
*
* @param req
* @return
*/
public UUID runExcl(InferenceResultDto.RegReq req) {
// TODO 쿼리로 한번에 할수 있게 수정해야하나..
// 기준연도 실행가능 도엽 조회
List<MngListDto> targetMngList =
mapSheetMngCoreService.getMapSheetMngHst(
req.getTargetYyyy(), req.getMapSheetScope(), req.getMapSheetNum());
// List<MngListDto> mngList =
// mapSheetMngCoreService.findExecutableSheets(
// req.getCompareYyyy(),
// req.getTargetYyyy(),
// req.getMapSheetScope(),
// req.getMapSheetNum());
mapSheetMngCoreService.getMapSheetMngHst(req.getTargetYyyy(), req.getMapSheetNum());
if (targetMngList == null || targetMngList.isEmpty()) {
throw new CustomApiException("NOT_FOUND_MAP_SHEET_NUM", HttpStatus.NOT_FOUND);
}
log.info("targetMngList size = {}", targetMngList.size());
// 비교연도 실행가능 도엽 조회
List<MngListDto> compareMngList =
mapSheetMngCoreService.getMapSheetMngHst(
req.getCompareYyyy(), req.getMapSheetScope(), req.getMapSheetNum());
mapSheetMngCoreService.getMapSheetMngHst(req.getCompareYyyy(), req.getMapSheetNum());
if (compareMngList == null || compareMngList.isEmpty()) {
throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND);
}
log.info("compareMngList size = {}", compareMngList.size());
// compare 도엽번호 Set 구성
Set<String> compareSet =
@@ -210,14 +203,14 @@ public class InferenceResultService {
log.info(
"""
===== MapSheet Year Comparison =====
target Total: {}
compare Total: {}
Intersection: {}
target Only (Excluded): {}
compare Only: {}
====================================
""",
===== MapSheet Year Comparison =====
target Total: {}
compare Total: {}
Intersection: {}
target Only (Excluded): {}
compare Only: {}
====================================
""",
targetTotal,
compareTotal,
intersection,
@@ -268,28 +261,31 @@ public class InferenceResultService {
*/
@Transactional
public UUID runPrev(InferenceResultDto.RegReq req) {
// TODO 쿼리로 한번에 할수 있게 수정해야하나..
// 기준연도 실행가능 도엽 조회
List<MngListDto> targetMngList =
mapSheetMngCoreService.getMapSheetMngHst(
req.getTargetYyyy(), req.getMapSheetScope(), req.getMapSheetNum());
Integer targetYyyy = req.getTargetYyyy();
Integer compareYyyy = req.getCompareYyyy();
String mapSheetScope = req.getMapSheetScope();
log.info("[{}|{}}] ,{}", compareYyyy, targetYyyy, mapSheetScope);
// 기준연도 실행가능 도엽 조회[AFTER]
List<MngListDto> targetMngList =
mapSheetMngCoreService.getMapSheetMngHst(targetYyyy, req.getMapSheetNum());
log.info("[runPrev] targetMngList size = {}", targetMngList.size());
if (targetMngList == null || targetMngList.isEmpty()) {
throw new CustomApiException("NOT_FOUND_TARGET_YEAR", HttpStatus.NOT_FOUND);
}
// 비교연도 실행가능 도엽 조회
List<MngListDto> compareMngList =
mapSheetMngCoreService.getMapSheetMngHst(
req.getCompareYyyy(), req.getMapSheetScope(), req.getMapSheetNum());
mapSheetMngCoreService.getMapSheetMngHst(compareYyyy, req.getMapSheetNum());
log.info("[runPrev] compareMngList size = {}", compareMngList.size());
if (compareMngList == null || compareMngList.isEmpty()) {
throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND);
}
log.info("targetMngList size = {}", targetMngList.size());
log.info("compareMngList size = {}", compareMngList.size());
log.info("Difference in count = {}", targetMngList.size() - compareMngList.size());
log.info("[runPrev] Difference in count = {}", targetMngList.size() - compareMngList.size());
// 로그용 원본 카운트 (이전도엽 추가 전)
int targetTotal = targetMngList.size();
@@ -310,14 +306,14 @@ public class InferenceResultService {
.filter(num -> !compareSet0.contains(num))
.toList();
log.info("targetOnlyMapSheetNums in count = {}", targetOnlyMapSheetNums.size());
log.info("[runPrev] targetOnlyMapSheetNums in count = {}", targetOnlyMapSheetNums.size());
// 이전연도 초회 추가
compareMngList.addAll(
mapSheetMngCoreService.findFallbackCompareYearByMapSheets(
req.getCompareYyyy(), targetOnlyMapSheetNums));
compareYyyy, targetOnlyMapSheetNums));
log.info("fallback compare size= {}", compareMngList.size());
log.info("[runPrev] fallback compare size= {}", compareMngList.size());
// 이전연도 추가 후 compare 총 개수
int compareTotalAfterFallback = compareMngList.size();
@@ -361,15 +357,15 @@ public class InferenceResultService {
log.info(
"""
===== MapSheet Year Comparison =====
target Total: {}
compare Total(before fallback): {}
compare Total(after fallback): {}
Intersection: {}
target Only (Excluded): {}
compare Only: {}
====================================
""",
===== MapSheet Year Comparison =====
target Total: {}
compare Total(before fallback): {}
compare Total(after fallback): {}
Intersection: {}
target Only (Excluded): {}
compare Only: {}
====================================
""",
targetTotal,
compareTotalBeforeFallback,
compareTotalAfterFallback,
@@ -384,18 +380,12 @@ public class InferenceResultService {
// compare 기준 geojson 생성
Scene compareScene =
getSceneInference(
compareMngList,
req.getCompareYyyy().toString(),
req.getMapSheetScope(),
req.getDetectOption());
compareMngList, compareYyyy.toString(), mapSheetScope, req.getDetectOption());
// target 기준 geojson 생성
Scene targetScene =
getSceneInference(
req.getTargetYyyy().toString(),
mapSheetNums,
req.getMapSheetScope(),
req.getDetectOption());
targetYyyy.toString(), mapSheetNums, mapSheetScope, req.getDetectOption());
log.info("비교년도 geojson 파일 validation ===== {}", compareScene.getFilePath());
GeoJsonValidator.validateWithRequested(compareScene.getFilePath(), mapSheetNums);
@@ -671,7 +661,7 @@ public class InferenceResultService {
* @return
*/
private Scene getSceneInference(
String yyyy, List<String> mapSheetNums, String mapSheetScope, String detectOption) {
String yyyy, List<String> mapSheetNums, String mapSheetScope, DetectOption detectOption) {
// geojson 생성시 필요한 영상파일 정보 조회
List<ImageFeature> features =
@@ -697,7 +687,7 @@ public class InferenceResultService {
* @return
*/
private Scene getSceneInference(
List<MngListDto> yearDtos, String yyyy, String mapSheetScope, String detectOption) {
List<MngListDto> yearDtos, String yyyy, String mapSheetScope, DetectOption detectOption) {
List<ImageFeature> features =
mapSheetMngCoreService.loadSceneInferenceByFallbackYears(yearDtos);
@@ -982,7 +972,10 @@ public class InferenceResultService {
* @return Scene
*/
private Scene writeSceneGeoJson(
String yyyy, String mapSheetScope, String detectOption, List<ImageFeature> sceneInference) {
String yyyy,
String mapSheetScope,
DetectOption detectOption,
List<ImageFeature> sceneInference) {
boolean isAll = MapSheetScope.ALL.getId().equals(mapSheetScope);
String optionSuffix = buildOptionSuffix(detectOption);
@@ -1031,9 +1024,13 @@ public class InferenceResultService {
* @param detectOption
* @return
*/
private String buildOptionSuffix(String detectOption) {
if (DetectOption.EXCL.getId().equals(detectOption)) return "_EXCL";
if (DetectOption.PREV.getId().equals(detectOption)) return "_PREV";
private String buildOptionSuffix(DetectOption detectOption) {
if (DetectOption.EXCL == detectOption) {
return "_EXCL";
}
if (DetectOption.PREV == detectOption) {
return "_PREV";
}
return "";
}
}

View File

@@ -37,10 +37,16 @@ public class InferenceResultShpService {
@Value("${file.dataset-dir}")
private String datasetDir;
/** inference_results 테이블을 기준으로 분석 결과 테이블과 도형 테이블을 최신 상태로 반영한다. */
/**
* 추론 결과 inference 테이블 upsert
*
* @param uuid learn uuid
* @return
*/
@Transactional
public InferenceResultShpDto.InferenceCntDto saveInferenceResultData(Long id) {
return coreService.buildInferenceData(id);
public InferenceResultShpDto.InferenceCntDto saveInferenceResultData(UUID uuid) {
Long learnId = inferenceResultCoreService.getInferenceLearnIdByUuid(uuid);
return coreService.buildInferenceData(learnId);
}
/**
@@ -55,13 +61,13 @@ public class InferenceResultShpService {
return;
}
String batchId =
String batchIds =
Stream.of(dto.getM1ModelBatchId(), dto.getM2ModelBatchId(), dto.getM3ModelBatchId())
.filter(Objects::nonNull)
.map(String::valueOf)
.collect(Collectors.joining(","));
// shp 파일 비동기 생성
shpPipelineService.runPipeline(jarPath, datasetDir, batchId, dto.getUid());
shpPipelineService.runPipeline(jarPath, datasetDir, batchIds, dto.getUid());
}
}

View File

@@ -219,6 +219,9 @@ public class WorkerStatsDto {
@Deprecated
@Schema(description = "[Deprecated] inspectionRemainingCount 사용 권장")
private Long remainingInspectCount;
@Schema(description = "파일 다운로드 가능한 폴리곤 수")
private Long downloadPolygonCnt;
}
@Getter

View File

@@ -157,6 +157,7 @@ public class LabelAllocateService {
return labelAllocateCoreService.findInferenceDetail(uuid);
}
@Transactional
public ApiResponseDto.ResponseObj allocateMove(
Integer totalCnt, String uuid, List<String> targetUsers, String userId) {

View File

@@ -62,7 +62,7 @@ public class LayerService {
.orElseThrow(() -> new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST));
switch (layerType) {
case TILE -> {
case TILE, KAMCO_WMS, KAMCO_WMTS -> {
return mapLayerCoreService.saveTile(dto);
}

View File

@@ -169,6 +169,7 @@ public class AuthController {
if (refreshToken == null || !jwtTokenProvider.isValidToken(refreshToken)) {
throw new AccessDeniedException("만료되었거나 유효하지 않은 리프레시 토큰 입니다.");
}
String username = jwtTokenProvider.getSubject(refreshToken);
// Redis에 저장된 RefreshToken과 일치하는지 확인

View File

@@ -6,6 +6,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapScaleType;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapSheetList;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.PointFeatureList;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.PolygonFeatureList;
import com.kamco.cd.kamcoback.common.enums.DetectionClassification;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
@@ -108,8 +110,8 @@ public class ChangeDetectionCoreService {
* @return
*/
public ChangeDetectionDto.PolygonFeatureList getPolygonListByCd(
String chnDtctId, String cdObjectId, List<String> cdObjectIds) {
return changeDetectionRepository.getPolygonListByCd(chnDtctId, cdObjectId, cdObjectIds);
String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu) {
return changeDetectionRepository.getPolygonListByCd(chnDtctId, cdObjectId, cdObjectIds, pnu);
}
/**
@@ -121,8 +123,8 @@ public class ChangeDetectionCoreService {
* @return
*/
public ChangeDetectionDto.PointFeatureList getPointListByCd(
String chnDtctId, String cdObjectId, List<String> cdObjectIds) {
return changeDetectionRepository.getPointListByCd(chnDtctId, cdObjectId, cdObjectIds);
String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu) {
return changeDetectionRepository.getPointListByCd(chnDtctId, cdObjectId, cdObjectIds, pnu);
}
/**
@@ -136,4 +138,12 @@ public class ChangeDetectionCoreService {
.getLearnUuid(chnDtctId)
.orElseThrow(() -> new CustomApiException("NOT_FOUND_DATA", HttpStatus.NOT_FOUND));
}
public PolygonFeatureList getChangeDetectionPnuPolygonList(UUID uuid, String pnu) {
return changeDetectionRepository.getChangeDetectionPnuPolygonList(uuid, pnu);
}
public PointFeatureList getChangeDetectionPnuPointList(UUID uuid, String pnu) {
return changeDetectionRepository.getChangeDetectionPnuPointList(uuid, pnu);
}
}

View File

@@ -81,4 +81,8 @@ public class GukYuinCoreService {
public List<String> findStbltObjectIds(String uid, String mapSheetNum) {
return gukYuinRepository.findStbltObjectIds(uid, mapSheetNum);
}
public Integer updateStbltRandomData(String uid, int updateCnt) {
return gukYuinRepository.updateStbltRandomData(uid, updateCnt);
}
}

View File

@@ -0,0 +1,26 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.repository.gukyuin.GukYuinPnuCntUpdateJobRepository;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
 * Core service for the GukYuin PNU-count update job.
 *
 * <p>A thin transactional facade: each public method delegates to the
 * corresponding repository operation inside its own transaction.
 */
@Service
public class GukYuinPnuCntUpdateJobCoreService {

    // Repository executing the underlying update statements.
    private final GukYuinPnuCntUpdateJobRepository repository;

    /**
     * Creates the service.
     *
     * @param repository repository used for all update operations
     */
    public GukYuinPnuCntUpdateJobCoreService(GukYuinPnuCntUpdateJobRepository repository) {
        this.repository = repository;
    }

    /** Delegates to the repository's content-list PNU update-count refresh in one transaction. */
    @Transactional
    public void updateGukYuinContListPnuUpdateCnt() {
        this.repository.updateGukYuinContListPnuUpdateCnt();
    }

    /**
     * Delegates the apply-status update for a single record to the repository.
     *
     * @param uid id of the record to update
     * @param status new apply-status value
     */
    @Transactional
    public void updateGukYuinApplyStatus(String uid, String status) {
        this.repository.updateGukYuinApplyStatus(uid, status);
    }
}

View File

@@ -36,6 +36,7 @@ import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.constraints.NotNull;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.LinkedHashMap;
@@ -46,7 +47,7 @@ import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import lombok.extern.slf4j.Slf4j;
import org.springframework.dao.DataAccessException;
import org.springframework.data.domain.Page;
import org.springframework.http.HttpStatus;
@@ -54,7 +55,7 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@Log4j2
@Slf4j
@RequiredArgsConstructor
public class InferenceResultCoreService {
@@ -119,7 +120,7 @@ public class InferenceResultCoreService {
mapSheetLearnEntity.setCompareYyyy(req.getCompareYyyy());
mapSheetLearnEntity.setTargetYyyy(req.getTargetYyyy());
mapSheetLearnEntity.setMapSheetScope(req.getMapSheetScope());
mapSheetLearnEntity.setDetectOption(req.getDetectOption());
mapSheetLearnEntity.setDetectOption(req.getDetectOption().getId());
mapSheetLearnEntity.setCreatedUid(userUtil.getId());
mapSheetLearnEntity.setMapSheetCnt(mapSheetName);
mapSheetLearnEntity.setDetectingCnt(0L);
@@ -271,7 +272,7 @@ public class InferenceResultCoreService {
.getInferenceResultByUuid(request.getUuid())
.orElseThrow(EntityNotFoundException::new);
// M1/M2/M3 영역 업데이트
// G1/G2/G3 영역 업데이트
if (request.getType() != null) {
applyModelUpdate(entity, request);
}
@@ -453,12 +454,18 @@ public class InferenceResultCoreService {
* @return
*/
public AnalResultInfo getInferenceResultInfo(UUID uuid) {
// 추론 결과 정보조회
log.info("get inference result info start time = {}", LocalDateTime.now());
AnalResultInfo resultInfo = mapSheetLearnRepository.getInferenceResultInfo(uuid);
log.info("get inference result info end time = {}", LocalDateTime.now());
// bbox, point 조회
log.info("get inference result info bbox start time = {}", LocalDateTime.now());
BboxPointDto bboxPointDto = mapSheetLearnRepository.getBboxPoint(uuid);
log.info("get inference result info bbox end time = {}", LocalDateTime.now());
resultInfo.setBboxGeom(bboxPointDto.getBboxGeom());
resultInfo.setBboxCenterPoint(bboxPointDto.getBboxCenterPoint());
return resultInfo;
}
@@ -469,7 +476,10 @@ public class InferenceResultCoreService {
* @return 분류별 탐지건수 정보
*/
public List<Dashboard> getInferenceClassCountList(UUID uuid) {
return mapSheetLearnRepository.getInferenceClassCountList(uuid);
log.info("get inference class count list start time = {}", LocalDateTime.now());
List<Dashboard> classCountList = mapSheetLearnRepository.getInferenceClassCountList(uuid);
log.info("get inference class count list end time = {}", LocalDateTime.now());
return classCountList;
}
/**
@@ -478,7 +488,11 @@ public class InferenceResultCoreService {
* @return geom 목록 정보
*/
public Page<Geom> getInferenceGeomList(UUID uuid, SearchGeoReq searchGeoReq) {
return mapSheetLearnRepository.getInferenceGeomList(uuid, searchGeoReq);
log.info("get Inference Geom List start time = {}", LocalDateTime.now());
Page<Geom> geom = mapSheetLearnRepository.getInferenceGeomList(uuid, searchGeoReq);
log.info("get Inference Geom List end time = {}", LocalDateTime.now());
return geom;
}
/**
@@ -488,9 +502,13 @@ public class InferenceResultCoreService {
*/
@Transactional
public void upsertGeomData(Long id) {
// 추론 결과 목록 저장
Long analId = inferenceResultRepository.upsertGroupsFromMapSheetAnal(id);
// 추론 결과 상세 저장
inferenceResultRepository.upsertGroupsFromInferenceResults(analId);
// geom 목록 추론 결과 저장
inferenceResultRepository.upsertGeomsFromInferenceResults(analId);
// 집계 추론 결과 저장
inferenceResultRepository.upsertSttcFromInferenceResults(analId);
}

View File

@@ -1,12 +1,17 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@Log4j2
@RequiredArgsConstructor
public class InferenceResultShpCoreService {
@@ -15,15 +20,33 @@ public class InferenceResultShpCoreService {
/**
* inference_results 기준으로 - tb_map_sheet_anal_data_inference -
* tb_map_sheet_anal_data_inference_geom 테이블을 최신 상태로 구성한다.
*
* @param id learn id
* @return
*/
@Transactional
public InferenceResultShpDto.InferenceCntDto buildInferenceData(Long id) {
Long analId = repo.upsertGroupsFromMapSheetAnal(id);
repo.upsertGroupsFromInferenceResults(analId);
repo.upsertGeomsFromInferenceResults(analId);
repo.upsertSttcFromInferenceResults(analId);
InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto();
MapSheetAnalInferenceEntity analInferenceEntity =
repo.getAnalInferenceDataByLearnId(id).orElse(null);
if (analInferenceEntity != null) {
throw new CustomApiException("CONFLICT", HttpStatus.CONFLICT);
}
Long analId = repo.upsertGroupsFromMapSheetAnal(id);
int analDataCnt = repo.upsertGroupsFromInferenceResults(analId);
int geomCnt = repo.upsertGeomsFromInferenceResults(analId);
int sttcCnt = repo.upsertSttcFromInferenceResults(analId);
InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto();
cntDto.setSheetAnalDataCnt(analDataCnt);
cntDto.setInferenceGeomCnt(geomCnt);
cntDto.setInferenceSttcnt(sttcCnt);
log.info(
"[ANAL SAVE] analId={}, tb_map_sheet_anal_data_inference={}, tb_map_sheet_anal_data_inference_geom={}, tb_map_sheet_anal_sttc={}",
analId,
analDataCnt,
geomCnt,
sttcCnt);
return cntDto;
}
}

View File

@@ -345,16 +345,15 @@ public class MapSheetMngCoreService {
* 변화탐지 실행 가능 비교년도 조회
*
* @param mngYyyy 비교년도
* @param mapId 5k 도엽번호
* @param mapIds 5k 도엽번호
* @return List<MngListCompareDto>
*/
public List<MngListCompareDto> getByHstMapSheetCompareList(int mngYyyy, List<String> mapId) {
return mapSheetMngYearRepository.findByHstMapSheetCompareList(mngYyyy, mapId);
public List<MngListCompareDto> getByHstMapSheetCompareList(int mngYyyy, List<String> mapIds) {
return mapSheetMngYearRepository.findByHstMapSheetCompareList(mngYyyy, mapIds);
}
public List<MngListDto> getMapSheetMngHst(
Integer year, String mapSheetScope, List<String> mapSheetNum) {
return mapSheetMngRepository.getMapSheetMngHst(year, mapSheetScope, mapSheetNum);
public List<MngListDto> getMapSheetMngHst(Integer year, List<String> mapSheetNums50k) {
return mapSheetMngRepository.getMapSheetMngHst(year, mapSheetNums50k);
}
/**

View File

@@ -11,37 +11,94 @@ import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
/**
 * GPU metrics entity.
 *
 * <p>JPA entity storing GPU performance and resource-usage metrics for a
 * server: GPU compute utilization and memory usage for GPU resource
 * monitoring.
 *
 * <p>Data source: the {@code nvidia-smi} command or NVML (NVIDIA Management
 * Library).
 *
 * <p>Use cases:
 *
 * <ul>
 *   <li>AI/ML training monitoring: tracking GPU utilization during deep-learning jobs
 *   <li>Resource optimization: detecting GPU memory shortage or idle GPUs
 *   <li>Capacity planning: predicting when additional GPUs will be needed
 *   <li>Alerting: warn when gpuUtil > 95% or gpuMemUsed/gpuMemTotal > 90%
 * </ul>
 */
@Getter
@Setter
@Entity
@Table(name = "gpu_metrics")
public class GpuMetricEntity {

    /** Primary key (UUID, auto-generated). */
    @Id
    @ColumnDefault("gen_random_uuid()")
    @Column(name = "uuid", nullable = false)
    private UUID id;

    /** Sequence-based secondary id. */
    @NotNull
    @ColumnDefault("nextval('gpu_metrics_id_seq')")
    @Column(name = "id", nullable = false)
    private Integer id1;

    /** Metric collection time (zone-aware; defaults to the current time). */
    @NotNull
    @ColumnDefault("now()")
    @Column(name = "\"timestamp\"", nullable = false)
    private OffsetDateTime timestamp;

    /** Name of the monitored server. */
    @NotNull
    @Column(name = "server_name", nullable = false, length = Integer.MAX_VALUE)
    private String serverName;

    /**
     * GPU compute utilization (percent).
     *
     * <p>Indicates how busy the GPU cores are with computation.
     *
     * <p>Range: 0.0 – 100.0
     *
     * <p>Example: 85.5 = the GPU is 85.5% utilized
     *
     * <p>Data source: nvidia-smi 'utilization.gpu' or NVML's
     * nvmlDeviceGetUtilizationRates
     *
     * <p>Note: high utilization (>90%) means the GPU is well used; low
     * utilization may indicate the bottleneck is elsewhere (CPU, I/O)
     */
    @Column(name = "gpu_util")
    private Float gpuUtil;

    /**
     * GPU memory in use (MB).
     *
     * <p>Amount of GPU memory currently allocated and in use.
     *
     * <p>Example: 10240.0 = about 10 GB of GPU memory in use
     *
     * <p>Data source: nvidia-smi 'memory.used' or NVML's
     * nvmlDeviceGetMemoryInfo
     *
     * <p>Used for: sizing deep-learning models, tuning batch size, predicting
     * OOM (Out Of Memory) errors
     */
    @Column(name = "gpu_mem_used")
    private Float gpuMemUsed;

    /**
     * Total GPU memory capacity (MB).
     *
     * <p>Total memory installed on the GPU.
     *
     * <p>Example: 16384.0 = 16 GB VRAM installed
     *
     * <p>Data source: nvidia-smi 'memory.total' or NVML's
     * nvmlDeviceGetMemoryInfo
     *
     * <p>Formula: memory usage (%) = (gpuMemUsed / gpuMemTotal) × 100
     *
     * <p>Usage: free memory = gpuMemTotal - gpuMemUsed
     */
    @Column(name = "gpu_mem_total")
    private Float gpuMemTotal;
}

View File

@@ -202,6 +202,33 @@ public class MapSheetLearnEntity {
@Column(name = "chn_dtct_mst_id")
private String chnDtctMstId;
@Column(name = "shp_create_status")
private String shp_create_status;
@Column(name = "shp_create_message")
private String shp_create_message;
@Column(name = "shp_create_status_dttm")
private ZonedDateTime shp_create_status_dttm;
@Column(name = "shp_status")
private String shp_status;
@Column(name = "shp_stage")
private String shp_stage;
@Column(name = "shp_started_dttm")
private ZonedDateTime shp_started_dttm;
@Column(name = "shp_ended_dttm")
private ZonedDateTime shp_ended_dttm;
@Column(name = "shp_last_message")
private String shp_last_message;
@Column(name = "shp_error_message")
private String shp_error_message;
public InferenceResultDto.ResultList toDto() {
return new InferenceResultDto.ResultList(
this.uuid,

View File

@@ -53,8 +53,6 @@ import lombok.NoArgsConstructor;
* system leveraging 1:5k map data.
*/
@Getter
// entity의 접근제어를 위해 @setter를 사용 x
// @Setter
@NoArgsConstructor(access = AccessLevel.PROTECTED)
@Entity
// 영상관리이력
@@ -92,7 +90,7 @@ public class MapSheetMngHstEntity extends CommonDateEntity {
private Integer scaleRatio;
@Column(name = "data_state", length = 20)
private String dataState;
private String dataState; // DONE,NOTYET 둘중하나임 같은연도는 같은값
@Column(name = "data_state_dttm")
private ZonedDateTime dataStateDttm;
@@ -165,13 +163,4 @@ public class MapSheetMngHstEntity extends CommonDateEntity {
@Column(name = "upload_id")
private String uploadId;
// 파일정보 업데이트
public void updateFileInfos(Long tifSizeBytes, Long tfwSizeBytes) {
tifSizeBytes = tifSizeBytes == null ? 0L : tifSizeBytes;
tfwSizeBytes = tfwSizeBytes == null ? 0L : tfwSizeBytes;
this.tifSizeBytes = tifSizeBytes;
this.tfwSizeBytes = tfwSizeBytes;
this.totalSizeBytes = tifSizeBytes + tfwSizeBytes;
}
}

View File

@@ -11,48 +11,101 @@ import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
/**
 * System metrics entity.
 *
 * <p>JPA entity storing performance metrics of a server system. Manages system-resource
 * monitoring data such as CPU and memory usage.
 *
 * <p>Data source: the Linux {@code sar} command or the {@code /proc/meminfo} file.
 *
 * <p>Use cases:
 *
 * <ul>
 *   <li>Capacity planning: predicting when additional memory will be needed
 *   <li>Performance monitoring: detecting low-memory situations
 *   <li>Trend analysis: memory-usage patterns per time of day
 *   <li>Alerting: warn when memused &gt; 90%
 * </ul>
 */
@Getter
@Setter
@Entity
@Table(name = "system_metrics")
public class SystemMetricEntity {

  /** Primary key (UUID, generated by the database default). */
  @Id
  @ColumnDefault("gen_random_uuid()")
  @Column(name = "uuid", nullable = false)
  private UUID id;

  /** Sequence-based secondary ID (maps to column "id"). */
  @NotNull
  @ColumnDefault("nextval('system_metrics_id_seq')")
  @Column(name = "id", nullable = false)
  private Integer id1;

  /** Timestamp when the metric was collected (with time zone). */
  @NotNull
  @Column(name = "\"timestamp\"", nullable = false)
  private OffsetDateTime timestamp;

  /** Name of the monitored server. */
  @NotNull
  @Column(name = "server_name", nullable = false, length = Integer.MAX_VALUE)
  private String serverName;

  /** CPU time spent in user processes (%) — application workload. */
  @Column(name = "cpu_user")
  private Float cpuUser;

  /** CPU time spent in system processes (%) — kernel work. */
  @Column(name = "cpu_system")
  private Float cpuSystem;

  /** CPU time spent waiting on I/O (%) — disk/network waits. */
  @Column(name = "cpu_iowait")
  private Float cpuIowait;

  /** Idle CPU time (%) — available headroom. */
  @Column(name = "cpu_idle")
  private Float cpuIdle;

  /**
   * Free memory (KB).
   *
   * <p>Amount of physical memory immediately available to the system.
   *
   * <p>Example: 4194304 = roughly 4 GB free.
   *
   * <p>Data source: MemFree in /proc/meminfo.
   */
  @Column(name = "kbmemfree")
  private Long kbmemfree;

  /**
   * Memory currently in use (KB).
   *
   * <p>Amount of physical memory currently allocated and in use.
   *
   * <p>Example: 8388608 = roughly 8 GB in use.
   *
   * <p>Computed as: MemTotal - MemFree.
   */
  @Column(name = "kbmemused")
  private Long kbmemused;

  /**
   * Memory utilisation (percentage).
   *
   * <p>Share of total memory that is in use.
   *
   * <p>Formula: (kbmemused / (kbmemused + kbmemfree)) × 100
   *
   * <p>Example: 66.7 = 66.7% of total memory in use.
   *
   * <p>Relation: total memory = kbmemused + kbmemfree.
   */
  @Column(name = "memused")
  private Float memused;
}

View File

@@ -1,19 +1,50 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import java.util.Optional;
import java.util.UUID;
public interface InferenceResultRepositoryCustom {
/**
* tb_map_sheet_anal_inference 추론 결과 목록 저장
*
* @param id learn 테이블 id
* @return
*/
Long upsertGroupsFromMapSheetAnal(Long id);
void upsertGroupsFromInferenceResults(Long analId);
/**
* tb_map_sheet_anal_data_inference 추론 결과 상세 저장
*
* @param analId
* @return
*/
int upsertGroupsFromInferenceResults(Long analId);
void upsertGeomsFromInferenceResults(Long analId);
/**
* tb_map_sheet_anal_data_inference_geom geom 목록 추론 결과 저장
*
* @param analId
* @return
*/
int upsertGeomsFromInferenceResults(Long analId);
void upsertSttcFromInferenceResults(Long analId);
/**
* tb_map_sheet_anal_sttc 집계 추론 결과 저장
*
* @param analId
* @return
*/
int upsertSttcFromInferenceResults(Long analId);
/**
* 추론실행 목록 uuid 조회
*
* @param uuid 추론 uuid
* @return 추론 실행 정보
*/
Long getInferenceLearnIdByUuid(UUID uuid);
/**
@@ -23,4 +54,12 @@ public interface InferenceResultRepositoryCustom {
* @return 추론 정보
*/
Optional<MapSheetLearnEntity> getInferenceUid(UUID uuid);
/**
* learn id 로 analInference 값 조회
*
* @param id 추론 id
* @return
*/
Optional<MapSheetAnalInferenceEntity> getAnalInferenceDataByLearnId(Long id);
}

View File

@@ -1,8 +1,10 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelMngState;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
@@ -80,7 +82,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
* @return 반영된 행 수
*/
@Override
public void upsertGroupsFromInferenceResults(Long analId) {
public int upsertGroupsFromInferenceResults(Long analId) {
String sql =
"""
@@ -124,7 +126,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
updated_dttm = now()
""";
em.createNativeQuery(sql).setParameter("analId", analId).executeUpdate();
return em.createNativeQuery(sql).setParameter("analId", analId).executeUpdate();
}
/**
@@ -136,7 +138,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
* @return 반영된 행 수
*/
@Override
public void upsertGeomsFromInferenceResults(Long analUid) {
public int upsertGeomsFromInferenceResults(Long analUid) {
String sql =
"""
@@ -214,6 +216,9 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
WHERE msl.anal_uid = :analUid
AND r.after_c is not null
AND r.after_p is not null
AND r.probability is not null
AND r.before_c is not null
AND r.before_p is not null
ORDER BY r.uid, r.created_date DESC NULLS LAST
) x
ON CONFLICT (result_uid)
@@ -229,11 +234,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
ref_map_sheet_num = EXCLUDED.ref_map_sheet_num
""";
em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
return em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
}
@Override
public void upsertSttcFromInferenceResults(Long analUid) {
public int upsertSttcFromInferenceResults(Long analUid) {
String sql =
"""
@@ -306,7 +311,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
,updated_uid = EXCLUDED.updated_uid
""";
em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
return em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
}
// ===============================
@@ -331,4 +336,14 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
.where(mapSheetLearnEntity.uuid.eq(uuid))
.fetchOne());
}
@Override
public Optional<MapSheetAnalInferenceEntity> getAnalInferenceDataByLearnId(Long id) {
return Optional.ofNullable(
queryFactory
.select(mapSheetAnalInferenceEntity)
.from(mapSheetAnalInferenceEntity)
.where(mapSheetAnalInferenceEntity.learnId.eq(id))
.fetchOne());
}
}

View File

@@ -23,11 +23,12 @@ public class InferenceResultsTestingRepositoryImpl
.select(inferenceResultsTestingEntity)
.from(inferenceResultsTestingEntity)
.where(
inferenceResultsTestingEntity
.batchId
.in(batchIds)
.and(inferenceResultsTestingEntity.afterC.isNotNull())
.and(inferenceResultsTestingEntity.afterP.isNotNull()))
inferenceResultsTestingEntity.batchId.in(batchIds),
inferenceResultsTestingEntity.afterC.isNotNull(),
inferenceResultsTestingEntity.afterP.isNotNull(),
inferenceResultsTestingEntity.beforeC.isNotNull(),
inferenceResultsTestingEntity.beforeP.isNotNull(),
inferenceResultsTestingEntity.probability.isNotNull())
.fetch();
}
@@ -44,7 +45,10 @@ public class InferenceResultsTestingRepositoryImpl
.where(
inferenceResultsTestingEntity.batchId.in(batchIds),
inferenceResultsTestingEntity.afterC.isNotNull(),
inferenceResultsTestingEntity.afterP.isNotNull())
inferenceResultsTestingEntity.afterP.isNotNull(),
inferenceResultsTestingEntity.beforeC.isNotNull(),
inferenceResultsTestingEntity.beforeP.isNotNull(),
inferenceResultsTestingEntity.probability.isNotNull())
.fetchOne();
return cnt == null ? 0L : cnt;
@@ -62,11 +66,12 @@ public class InferenceResultsTestingRepositoryImpl
inferenceResultsTestingEntity.afterYear.max()))
.from(inferenceResultsTestingEntity)
.where(
inferenceResultsTestingEntity
.batchId
.in(batchIds)
.and(inferenceResultsTestingEntity.afterC.isNotNull())
.and(inferenceResultsTestingEntity.afterP.isNotNull()))
inferenceResultsTestingEntity.batchId.in(batchIds),
inferenceResultsTestingEntity.afterC.isNotNull(),
inferenceResultsTestingEntity.afterP.isNotNull(),
inferenceResultsTestingEntity.beforeC.isNotNull(),
inferenceResultsTestingEntity.beforeP.isNotNull(),
inferenceResultsTestingEntity.probability.isNotNull())
.groupBy(
inferenceResultsTestingEntity.batchId,
inferenceResultsTestingEntity.modelVersion,

View File

@@ -315,7 +315,8 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
Expressions.stringTemplate("substring({0} from 1 for 8)", mapSheetLearnEntity.uid),
mapSheetLearnEntity.applyYn,
mapSheetLearnEntity.applyDttm,
mapSheetAnalInferenceEntity.uuid))
mapSheetAnalInferenceEntity.uuid,
mapSheetLearnEntity.status))
.from(mapSheetLearnEntity)
.leftJoin(m1)
.on(mapSheetLearnEntity.m1ModelUuid.eq(m1.uuid))
@@ -518,7 +519,7 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
.fetchOne();
if (learn == null) {
return new GukYuinLinkFacts(false, false, false, false);
return new GukYuinLinkFacts(false, false, false, false, null);
}
// 부분 도엽 실행인지 확인
@@ -528,19 +529,21 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
QMapSheetLearnEntity learn2 = new QMapSheetLearnEntity("learn2");
QMapSheetLearnEntity learnQ = QMapSheetLearnEntity.mapSheetLearnEntity;
// 실행중인 추론 있는지 확인
boolean hasRunningInference =
// 현재 국유인 연동하려는 추론의 비교년도,기준년도와 같은 회차 중, 할당되거나 진행중인 학습데이터 uuid 조회
// ex. 2022-2023년도 9회차 학습데이터 제작 진행중 -> 10회차 연동하려고 할 시, 먼저 9회차를 종료해야 함
UUID runningInferenceUuid =
queryFactory
.selectOne()
.from(inf)
.join(learn2)
.on(inf.learnId.eq(learn2.id))
.where(
learn2.compareYyyy.eq(learn.getCompareYyyy()),
learn2.targetYyyy.eq(learn.getTargetYyyy()),
inf.analState.in("ASSIGNED", "ING"))
.fetchFirst()
!= null;
.select(inf.uuid)
.from(inf)
.join(learn2)
.on(inf.learnId.eq(learn2.id))
.where(
learn2.compareYyyy.eq(learn.getCompareYyyy()),
learn2.targetYyyy.eq(learn.getTargetYyyy()),
inf.analState.in("ASSIGNED", "ING"))
.fetchFirst();
boolean hasRunningInference = runningInferenceUuid != null;
// 국유인 작업 진행중 있는지 확인
boolean hasOtherUnfinishedGukYuin =
@@ -555,6 +558,7 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
.fetchFirst()
!= null;
return new GukYuinLinkFacts(true, isPartScope, hasRunningInference, hasOtherUnfinishedGukYuin);
return new GukYuinLinkFacts(
true, isPartScope, hasRunningInference, hasOtherUnfinishedGukYuin, runningInferenceUuid);
}
}

View File

@@ -3,6 +3,8 @@ package com.kamco.cd.kamcoback.postgres.repository.changedetection;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapScaleType;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapSheetList;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.PointFeatureList;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.PolygonFeatureList;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@@ -31,10 +33,10 @@ public interface ChangeDetectionRepositoryCustom {
List<MapSheetList> getChangeDetectionMapSheet50kList(UUID uuid);
ChangeDetectionDto.PolygonFeatureList getPolygonListByCd(
String chnDtctId, String cdObjectId, List<String> cdObjectIds);
String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu);
ChangeDetectionDto.PointFeatureList getPointListByCd(
String chnDtctId, String cdObjectId, List<String> cdObjectIds);
String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu);
ChangeDetectionDto.PolygonFeatureList getSelectedChangeDetectionPolygonListByPnu(
String chnDtctId, String pnu);
@@ -43,4 +45,8 @@ public interface ChangeDetectionRepositoryCustom {
String chnDtctId, String pnu);
Optional<UUID> getLearnUuid(String chnDtctId);
PolygonFeatureList getChangeDetectionPnuPolygonList(UUID uuid, String pnu);
PointFeatureList getChangeDetectionPnuPointList(UUID uuid, String pnu);
}

View File

@@ -18,7 +18,9 @@ import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.DetectSearc
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapScaleType;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapSheetList;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.PointFeatureList;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.PointQueryData;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.PolygonFeatureList;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.PolygonQueryData;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity;
@@ -31,6 +33,7 @@ import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.impl.JPAQuery;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.List;
import java.util.Objects;
@@ -391,16 +394,23 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
@Override
public PolygonFeatureList getPolygonListByCd(
String chnDtctId, String cdObjectId, List<String> cdObjectIds) {
String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu) {
BooleanBuilder builder = new BooleanBuilder();
builder.and(mapSheetLearnEntity.uid.eq(chnDtctId));
builder.and(
mapSheetAnalDataInferenceGeomEntity
.resultUid
.eq(chnDtctId)
.or(mapSheetAnalDataInferenceGeomEntity.resultUid.in(cdObjectIds)));
List<ChangeDetectionDto.PolygonQueryData> list =
// pnu가 들어온 경우
boolean hasPnu = pnu != null && !pnu.isBlank();
// pnu 가 없을 때
if (!hasPnu) {
builder.and(
mapSheetAnalDataInferenceGeomEntity
.resultUid
.eq(cdObjectId)
.or(mapSheetAnalDataInferenceGeomEntity.resultUid.in(cdObjectIds)));
}
JPAQuery<PolygonQueryData> query =
queryFactory
.select(
Projections.constructor(
@@ -425,10 +435,19 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
.innerJoin(mapSheetAnalInferenceEntity)
.on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
.innerJoin(mapSheetLearnEntity)
.on(mapSheetLearnEntity.id.eq(mapSheetAnalInferenceEntity.learnId))
.where(builder)
.orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc())
.fetch();
.on(mapSheetLearnEntity.id.eq(mapSheetAnalInferenceEntity.learnId));
// pnu 조건이면 tb_pnu join 추가
if (hasPnu) {
query
.innerJoin(pnuEntity)
.on(pnuEntity.geo.geoUid.eq(mapSheetAnalDataInferenceGeomEntity.geoUid));
builder.and(pnuEntity.pnu.eq(pnu));
}
List<ChangeDetectionDto.PolygonQueryData> list =
query.where(builder).orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc()).fetch();
ObjectMapper mapper = new ObjectMapper();
List<ChangeDetectionDto.PolygonFeature> result =
@@ -470,16 +489,23 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
@Override
public PointFeatureList getPointListByCd(
String chnDtctId, String cdObjectId, List<String> cdObjectIds) {
String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu) {
BooleanBuilder builder = new BooleanBuilder();
builder.and(mapSheetLearnEntity.uid.eq(chnDtctId));
builder.and(
mapSheetAnalDataInferenceGeomEntity
.resultUid
.eq(chnDtctId)
.or(mapSheetAnalDataInferenceGeomEntity.resultUid.in(cdObjectIds)));
List<ChangeDetectionDto.PointQueryData> list =
// pnu가 들어온 경우
boolean hasPnu = pnu != null && !pnu.isBlank();
// pnu 가 없을 때
if (!hasPnu) {
builder.and(
mapSheetAnalDataInferenceGeomEntity
.resultUid
.eq(cdObjectId)
.or(mapSheetAnalDataInferenceGeomEntity.resultUid.in(cdObjectIds)));
}
JPAQuery<PointQueryData> query =
queryFactory
.select(
Projections.constructor(
@@ -498,9 +524,19 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
.innerJoin(mapSheetAnalInferenceEntity)
.on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
.innerJoin(mapSheetLearnEntity)
.on(mapSheetLearnEntity.id.eq(mapSheetAnalInferenceEntity.learnId))
.where(builder)
.fetch();
.on(mapSheetLearnEntity.id.eq(mapSheetAnalInferenceEntity.learnId));
// pnu 조건이면 tb_pnu join 추가
if (hasPnu) {
query
.innerJoin(pnuEntity)
.on(pnuEntity.geo.geoUid.eq(mapSheetAnalDataInferenceGeomEntity.geoUid));
builder.and(pnuEntity.pnu.eq(pnu));
}
List<ChangeDetectionDto.PointQueryData> list =
query.where(builder).orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc()).fetch();
ObjectMapper mapper = new ObjectMapper();
List<ChangeDetectionDto.PointFeature> result =
@@ -659,4 +695,120 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
.where(mapSheetLearnEntity.uid.eq(chnDtctId))
.fetchOne());
}
/**
 * Returns the change-detection polygons of an inference run as a GeoJSON FeatureCollection,
 * restricted to geometries linked to the given PNU (parcel number) via tb_pnu.
 *
 * @param uuid uuid of the inference run (tb_map_sheet_anal_inference)
 * @param pnu parcel number to filter by
 * @return FeatureCollection of polygon features with before/after class and probability
 *     properties
 */
@Override
public PolygonFeatureList getChangeDetectionPnuPolygonList(UUID uuid, String pnu) {
  // Fetch polygon rows: geom -> data -> inference, joined to tb_pnu for the pnu filter.
  List<ChangeDetectionDto.PolygonQueryData> list =
      queryFactory
          .select(
              Projections.constructor(
                  ChangeDetectionDto.PolygonQueryData.class,
                  Expressions.stringTemplate("{0}", "Feature"),
                  Expressions.stringTemplate(
                      "ST_AsGeoJSON({0})", mapSheetAnalDataInferenceGeomEntity.geom),
                  mapSheetAnalDataInferenceGeomEntity.geoUid,
                  mapSheetAnalDataInferenceGeomEntity.area,
                  mapSheetAnalDataInferenceGeomEntity.compareYyyy,
                  mapSheetAnalDataInferenceGeomEntity.classBeforeProb,
                  mapSheetAnalDataInferenceGeomEntity.classBeforeCd.toUpperCase(),
                  mapSheetAnalDataInferenceGeomEntity.targetYyyy,
                  mapSheetAnalDataInferenceGeomEntity.classAfterProb,
                  mapSheetAnalDataInferenceGeomEntity.classAfterCd.toUpperCase(),
                  mapSheetAnalDataInferenceGeomEntity.cdProb,
                  mapSheetAnalDataInferenceGeomEntity.uuid,
                  mapSheetAnalDataInferenceGeomEntity.resultUid))
          .from(mapSheetAnalDataInferenceGeomEntity)
          .innerJoin(mapSheetAnalDataInferenceEntity)
          .on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
          .innerJoin(mapSheetAnalInferenceEntity)
          .on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
          .innerJoin(pnuEntity)
          .on(mapSheetAnalDataInferenceGeomEntity.geoUid.eq(pnuEntity.geo.geoUid))
          .where(mapSheetAnalInferenceEntity.uuid.eq(uuid), pnuEntity.pnu.eq(pnu))
          .orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc())
          .fetch();

  ObjectMapper mapper = new ObjectMapper();

  // Convert each row: parse the ST_AsGeoJSON string into a JsonNode and wrap with properties.
  List<ChangeDetectionDto.PolygonFeature> result =
      list.stream()
          .map(
              data -> {
                String geoJson = data.getGeometry();
                JsonNode jsonNode;
                try {
                  jsonNode = mapper.readTree(geoJson);
                } catch (JsonProcessingException e) {
                  // Should not happen for DB-produced GeoJSON; surface as unchecked.
                  throw new RuntimeException(e);
                }
                ChangeDetectionDto.PolygonProperties properties =
                    new ChangeDetectionDto.PolygonProperties(
                        data.getGeoUid(),
                        data.getArea(),
                        data.getBeforeYear(),
                        data.getBeforeConfidence(),
                        data.getBeforeClass(),
                        data.getAfterYear(),
                        data.getAfterConfidence(),
                        data.getAfterClass(),
                        data.getCdProb(),
                        data.getUuid(),
                        data.getResultUid());
                return new ChangeDetectionDto.PolygonFeature(
                    data.getType(), jsonNode, properties);
              })
          .collect(Collectors.toList());

  // Assemble the GeoJSON FeatureCollection envelope.
  ChangeDetectionDto.PolygonFeatureList polygonList = new ChangeDetectionDto.PolygonFeatureList();
  polygonList.setType("FeatureCollection");
  polygonList.setFeatures(result);
  return polygonList;
}
/**
 * Returns the change-detection center points of an inference run as a GeoJSON FeatureCollection,
 * restricted to geometries linked to the given PNU (parcel number) via tb_pnu.
 *
 * @param uuid uuid of the inference run (tb_map_sheet_anal_inference)
 * @param pnu parcel number to filter by
 * @return FeatureCollection of point features (geometry center + after-class property)
 */
@Override
public PointFeatureList getChangeDetectionPnuPointList(UUID uuid, String pnu) {
  // Fetch point rows: geom -> data -> inference, joined to tb_pnu for the pnu filter.
  List<ChangeDetectionDto.PointQueryData> list =
      queryFactory
          .select(
              Projections.constructor(
                  ChangeDetectionDto.PointQueryData.class,
                  Expressions.stringTemplate("{0}", "Feature"),
                  Expressions.stringTemplate(
                      "ST_AsGeoJSON({0})",
                      mapSheetAnalDataInferenceGeomEntity.geomCenter), // point
                  Projections.constructor(
                      ChangeDetectionDto.PointProperties.class,
                      mapSheetAnalDataInferenceGeomEntity.geoUid,
                      mapSheetAnalDataInferenceGeomEntity.classAfterCd.toUpperCase())))
          .from(mapSheetAnalDataInferenceGeomEntity)
          .innerJoin(mapSheetAnalDataInferenceEntity)
          .on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
          .innerJoin(mapSheetAnalInferenceEntity)
          .on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
          .innerJoin(pnuEntity)
          .on(mapSheetAnalDataInferenceGeomEntity.geoUid.eq(pnuEntity.geo.geoUid))
          .where(mapSheetAnalInferenceEntity.uuid.eq(uuid), pnuEntity.pnu.eq(pnu))
          .fetch();

  ObjectMapper mapper = new ObjectMapper();

  // Convert each row: parse the ST_AsGeoJSON string into a JsonNode and wrap as a feature.
  List<ChangeDetectionDto.PointFeature> result =
      list.stream()
          .map(
              data -> {
                String geoJson = data.getGeometry();
                JsonNode jsonNode;
                try {
                  jsonNode = mapper.readTree(geoJson);
                } catch (JsonProcessingException e) {
                  // Should not happen for DB-produced GeoJSON; surface as unchecked.
                  throw new RuntimeException(e);
                }
                return new ChangeDetectionDto.PointFeature(
                    data.getType(), jsonNode, data.getProperties());
              })
          .collect(Collectors.toList());

  return new ChangeDetectionDto.PointFeatureList("FeatureCollection", result);
}
}

View File

@@ -0,0 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.gukyuin;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import org.springframework.data.jpa.repository.JpaRepository;
/**
 * Spring Data JPA repository for the GukYuin PNU-count update batch job.
 *
 * <p>Combines standard CRUD operations on {@link MapSheetLearnEntity} with the custom update
 * operations declared in {@link GukYuinPnuCntUpdateJobRepositoryCustom}.
 */
public interface GukYuinPnuCntUpdateJobRepository
    extends JpaRepository<MapSheetLearnEntity, Long>, GukYuinPnuCntUpdateJobRepositoryCustom {}

View File

@@ -0,0 +1,8 @@
package com.kamco.cd.kamcoback.postgres.repository.gukyuin;
/** Custom repository operations for the GukYuin PNU-count update batch job. */
public interface GukYuinPnuCntUpdateJobRepositoryCustom {

  /**
   * Re-syncs the pnu count column of tb_map_sheet_anal_data_inference_geom with the actual number
   * of tb_pnu rows per geo_uid; only rows whose stored count differs are updated.
   */
  void updateGukYuinContListPnuUpdateCnt();

  /**
   * Updates the apply status (and its status timestamp) of the learn row identified by the given
   * uid.
   *
   * @param uid uid of the tb_map_sheet_learn row to update
   * @param status new apply status value
   */
  void updateGukYuinApplyStatus(String uid, String status);
}

View File

@@ -0,0 +1,48 @@
package com.kamco.cd.kamcoback.postgres.repository.gukyuin;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import java.time.ZonedDateTime;
import lombok.RequiredArgsConstructor;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class GukYuinPnuCntUpdateJobRepositoryImpl
    implements GukYuinPnuCntUpdateJobRepositoryCustom {

  private final JPAQueryFactory queryFactory;
  private final JdbcTemplate jdbcTemplate;

  // NOTE: the previously injected @PersistenceContext EntityManager was removed —
  // neither method used it; all access goes through jdbcTemplate / queryFactory.

  /**
   * Re-syncs the pnu count on tb_map_sheet_anal_data_inference_geom with the actual number of
   * tb_pnu rows per geo_uid. Only rows whose stored count differs from the actual count are
   * touched, keeping the update set minimal.
   */
  @Override
  public void updateGukYuinContListPnuUpdateCnt() {
    String sql =
        """
        update tb_map_sheet_anal_data_inference_geom p
        set pnu = c_count.actual_count
        from (
            select geo_uid, count(*) actual_count
            from tb_pnu
            group by geo_uid
        ) c_count
        where p.geo_uid = c_count.geo_uid and p.pnu != c_count.actual_count;
        """;
    jdbcTemplate.update(sql);
  }

  /**
   * Sets the apply status of the learn row matching the given uid and stamps the change time with
   * the current moment.
   *
   * @param uid uid of the tb_map_sheet_learn row
   * @param status new apply status value
   */
  @Override
  public void updateGukYuinApplyStatus(String uid, String status) {
    queryFactory
        .update(mapSheetLearnEntity)
        .set(mapSheetLearnEntity.applyStatus, status)
        .set(mapSheetLearnEntity.applyStatusDttm, ZonedDateTime.now())
        .where(mapSheetLearnEntity.uid.eq(uid))
        .execute();
  }
}

View File

@@ -47,4 +47,6 @@ public interface GukYuinRepositoryCustom {
void updateMapSheetInferenceLabelEndStatus(Long learnId);
List<String> findStbltObjectIds(String uid, String mapSheetNum);
Integer updateStbltRandomData(String uid, int updateCnt);
}

View File

@@ -7,6 +7,7 @@ import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntit
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QPnuEntity.pnuEntity;
import com.kamco.cd.kamcoback.common.enums.ImageryFitStatus;
import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto.Basic;
import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto.LabelSendDto;
import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto.LearnKeyDto;
@@ -336,6 +337,44 @@ public class GukYuinRepositoryImpl implements GukYuinRepositoryCustom {
.fetch();
}
/**
 * Test helper: marks up to {@code updateCnt} geometry rows of the given learn uid as if a
 * field-survey (실태조사) result had arrived, by setting a dummy pnu count, the UNFIT state and the
 * state timestamp.
 *
 * <p>Only rows whose fit_state is still null are candidates; they are taken in ascending geo_uid
 * order.
 *
 * @param uid uid of the tb_map_sheet_learn row
 * @param updateCnt maximum number of rows to update
 * @return the number of rows actually updated (may be less than {@code updateCnt} when fewer
 *     candidate rows exist)
 */
@Override
public Integer updateStbltRandomData(String uid, int updateCnt) {
  // Pick up to updateCnt not-yet-judged geometry rows belonging to this learn uid.
  List<Long> geoUids =
      queryFactory
          .select(mapSheetAnalDataInferenceGeomEntity.geoUid)
          .from(mapSheetLearnEntity)
          .innerJoin(mapSheetAnalInferenceEntity)
          .on(mapSheetLearnEntity.id.eq(mapSheetAnalInferenceEntity.learnId))
          .innerJoin(mapSheetAnalDataInferenceEntity)
          .on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
          .innerJoin(mapSheetAnalDataInferenceGeomEntity)
          .on(
              mapSheetAnalDataInferenceEntity.id.eq(mapSheetAnalDataInferenceGeomEntity.dataUid),
              mapSheetAnalDataInferenceGeomEntity.fitState.isNull())
          .where(mapSheetLearnEntity.uid.eq(uid))
          .orderBy(mapSheetAnalDataInferenceGeomEntity.geoUid.asc())
          .limit(updateCnt)
          .fetch();

  if (geoUids.isEmpty()) {
    return 0;
  }

  // Single bulk update instead of one UPDATE statement per row (same values for every row).
  queryFactory
      .update(mapSheetAnalDataInferenceGeomEntity)
      .set(mapSheetAnalDataInferenceGeomEntity.pnu, 1L)
      .set(mapSheetAnalDataInferenceGeomEntity.fitState, ImageryFitStatus.UNFIT.getId())
      .set(mapSheetAnalDataInferenceGeomEntity.fitStateDttm, ZonedDateTime.now())
      .where(mapSheetAnalDataInferenceGeomEntity.geoUid.in(geoUids))
      .execute();

  // Report how many rows were actually targeted — the original echoed the requested
  // count even when fewer candidate rows existed.
  return geoUids.size();
}
@Override
@Transactional
public void updateGukYuinApplyStateComplete(Long id, GukYuinStatus status) {

View File

@@ -99,8 +99,8 @@ public class GukYuinStbltJobRepositoryImpl implements GukYuinStbltJobRepositoryC
.set(
mapSheetAnalDataInferenceGeomEntity.fitState,
stbResult.getStbltYn().equals("Y")
? ImageryFitStatus.UNFIT.getId()
: ImageryFitStatus.FIT.getId()) // 적합여부가 Y 이면 부적합인 것, N 이면 적합한 것이라고 함
? ImageryFitStatus.FIT.getId()
: ImageryFitStatus.UNFIT.getId()) // 적합여부가 N 이면 부적합인 것, Y 이면 적합한 것이라고 함
.set(mapSheetAnalDataInferenceGeomEntity.fitStateDttm, ZonedDateTime.now())
.set(mapSheetAnalDataInferenceGeomEntity.fitStateCmmnt, stbResult.getIncyCmnt())
.where(mapSheetAnalDataInferenceGeomEntity.resultUid.eq(resultUid))

View File

@@ -388,12 +388,8 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(analEntity.getCompareYyyy()),
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(analEntity.getTargetYyyy()),
mapSheetAnalDataInferenceGeomEntity.stage.eq(analEntity.getStage()),
// mapSheetAnalDataInferenceGeomEntity.pnu.isNotNull()
mapSheetAnalDataInferenceGeomEntity.pnu.gt(0L),
mapSheetAnalDataInferenceGeomEntity.fitState.eq(
ImageryFitStatus.UNFIT.getId()) // TODO:
// 추후 라벨링 대상 조건 수정하기
)
mapSheetAnalDataInferenceGeomEntity.fitState.eq(ImageryFitStatus.UNFIT.getId()))
.fetchOne();
}
@@ -493,6 +489,23 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
inspectionStatus = inspectionRemaining > 0 ? "진행중" : "완료";
}
ZoneId zoneId = ZoneId.of("Asia/Seoul");
LocalDate targetDate = LocalDate.now(zoneId);
ZonedDateTime end = targetDate.plusDays(1).atStartOfDay(zoneId);
Long downloadPolygonCnt =
queryFactory
.select(labelingAssignmentEntity.inferenceGeomUid.count())
.from(labelingAssignmentEntity)
.innerJoin(mapSheetAnalInferenceEntity)
.on(
labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id),
mapSheetAnalInferenceEntity.id.eq(analUid))
.where(
labelingAssignmentEntity.inspectState.eq(InspectState.COMPLETE.getId()),
labelingAssignmentEntity.inspectStatDttm.lt(end))
.fetchOne();
return WorkProgressInfo.builder()
// 라벨링 (pass_yn = false인 부적합 데이터 기준)
.labelingProgressRate(labelingRate)
@@ -516,6 +529,7 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
.completedCount(labelCompleted)
.remainingLabelCount(labelingRemaining)
.remainingInspectCount(inspectionRemaining)
.downloadPolygonCnt(downloadPolygonCnt)
.build();
}
@@ -659,6 +673,23 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
inspectionStatus = inspectionRemaining > 0 ? "진행중" : "완료";
}
ZoneId zoneId = ZoneId.of("Asia/Seoul");
LocalDate targetDate = LocalDate.now(zoneId);
ZonedDateTime end = targetDate.plusDays(1).atStartOfDay(zoneId);
Long downloadPolygonCnt =
queryFactory
.select(labelingAssignmentEntity.inferenceGeomUid.count())
.from(labelingAssignmentEntity)
.innerJoin(mapSheetAnalInferenceEntity)
.on(
labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id),
mapSheetAnalInferenceEntity.uuid.eq(targetUuid))
.where(
labelingAssignmentEntity.inspectState.eq(InspectState.COMPLETE.getId()),
labelingAssignmentEntity.inspectStatDttm.lt(end))
.fetchOne();
return WorkProgressInfo.builder()
.labelingProgressRate(labelingRate)
.labelingStatus(labelingStatus)
@@ -679,6 +710,7 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
.completedCount(labelCompleted)
.remainingLabelCount(labelingRemaining)
.remainingInspectCount(inspectionRemaining)
.downloadPolygonCnt(downloadPolygonCnt)
.build();
}

View File

@@ -141,12 +141,13 @@ public interface MapSheetMngRepositoryCustom {
void insertMapSheetMngTile(@Valid AddReq addReq);
/**
* 연도 조건으로 도엽번호 조회
* 연도별 도엽 목록 조회
*
* @param year 연도
* @return 추론 가능한 도엽 정보
* @param year 관리연도
* @param mapSheetNums50k 50k 도엽번호 리스트 (null 또는 empty인 경우 전체 조회)
* @return 도엽 목록
*/
List<MngListDto> getMapSheetMngHst(Integer year, String mapSheetScope, List<String> mapSheetNum);
List<MngListDto> getMapSheetMngHst(Integer year, List<String> mapSheetNums50k);
/**
* 비교연도 사용 가능한 이전도엽을 조회한다.

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.postgres.repository.mapsheet;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx50kEntity.mapInkx50kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity.mapInkx5kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngEntity.mapSheetMngEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngFileEntity.mapSheetMngFileEntity;
@@ -10,12 +11,12 @@ import static com.querydsl.core.types.dsl.Expressions.nullExpression;
import com.kamco.cd.kamcoback.common.enums.CommonUseStatus;
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.AddReq;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.YearSearchReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetMngHstEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapInkx50kEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngFileEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngHstEntity;
@@ -27,7 +28,6 @@ import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberExpression;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
@@ -50,7 +50,6 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
implements MapSheetMngRepositoryCustom {
private final JPAQueryFactory queryFactory;
private final StringExpression NULL_STRING = Expressions.stringTemplate("cast(null as text)");
@PersistenceContext private EntityManager em;
@@ -983,7 +982,8 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
.or(mapSheetMngHstEntity.syncCheckState.eq("DONE")));
// file_ext = 'tif'
whereBuilder.and(mapSheetMngFileEntity.fileExt.eq("tif"));
whereBuilder.and(
mapSheetMngFileEntity.fileExt.eq("tif").and(mapSheetMngFileEntity.fileDel.isFalse()));
// mng_yyyy = '2023'
if (yyyy != null && !yyyy.isEmpty()) {
@@ -1083,11 +1083,27 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
mapSheetMngHstEntity.mapSheetNum));
}
/**
* 영상데이터관리 > 연도별 도엽 목록 조회
*
* @param year 관리연도
* @param mapSheetNums50k 50k 도엽번호 리스트 (null 또는 empty인 경우 전체 조회)
* @return 도엽 목록
*/
@Override
public List<MngListDto> getMapSheetMngHst(
Integer year, String mapSheetScope, List<String> mapSheetNum) {
public List<MngListDto> getMapSheetMngHst(Integer year, List<String> mapSheetNums50k) {
/*
검색조건:
- ✅ 데이터 처리 완료(data_state='DONE')
- ✅ 동기화 완료(sync_state='DONE' OR sync_check_state='DONE')
- ✅ 추론 사용(use_inference='USE')
- ✅ 지정 연도(mng_yyyy=year)
- ✅ 완료된 TIF 파일 존재
- ✅ 사용 중인 도엽만(mapInkx5k.useInference='USE')
- ✅ 50k 도엽번호로 필터링 (mapSheetNums50k가 있는 경우)
*/
BooleanBuilder whereBuilder = new BooleanBuilder();
whereBuilder.and(mapSheetMngHstEntity.mngYyyy.eq(year));
whereBuilder.and(mapSheetMngHstEntity.dataState.eq("DONE"));
whereBuilder.and(
mapSheetMngHstEntity
@@ -1096,39 +1112,20 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
.or(mapSheetMngHstEntity.syncCheckState.eq("DONE")));
whereBuilder.and(mapSheetMngHstEntity.useInference.eq("USE"));
whereBuilder.and(mapSheetMngHstEntity.mngYyyy.eq(year));
// TIF 파일 존재 여부 확인
whereBuilder.and(
JPAExpressions.selectOne()
.from(mapSheetMngFileEntity)
.where(
mapSheetMngFileEntity
.hstUid
.eq(mapSheetMngHstEntity.hstUid) // FK 관계에 맞게 유지
.eq(mapSheetMngHstEntity.hstUid)
.and(mapSheetMngHstEntity.mngYyyy.eq(year))
.and(mapSheetMngFileEntity.fileExt.eq("tif"))
.and(mapSheetMngFileEntity.fileState.eq("DONE")))
.and(mapSheetMngFileEntity.fileState.eq("DONE"))
.and(mapSheetMngFileEntity.fileDel.eq(false)))
.exists());
BooleanBuilder likeBuilder = new BooleanBuilder();
if (MapSheetScope.PART.getId().equals(mapSheetScope)) {
List<String> list = mapSheetNum;
if (list == null || list.isEmpty()) {
return List.of();
}
for (String prefix : list) {
if (prefix == null || prefix.isBlank()) {
continue;
}
likeBuilder.or(mapSheetMngHstEntity.mapSheetNum.like(prefix.trim() + "%"));
}
}
if (likeBuilder.hasValue()) {
whereBuilder.and(likeBuilder);
}
return queryFactory
.select(
Projections.constructor(
@@ -1145,7 +1142,8 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
.mapidcdNo
.eq(mapSheetMngHstEntity.mapSheetNum)
.and(mapInkx5kEntity.useInference.eq(CommonUseStatus.USE)))
.where(whereBuilder)
.innerJoin(mapInkx5kEntity.mapInkx50k, mapInkx50kEntity)
.where(whereBuilder, inScenes50(mapInkx50kEntity, mapSheetNums50k))
.fetch();
}
@@ -1177,7 +1175,12 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
h.syncState.eq("DONE").or(h.syncCheckState.eq("DONE")),
JPAExpressions.selectOne()
.from(f)
.where(f.hstUid.eq(h.hstUid), f.fileExt.eq("tif"), f.fileState.eq("DONE"))
.where(
f.hstUid.eq(h.hstUid),
f.mngYyyy.eq(year),
f.fileExt.eq("tif"),
f.fileState.eq("DONE"),
f.fileDel.eq(false))
.exists(),
// mapSheetNum별 최대 mngYyyy인 행만 남김
@@ -1194,9 +1197,19 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
.from(f2)
.where(
f2.hstUid.eq(h2.hstUid),
f2.mngYyyy.eq(year),
f2.fileExt.eq("tif"),
f2.fileState.eq("DONE"))
f2.fileState.eq("DONE"),
f2.fileDel.eq(false))
.exists())))
.fetch();
}
/**
 * Builds an IN predicate on the 1:50k map-sheet number column.
 * Returns {@code null} (meaning "no filtering") when the id list is absent or empty,
 * which QueryDSL silently drops from a where clause.
 */
private BooleanExpression inScenes50(QMapInkx50kEntity mapInkx50k, List<String> sceneIds) {
  boolean hasIds = sceneIds != null && !sceneIds.isEmpty();
  return hasIds ? mapInkx50k.mapidcdNo.in(sceneIds) : null;
}
}

View File

@@ -88,17 +88,33 @@ public class MapSheetMngYearRepositoryImpl implements MapSheetMngYearRepositoryC
*/
@Override
public List<MngListCompareDto> findByHstMapSheetCompareList(int mngYyyy, List<String> mapIds) {
QMapSheetMngYearYnEntity y = QMapSheetMngYearYnEntity.mapSheetMngYearYnEntity;
QMapSheetMngYearYnEntity mapSheetMngYearYn = QMapSheetMngYearYnEntity.mapSheetMngYearYnEntity;
// SELECT
// concat(?, '') as col_0_0_, -- 파라미터 mngYyyy (문자열)
// m.map_sheet_num as col_1_0_, -- 도엽번호
// MAX(m.mng_yyyy) as col_2_0_ -- 최대 관리연도
// FROM tb_map_sheet_mng_year_yn m
// WHERE m.map_sheet_num IN (?, ?, ..., ?) -- mapIds 리스트
// AND m.yn = 'Y' -- 파일 존재 여부
// AND m.mng_yyyy <= ? -- 기준연도 이하만
// GROUP BY m.map_sheet_num
StringExpression mngYyyyStr = Expressions.stringTemplate("concat({0}, '')", mngYyyy);
return queryFactory
.select(
Projections.constructor(
MngListCompareDto.class, mngYyyyStr, y.id.mapSheetNum, y.id.mngYyyy.max()))
.from(y)
.where(y.id.mapSheetNum.in(mapIds), y.yn.eq("Y"), y.id.mngYyyy.loe(mngYyyy))
.groupBy(y.id.mapSheetNum)
MngListCompareDto.class,
mngYyyyStr,
mapSheetMngYearYn.id.mapSheetNum,
mapSheetMngYearYn.id.mngYyyy.max()))
.from(mapSheetMngYearYn)
.where(
mapSheetMngYearYn.id.mapSheetNum.in(mapIds),
mapSheetMngYearYn.yn.eq("Y"),
mapSheetMngYearYn.id.mngYyyy.loe(mngYyyy))
.groupBy(mapSheetMngYearYn.id.mapSheetNum)
.fetch();
}

View File

@@ -3,6 +3,8 @@ package com.kamco.cd.kamcoback.scheduler;
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.gukyuin.service.GukYuinApiService;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiLabelJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiPnuJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiStatusJobService;
@@ -11,6 +13,7 @@ import com.kamco.cd.kamcoback.scheduler.service.MemberInactiveJobService;
import com.kamco.cd.kamcoback.scheduler.service.TrainingDataLabelJobService;
import com.kamco.cd.kamcoback.scheduler.service.TrainingDataReviewJobService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
@@ -18,6 +21,7 @@ import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
@@ -28,7 +32,7 @@ import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@Tag(name = "스케줄링 수동 호출 테스트", description = "스케줄링 수동 호출 테스트 API")
@Tag(name = "스케줄링 및 jar 수동 호출 테스트", description = "스케줄링 및 jar 수동 호출 테스트 API")
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/schedule")
@@ -42,6 +46,7 @@ public class SchedulerApiController {
private final TrainingDataReviewJobService trainingDataReviewJobService;
private final MemberInactiveJobService memberInactiveJobService;
private final MapSheetMngFileJobController mapSheetMngFileJobController;
private final InferenceResultShpService inferenceResultShpService;
private final GukYuinApiService gukYuinApiService;
@Operation(summary = "국유인 탐지객체 조회 PNU 업데이트 스케줄링", description = "국유인 탐지객체 조회 PNU 업데이트 스케줄링")
@@ -54,7 +59,7 @@ public class SchedulerApiController {
@Operation(summary = "국유인 등록 상태 체크 스케줄링", description = "국유인 등록 상태 체크 스케줄링")
@GetMapping("/gukyuin/status")
public ApiResponseDto<Void> findGukYuinMastCompleteYn() {
gukYuinApiStatusJobService.findGukYuinMastCompleteYn();
gukYuinApiStatusJobService.findGukYuinPnuCntUpdate();
return ApiResponseDto.ok(null);
}
@@ -137,4 +142,44 @@ public class SchedulerApiController {
return ApiResponseDto.createOK("OK");
}
/**
 * Manually triggers persistence of inference result data for one inference run.
 *
 * @param uuid identifier of the inference run whose results should be saved
 * @return wrapper with the saved-count DTO produced by the service
 */
@Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장")
@ApiResponses(
    value = {
      @ApiResponse(
          responseCode = "201",
          description = "데이터 저장 성공",
          content =
              @Content(
                  mediaType = "application/json",
                  schema =
                      @Schema(implementation = InferenceResultShpDto.InferenceCntDto.class))),
      @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
      @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
    })
@PostMapping("/save/inference/{uuid}")
public ApiResponseDto<InferenceResultShpDto.InferenceCntDto> saveInferenceData(
    @Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae", description = "추론 uuid")
        @PathVariable
        UUID uuid) {
  // Delegates straight to the service; the service performs the actual save work.
  return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(uuid));
}
/**
 * Manually triggers SHP file generation for one inference run.
 *
 * @param uuid identifier of the inference run to generate SHP files for
 * @return empty OK response; generation outcome is not reported here
 */
@Operation(summary = "추론결과 shp 생성", description = "추론결과 shp 생성")
@PostMapping("/shp/inference/{uuid}")
public ApiResponseDto<Void> createShp(
    @Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae", description = "추론 uuid")
        @PathVariable
        UUID uuid) {
  // Manual SHP file creation (normally produced by the scheduled pipeline).
  inferenceResultShpService.createShp(uuid);
  return ApiResponseDto.createOK(null);
}
/**
 * Test helper: randomly updates the survey-suitability flag on GukYuin records.
 *
 * @param uid identifier of the target record set (semantics defined by the service)
 * @param updateCnt number of rows to update
 * @return number of rows actually updated, as reported by the service
 */
@Operation(summary = "국유인 실태조사 적합여부 랜덤 업데이트", description = "국유인 실태조사 적합여부 랜덤 업데이트")
@PutMapping("/gukyuin/random-stblt-update/{uid}/{updateCnt}")
public ApiResponseDto<Integer> updateStbltRandomData(
    @PathVariable String uid, @PathVariable int updateCnt) {
  return ApiResponseDto.ok(gukYuinApiService.updateStbltRandomData(uid, updateCnt));
}
}

View File

@@ -21,6 +21,17 @@ public class AsyncConfig {
return ex;
}
/**
 * Bounded thread pool backing the asynchronous shapefile generation pipeline
 * (consumed via {@code @Async("makeShapeFile")}).
 */
@Bean(name = "makeShapeFile")
public Executor makeShapeFileExecutor() {
  ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
  executor.setThreadNamePrefix("makeShapeFile-");
  executor.setCorePoolSize(2);
  executor.setMaxPoolSize(4);
  executor.setQueueCapacity(50);
  executor.initialize();
  return executor;
}
@Bean(name = "auditLogExecutor")
public Executor auditLogExecutor() {
ThreadPoolTaskExecutor exec = new ThreadPoolTaskExecutor();
@@ -31,4 +42,16 @@ public class AsyncConfig {
exec.initialize();
return exec;
}
/** Thread pool for asynchronous inference-completion post-processing. */
@Bean(name = "inferenceEndExecutor")
public Executor inferenceEndExecutor() {
  ThreadPoolTaskExecutor pool = new ThreadPoolTaskExecutor();
  pool.setThreadNamePrefix("inference-async-");
  pool.setCorePoolSize(5);
  pool.setMaxPoolSize(10);
  pool.setQueueCapacity(100);
  pool.initialize();
  return pool;
}
}

View File

@@ -31,26 +31,33 @@ public class GukYuinApiLabelJobService {
return "local".equalsIgnoreCase(profile);
}
// @Scheduled(cron = "0 0 2 * * *")
public void runTask() {
findLabelingCompleteSend(null);
}
/** 어제 라벨링 검수 완료된 것 -> 국유인에 전송 */
public void findLabelingCompleteSend(LocalDate baseDate) {
// if (isLocalProfile()) {
// return;
// }
log.info("[Step 1-1] 어제 검수완료된 라벨링을 검색한다.");
log.info("=== baseDate : {}", baseDate);
log.info("=== baseDate 있으면 해당 일자, 없으면 어제일자로 조회");
List<GeomUidDto> list = gukYuinLabelJobCoreService.findYesterdayLabelingCompleteList(baseDate);
log.info("[Step 1-2] 검수완료된 폴리곤 객체 수 : {}", list == null ? 0 : list.size());
if (list.isEmpty()) {
log.info("[Step 1-3] 객체 없어서 return : 스케줄링 종료");
return;
}
log.info("[Step 2-1] 객체 목록으로 라벨링 전송 API 호출 시작");
for (GeomUidDto gto : list) {
log.info("[Step 2-2] 객체ID 확인 gto.getResultUid(): {}", gto.getResultUid());
ChngDetectContDto.ResultLabelDto dto =
gukYuinApiService.updateChnDtctObjtLabelingYn(gto.getResultUid(), "Y", "Y");
log.info("[Step 2-3] 결과 dto.getSuccess(): {}", dto.getSuccess());
if (dto.getSuccess()) {
log.info("[Step 2-4] 결과가 성공일 때 inference_geom에 label_send_dttm 업데이트 하기");
log.info("==== 업데이트 하는 객체 gto.getGeoUid(): {}", gto.getGeoUid());
// inference_geom 에 label_send_dttm 업데이트 하기
gukYuinLabelJobCoreService.updateAnalDataInferenceGeomSendDttm(gto.getGeoUid());
}

View File

@@ -41,16 +41,24 @@ public class GukYuinApiPnuJobService {
// return;
// }
log.info("[Step 1-1] 국유인 연동까지 완료된 추론 목록 가져오기");
log.info("=== apply_status -> 100% 다운 완료: GUK_COMPLETED, PNU매핑 실패: PNU_FAILED");
List<LearnKeyDto> list =
gukYuinPnuJobCoreService.findGukyuinApplyStatusUidList(
List.of(GukYuinStatus.GUK_COMPLETED.getId(), GukYuinStatus.PNU_FAILED.getId()));
log.info("[Step 1-2] 매핑할 추론 회차 갯수 : {}", list == null ? 0 : list.size());
if (list.isEmpty()) {
log.info("[Step 1-3] 매핑할 추론 회차 갯수 없어서 리턴하고 끝남");
return;
}
log.info("[Step 2-1] 추론 회차별 pnu 매핑 for문 시작 ");
for (LearnKeyDto dto : list) {
try {
processUid(dto.getUid(), dto.getUid());
log.info("[Step 2-2] 진행하는 추론 Uid: {}", dto.getUid());
processUid(dto.getUid());
gukYuinPnuJobCoreService.updateGukYuinApplyStateComplete(
dto.getId(), GukYuinStatus.PNU_COMPLETED);
} catch (Exception e) {
@@ -61,15 +69,21 @@ public class GukYuinApiPnuJobService {
}
}
private void processUid(String chnDtctId, String uid) {
ResultDto result = gukYuinApiService.listChnDtctId(chnDtctId, "Y");
private void processUid(String uid) {
log.info("[Step 2-4] 탐지 등록목록 상세 API 호출 시작");
ResultDto result = gukYuinApiService.listChnDtctId(uid, "Y");
if (result == null || result.getResult() == null || result.getResult().isEmpty()) {
log.info("[Step 2-5] 결과값 없어서 return");
return;
}
log.info("[Step 2-5] 결과값 첫번째 값 가져오기");
ChngDetectMastDto.Basic basic = result.getResult().get(0);
String chnDtctCnt = basic.getChnDtctCnt();
log.info("[Step 2-6] 탐지 객체 전체 갯수 chnDtctCnt = {}", chnDtctCnt);
if (chnDtctCnt == null || chnDtctCnt.isEmpty()) {
log.info("[Step 2-76] 탐지 객체 전체 갯수 없어서 return");
return;
}
@@ -78,29 +92,44 @@ public class GukYuinApiPnuJobService {
int totalCount = Integer.parseInt(chnDtctCnt);
int totalPages = (totalCount + pageSize - 1) / pageSize;
log.info("[Step 3-1] 탐지 객체 전체 수로 페이지 계산 : {}", totalPages);
for (int page = 0; page < totalPages; page++) {
log.info("[Step 3-2] 페이지 별 호출 : {}", page);
processPage(uid, page, pageSize);
}
}
private void processPage(String uid, int page, int pageSize) {
log.info("[Step 4-1] 탐지객체 목록 API 호출 시작");
ResultContDto resContList = gukYuinApiService.findChnContList(uid, page, pageSize, "Y");
if (resContList.getResult() == null || resContList.getResult().isEmpty()) {
log.info("[Step 4-2] 탐지객체 목록 결과 없어서 return");
return; // 외부 API 이상 방어
}
List<ContBasic> contList = resContList.getResult();
log.info("[Step 4-3] 탐지객체 목록 결과 contList.size : {}", contList == null ? 0 : contList.size());
for (ContBasic cont : contList) {
String[] pnuList = cont.getPnuList();
long pnuCnt = pnuList == null ? 0 : pnuList.length;
log.info("[Step 4-4] 객체에 연결된 pnuCnt : {}", pnuCnt);
if (cont.getChnDtctObjtId() != null) {
log.info(
"[Step 4-5] inference_geom 에 pnu 갯수 update : cont.getChnDtctObjtId = {}",
cont.getChnDtctObjtId());
log.info(" === cont.getChnDtctObjtId : {}", cont.getChnDtctObjtId());
log.info(" === pnuCnt : {}", pnuCnt);
gukYuinPnuJobCoreService.updateInferenceGeomDataPnuCnt(cont.getChnDtctObjtId(), pnuCnt);
if (pnuCnt > 0) {
log.info("[Step 4-6] 객체 ID로 geoUid 검색 = {}", cont.getChnDtctObjtId());
Long geoUid =
gukYuinPnuJobCoreService.findMapSheetAnalDataInferenceGeomUid(
cont.getChnDtctObjtId());
log.info("[Step 4-7] tb_pnu 에 데이터 upsert 수행");
log.info("===== geoUid = {}", geoUid);
gukYuinPnuJobCoreService.insertGeoUidPnuData(geoUid, pnuList, cont.getChnDtctObjtId());
}
}

View File

@@ -5,6 +5,7 @@ import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto.LearnKeyDto;
import com.kamco.cd.kamcoback.gukyuin.dto.GukYuinStatus;
import com.kamco.cd.kamcoback.gukyuin.service.GukYuinApiService;
import com.kamco.cd.kamcoback.postgres.core.GukYuinJobCoreService;
import com.kamco.cd.kamcoback.postgres.core.GukYuinPnuCntUpdateJobCoreService;
import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
@@ -18,6 +19,7 @@ public class GukYuinApiStatusJobService {
private final GukYuinJobCoreService gukYuinJobCoreService;
private final GukYuinApiService gukYuinApiService;
private final GukYuinPnuCntUpdateJobCoreService gukYuinPnuCntUpdateJobCoreService;
@Value("${spring.profiles.active}")
private String profile;
@@ -31,12 +33,8 @@ public class GukYuinApiStatusJobService {
return "local".equalsIgnoreCase(profile);
}
/** 국유인 연동 후, 100% 되었는지 확인하는 스케줄링 매 10분마다 호출 */
// @Scheduled(cron = "0 0/10 * * * *")
public void findGukYuinMastCompleteYn() {
// if (isLocalProfile()) {
// return;
// }
/** 매일 00시에 pnu cnt 업데이트 */
public void findGukYuinPnuCntUpdate() {
List<LearnKeyDto> list =
gukYuinJobCoreService.findGukyuinApplyStatusUidList(
@@ -59,8 +57,9 @@ public class GukYuinApiStatusJobService {
Integer progress =
basic.getExcnPgrt() == null ? null : Integer.parseInt(basic.getExcnPgrt().trim());
if (progress != null && progress == 100) {
gukYuinJobCoreService.updateGukYuinApplyStateComplete(
dto.getId(), GukYuinStatus.GUK_COMPLETED);
gukYuinPnuCntUpdateJobCoreService.updateGukYuinContListPnuUpdateCnt();
gukYuinPnuCntUpdateJobCoreService.updateGukYuinApplyStatus(
dto.getUid(), GukYuinStatus.PNU_COMPLETED.getId());
}
} catch (Exception e) {
log.error("[GUKYUIN] failed uid={}", dto.getChnDtctMstId(), e);

View File

@@ -38,24 +38,26 @@ public class GukYuinApiStbltJobService {
return "local".equalsIgnoreCase(profile);
}
// @Scheduled(cron = "0 0 3 * * *")
public void runTask() {
findGukYuinEligibleForSurvey(null);
}
/** 국유인 연동 후, 실태조사 적합여부 확인하여 update */
public void findGukYuinEligibleForSurvey(LocalDate baseDate) {
// if (isLocalProfile()) {
// return;
// }
log.info("[Step 1-1] 국유인 연동 PNU 완료된 추론 회차 정보 가져오기 ");
log.info(" learn 테이블의 apply_status : {}", GukYuinStatus.PNU_COMPLETED.getId());
List<LearnKeyDto> list =
gukYuinStbltJobCoreService.findGukYuinEligibleForSurveyList(
GukYuinStatus.PNU_COMPLETED.getId());
log.info("[Step 1-2] 국유인 연동 PNU 완료된 추론 회차 갯수 : {}", list == null ? 0 : list.size());
if (list.isEmpty()) {
log.info("[Step 1-3] 국유인 연동 PNU 완료된 추론 회차 갯수 없어서 return");
return;
}
log.info("[Step 2-1] 추론 회차 list 로 for문 실행하기 ");
for (LearnKeyDto dto : list) {
try {
String targetDate =
@@ -63,19 +65,30 @@ public class GukYuinApiStbltJobService {
.minusDays(1)
.format(DateTimeFormatter.ofPattern("yyyyMMdd"));
log.info("[Step 2-2] 실태조사 적합여부 조회 날짜 확인 : {}", targetDate);
if (baseDate != null) { // 파라미터가 있으면
targetDate = baseDate.format(DateTimeFormatter.ofPattern("yyyyMMdd"));
log.info("[Step 2-3] 수동호출 baseDate 가 있을 경우, 실태조사 적합여부 조회 날짜 확인 : {}", targetDate);
}
log.info("[Step 3-1] 국유인 실태조사 적합여부 API 호출 시작 ");
log.info(" === 값 확인 - uid : {}", dto.getUid());
log.info(" === 값 확인 - targetDate : {}", targetDate);
RlbDtctDto result = gukYuinApiService.findRlbDtctList(dto.getUid(), targetDate, "Y");
if (result == null || result.getResult() == null || result.getResult().isEmpty()) {
log.warn("[GUKYUIN] empty result chnDtctMstId={}", dto.getChnDtctMstId());
log.info("[GUKYUIN] empty result chnDtctId={}", dto.getUid());
log.info("=== 국유인 API 조회 결과 없어서 continue");
continue;
}
log.info("[Step 4-1] 국유인 실태조사 적합여부 result 값으로 데이터 업데이트");
log.info(" === 데이터 갯수 : {}", result.getResult() == null ? 0 : result.getResult().size());
for (RlbDtctMastDto stbltDto : result.getResult()) {
log.info("[Step 4-2] 국유인 실태조사 적합여부 결과 가져오기");
String resultUid = stbltDto.getChnDtctObjtId();
log.info(" == 테이블 tb_pnu 에 적합여부 리턴 결과를 upsert 진행, 객체 uid : {}", resultUid);
gukYuinStbltJobCoreService.updateGukYuinEligibleForSurvey(resultUid, stbltDto);
}
@@ -90,6 +103,7 @@ public class GukYuinApiStbltJobService {
e -> {
List<RlbDtctMastDto> pnuList = e.getValue();
log.info("[Step 4-3] 국유인 실태조사 적합여부 업데이트 값을 객체 uid 기준으로 DTO 생성");
boolean hasY = pnuList.stream().anyMatch(v -> "Y".equals(v.getStbltYn()));
String fitYn = hasY ? "Y" : "N";
@@ -105,7 +119,9 @@ public class GukYuinApiStbltJobService {
.findFirst()
.orElse(null);
log.info(" === selected DTO : {}", selected);
if (selected == null) {
log.info(" === selected NULL");
return null; // 방어 코드
}
@@ -113,10 +129,11 @@ public class GukYuinApiStbltJobService {
fitYn, selected.getIncyCd(), selected.getIncyRsnCont());
}));
log.info("[Step 4-4] 국유인 실태조사 적합여부, 사유, 내용을 inference_geom 테이블에 update");
resultMap.forEach(gukYuinStbltJobCoreService::updateGukYuinObjectStbltYn);
} catch (Exception e) {
log.error("[GUKYUIN] failed uid={}", dto.getChnDtctMstId(), e);
log.error("[GUKYUIN] failed uid={}", dto.getUid(), e);
}
}
}

View File

@@ -214,7 +214,7 @@ public class MapSheetInferenceJobService {
// 현재 모델 종료 업데이트
updateProcessingEndTimeByModel(job, sheet.getUuid(), now, currentType);
// M3이면 전체 종료
// G3이면 전체 종료
if (ModelType.G3.getId().equals(currentType)) {
endAll(sheet, now);
return;
@@ -235,6 +235,13 @@ public class MapSheetInferenceJobService {
* @param now
*/
private void endAll(InferenceBatchSheet sheet, ZonedDateTime now) {
List<Long> batchIds =
Stream.of(sheet.getM1BatchId(), sheet.getM2BatchId(), sheet.getM3BatchId())
.filter(Objects::nonNull)
.distinct()
.toList();
SaveInferenceAiDto save = new SaveInferenceAiDto();
save.setUuid(sheet.getUuid());
save.setStatus(Status.END.getId());
@@ -246,16 +253,11 @@ public class MapSheetInferenceJobService {
inferenceResultCoreService.upsertGeomData(sheet.getId());
// 추론 종료일때 shp 파일 생성
List<Long> batchIds =
Stream.of(sheet.getM1BatchId(), sheet.getM2BatchId(), sheet.getM3BatchId())
.filter(Objects::nonNull)
.distinct()
.toList();
String batchIdStr = batchIds.stream().map(String::valueOf).collect(Collectors.joining(","));
// shp 파일 비동기 생성
shpPipelineService.runPipeline(jarPath, datasetDir, batchIdStr, sheet.getUid());
// 0312 shp 파일 비동기 생성 (바꿔주세요)
shpPipelineService.makeShapeFile(sheet.getUid(), batchIds);
// shpPipelineService.runPipeline(jarPath, datasetDir, batchIdStr, sheet.getUid());
}
/**

View File

@@ -1,8 +1,9 @@
package com.kamco.cd.kamcoback.scheduler.service;
import com.kamco.cd.kamcoback.common.service.ExternalJarRunner;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.scheduler.config.ShpKeyLock;
import java.nio.file.Paths;
import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.scheduling.annotation.Async;
@@ -13,11 +14,44 @@ import org.springframework.stereotype.Service;
@RequiredArgsConstructor
public class ShpPipelineService {
private final InferenceResultCoreService inferenceResultCoreService;
private final ExternalJarRunner externalJarRunner;
private final ShpKeyLock shpKeyLock;
/**
 * Asynchronously generates SHP files for one inference run by delegating to the
 * external jar runner. At most one pipeline per {@code inferenceId} may run at a
 * time; a concurrent request for the same id is logged and skipped.
 *
 * @param inferenceId inference run uid used as the lock key
 * @param batchIds batch ids whose results are exported
 */
@Async("makeShapeFile")
public void makeShapeFile(String inferenceId, List<Long> batchIds) {
  // BUGFIX: the work was previously nested inside the failed-tryLock branch, so the
  // jar only ran when the lock could NOT be acquired, and finally unlocked a lock
  // that was never held. Guard-and-return, then do the work under the held lock.
  if (!shpKeyLock.tryLock(inferenceId)) {
    log.info("SHP pipeline already running. inferenceId={}", inferenceId);
    return;
  }
  try {
    log.info("SHP pipeline already start. inferenceId={}", inferenceId);
    externalJarRunner.run(inferenceId, batchIds);
  } catch (Exception e) {
    log.error("SHP pipeline failed. inferenceId={}", inferenceId, e);
    // TODO persist a failure status so a failed run is not silently lost
  } finally {
    log.info("SHP pipeline DONE. inferenceId={}", inferenceId);
    // Always release the per-id lock acquired above, even on failure.
    shpKeyLock.unlock(inferenceId);
  }
}
/**
* shp 파일 생성 1. merge 생성 2. 생성된 merge shp 파일로 geoserver 등록, 3.도엽별로 shp 생성
*
* @param jarPath 실행 jar 파일 경로
* @param datasetDir shp 파일이 생성될 경로
* @param batchIds 추론 batch id = 12,13,14
* @param inferenceId 추론 uid 32자
*/
@Async("shpExecutor")
public void runPipeline(String jarPath, String datasetDir, String batchId, String inferenceId) {
public void runPipeline(String jarPath, String datasetDir, String batchIds, String inferenceId) {
// inferenceId 기준 동시 실행 제한
if (!shpKeyLock.tryLock(inferenceId)) {
@@ -26,24 +60,33 @@ public class ShpPipelineService {
}
try {
log.info("");
log.info("============================================================");
log.info("SHP pipeline started. inferenceId={}", inferenceId);
log.info("============================================================");
// uid 기준 merge shp, geojson 파일 생성
externalJarRunner.run(jarPath, batchId, inferenceId, "", "MERGED");
externalJarRunner.run(jarPath, batchIds, inferenceId, "", "MERGED");
// uid 기준 shp 파일 geoserver 등록
String register =
Paths.get(datasetDir, inferenceId, "merge", inferenceId + ".shp").toString();
log.info("register={}", register);
externalJarRunner.run(jarPath, register, inferenceId);
// uid 기준 도엽별 shp, geojson 파일 생성
externalJarRunner.run(jarPath, batchId, inferenceId, "", "RESOLVE");
log.info("SHP pipeline finished. inferenceId={}", inferenceId);
// String register =
// Paths.get(datasetDir, inferenceId, "merge", inferenceId + ".shp").toString();
// log.info("register={}", register);
// externalJarRunner.run(jarPath, register, inferenceId);
//
// // uid 기준 도엽별 shp, geojson 파일 생성
// externalJarRunner.run(jarPath, batchIds, inferenceId, "", "RESOLVE");
//
// log.info("SHP pipeline finished. inferenceId={}", inferenceId);
} catch (Exception e) {
log.error("SHP pipeline failed. inferenceId={}", inferenceId, e);
// TODO 실패 상태 업데이트 로직 추가
} finally {
log.info("============================================================");
log.info("SHP pipeline DONE. inferenceId={}", inferenceId);
log.info("============================================================");
shpKeyLock.unlock(inferenceId);
}
}

View File

@@ -32,7 +32,6 @@ public class TrainingDataLabelJobService {
return "local".equalsIgnoreCase(profile);
}
// @Scheduled(cron = "0 0 0 * * *")
public void runTask() {
// 프록시를 통해 호출해야 @Transactional이 적용됨
applicationContext
@@ -43,55 +42,72 @@ public class TrainingDataLabelJobService {
@Transactional
public void assignReviewerYesterdayLabelComplete(LocalDate baseDate) {
// if (isLocalProfile()) {
// return;
// }
try {
log.info("[Step 1-1] 라벨링 완료된 데이터 목록 조회한다.");
log.info("=== baseDate : {}", baseDate);
log.info("=== baseDate 있으면 해당 일자, 없으면 어제일자로 조회");
List<Tasks> tasks =
trainingDataLabelJobCoreService.findCompletedYesterdayUnassigned(baseDate);
log.info("[Step 1-2] 목록 객체 건수 count : {}", tasks == null ? 0 : tasks.size());
if (tasks.isEmpty()) {
log.info("[Step 1-3] 조회된 것 없어 return");
return;
}
// 회차별로 그룹핑
log.info("[Step 2-1] 회차별로 그룹핑 시작");
Map<Long, List<Tasks>> taskByRound =
tasks.stream().collect(Collectors.groupingBy(Tasks::getAnalUid));
// 회차별 분배
log.info("[Step 3-1] 회차별로 분배 시작");
for (Map.Entry<Long, List<Tasks>> entry : taskByRound.entrySet()) {
Long analUid = entry.getKey();
List<Tasks> analTasks = entry.getValue();
// pending 계산
log.info("[Step 3-2] 수행하는 회차 analUid: {}", analUid);
log.info("해당 회차에 라벨링 할당받은 검수자별 완료 건수 count(), 완료한 게 적은 순으로 해야 일이 한 사람에게 몰리지 않음");
List<InspectorPendingDto> pendings =
trainingDataLabelJobCoreService.findInspectorPendingByRound(analUid);
log.info("검수자 수: {}", pendings == null ? 0 : pendings.size());
if (pendings.isEmpty()) {
log.info("[Step 3-3] 할당된 검수자가 없으면 return");
continue;
}
log.info("[Step 4-1] 검수자 사번 List 생성");
List<String> reviewerIds =
pendings.stream().map(InspectorPendingDto::getInspectorUid).toList();
// Lock 걸릴 수 있기 때문에 엔티티 조회하는 Repository 에서 구현
log.info("[Step 4-2] 검수자 테이블 lock 걸리지 않게 처리");
trainingDataLabelJobCoreService.lockInspectors(analUid, reviewerIds);
// 균등 분배
log.info("[Step 5-1] 검수자에게 라벨 작업 균등분배 시작");
Map<String, List<Tasks>> assignMap = distributeByLeastPending(analTasks, reviewerIds);
log.info("[Step 5-2] 검수자에게 라벨 작업 균등분배 완료");
// reviewer별 batch update
log.info("[Step 5-3] 검수자별 할당 데이터를 batch update 시작");
assignMap.forEach(
(reviewerId, assignedTasks) -> {
if (assignedTasks.isEmpty()) {
log.info("[Step 5-4] 할당된 데이터 없으면 return");
return;
}
List<UUID> assignmentUids =
assignedTasks.stream().map(Tasks::getAssignmentUid).toList();
log.info("[Step 6-1] 할당 작업에 검수자 아이디 update");
log.info("==== 검수자 사번: {}", reviewerId);
log.info("==== 할당 갯수: {}", assignmentUids == null ? 0 : assignmentUids.size());
trainingDataLabelJobCoreService.assignReviewerBatch(assignmentUids, reviewerId);
log.info("[Step 7-1] geom 테이블에 검수 상태 update");
List<Long> geomUids = assignedTasks.stream().map(Tasks::getInferenceUid).toList();
trainingDataLabelJobCoreService.updateGeomUidTestState(geomUids);
});

View File

@@ -15,7 +15,6 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDate;
import java.util.List;
import java.util.Objects;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
@@ -39,52 +38,68 @@ public class TrainingDataReviewJobService {
}
@Transactional
// @Scheduled(cron = "0 0 2 * * *")
public void runTask() {
exportGeojsonLabelingGeom(null);
}
/**
* 미사용 -> kamco-cd-cron GIT에 kamco-make-dataset-generation jar 생성 로직에 포함되어 해당 로직은 미사용
*
* @param baseDate
*/
public void exportGeojsonLabelingGeom(LocalDate baseDate) {
// if (isLocalProfile()) {
// return;
// }
// 1) 경로/파일명 결정
String targetDir =
"local".equals(profile) ? System.getProperty("user.home") + "/geojson" : trainingDataDir;
log.info("[Step 1-1] geojson 파일 생성할 경로: {}", targetDir);
// 2) 진행중인 회차 중, complete_cnt 가 존재하는 회차 목록 가져오기
log.info("[Step 1-2] 진행중(ING)인 회차 중, 검수완료한(complete_cnt) 갯수가 존재하는 회차 목록 가져오기");
List<AnalCntInfo> analList = trainingDataReviewJobCoreService.findAnalCntInfoList();
log.info("[Step 1-3] 회차 리스트 건수: {}", analList == null ? 0 : analList.size());
if (analList.isEmpty()) {
log.info("[Step 1-4] 회차 리스트 없어 return 하고 종료");
return;
}
for (AnalCntInfo info : analList) {
if (Objects.equals(info.getAllCnt(), info.getFileCnt())) {
continue;
}
String resultUid = info.getResultUid(); // 회차의 대문자 uid (폴더명으로 사용)
// 3) 회차 + 어제까지 검수 완료된 총 데이터의 도엽별 목록 가져오기
log.info("[Step 3-1] 회차 + 어제까지 검수 완료된 총 데이터의 도엽별 목록 가져오기");
log.info(" === 기준일자 baseDate : " + baseDate);
log.info(" === 검수완료일자 < 기준일자인 폴리곤의 도엽 목록을 조회");
List<AnalMapSheetList> analMapList =
trainingDataReviewJobCoreService.findCompletedAnalMapSheetList(
info.getAnalUid(), baseDate);
log.info("=== analMapList cnt: {}", analMapList == null ? 0 : analMapList.size());
if (analMapList.isEmpty()) {
log.info("[Step 3-2] 도엽 목록 조회되지 않아 continue");
continue;
}
log.info("[Step 4-1] 도엽별 geom 데이터 가지고 와서 geojson 만들기 시작");
for (AnalMapSheetList mapSheet : analMapList) {
// 4) 도엽별 geom 데이터 가지고 와서 geojson 만들기
log.info("[Step 4-2] 도엽별 검수완료된 폴리곤 데이터 목록 조회");
List<CompleteLabelData> completeList =
trainingDataReviewJobCoreService.findCompletedYesterdayLabelingList(
info.getAnalUid(), mapSheet.getMapSheetNum(), baseDate);
log.info("=== completeList size: {}", completeList == null ? 0 : completeList.size());
if (!completeList.isEmpty()) {
log.info("[Step 4-3] 목록에서 filter로 geoUid List 생성, 폴리곤 feature별 리스트 생성");
List<Long> geoUids = completeList.stream().map(CompleteLabelData::getGeoUid).toList();
List<GeoJsonFeature> features = completeList.stream().map(GeoJsonFeature::from).toList();
// 5) 파일서버에 uid 폴더 생성 후 업로드 하기
log.info("[Step 5-1] 파일서버에 uid 폴더 생성 후 업로드 하기 시작");
FeatureCollection collection = new FeatureCollection(features);
String filename =
String.format(
@@ -94,16 +109,20 @@ public class TrainingDataReviewJobService {
mapSheet.getTargetYyyy(),
mapSheet.getMapSheetNum());
log.info("=== filename: {}", filename);
log.info("=== 회차의 uid: {}", resultUid);
Path outputPath = Paths.get(targetDir + "/" + resultUid, filename);
log.info("=== outputPath: {}", outputPath);
try {
log.info("[Step 6-1] Uid로 폴더 생성");
Files.createDirectories(outputPath.getParent());
log.info("[Step 6-2] geoJson 파일 생성");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
objectMapper.writeValue(outputPath.toFile(), collection);
// geoUids : file_create_yn = true 로 업데이트
trainingDataReviewJobCoreService.updateLearnDataGeomFileCreateYn(geoUids);
log.info("[Step 6-3] geoJson 파일 생성 완료");
} catch (IOException e) {
log.error(e.getMessage());

View File

@@ -0,0 +1,50 @@
package com.kamco.cd.kamcoback.test;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.scheduler.service.ShpPipelineService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.web.ErrorResponse;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
/**
 * Test-only REST endpoints for manually exercising the shapefile export pipeline.
 *
 * <p>Mounted under {@code /api/test} and intended for developer verification, not
 * production use. All work is delegated to {@link ShpPipelineService}.
 */
@Tag(name = "test shape api", description = "test shape api")
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/test")
public class TestShapeApiController {

    // Pipeline service that performs the actual shapefile generation.
    private final ShpPipelineService shpPipelineService;

    /**
     * Triggers shapefile generation for the given inference run and batch ids.
     *
     * @param inferenceId identifier of the inference run to export
     * @param batchIds batch ids to include in the generated shapefile
     * @return success response echoing the inference id
     */
    @Operation(
            summary = "shapefile 생성 테스트",
            description = "지정된 inference ID와 batch ID 목록으로 shapefile을 생성합니다.")
    @ApiResponses({
        @ApiResponse(
                responseCode = "200",
                description = "shapefile 생성 요청 성공",
                content = @Content(schema = @Schema(implementation = String.class))),
        @ApiResponse(
                responseCode = "400",
                description = "잘못된 요청 데이터",
                content = @Content(schema = @Schema(implementation = ErrorResponse.class))),
        @ApiResponse(
                responseCode = "500",
                description = "서버 오류",
                content = @Content(schema = @Schema(implementation = ErrorResponse.class)))
    })
    @GetMapping("/make-shapefile")
    public ApiResponseDto<String> makeShapeFile(
            @RequestParam String inferenceId, @RequestParam List<Long> batchIds) {
        // Kick off the export; NOTE(review): this appears to run synchronously on the
        // request thread — confirm ShpPipelineService is async if exports are slow.
        shpPipelineService.makeShapeFile(inferenceId, batchIds);
        String message = "Shapefile 생성이 시작되었습니다. inferenceId: " + inferenceId;
        return ApiResponseDto.ok(message);
    }
}

View File

@@ -59,8 +59,8 @@ jwt:
secret: "kamco_token_9b71e778-19a3-4c1d-97bf-2d687de17d5b"
access-token-validity-in-ms: 86400000 # 1일
refresh-token-validity-in-ms: 604800000 # 7일
#access-token-validity-in-ms: 60000 # 1분 (1 minute)
#refresh-token-validity-in-ms: 300000 # 5분 (5 minutes)
#access-token-validity-in-ms: 300000 # 5분 (5 minutes)
#refresh-token-validity-in-ms: 600000 # 10분 (10 minutes)
token:
refresh-cookie-name: kamco-dev # 개발용 쿠키 이름
@@ -100,6 +100,7 @@ inference:
url: http://192.168.2.183:8000/jobs
batch-url: http://192.168.2.183:8000/batches
jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
jar-path-v2: ${inference.nfs}/repo/jar/shp-exporter-v2.jar
inference-server-name: server1,server2,server3,server4
output-dir: ${inference.nfs}/model_output/export

View File

@@ -74,11 +74,13 @@ file:
pt-FileName: v5-best.pt
inference:
nfs: C:/Users/gyparkkamco-nfs
nfs: C:/Users/gypark/kamco-nfs
url: http://10.100.0.11:8000/jobs
batch-url: http://10.100.0.11:8000/batches
jar-path: jar/shp-exporter.jar
jar-path-v2: jar/shp-exporter-v2.jar
inference-server-name: server1,server2,server3,server4
output-dir: ${inference.nfs}/model_output/export
gukyuin:
#url: http://localhost:8080
@@ -90,3 +92,6 @@ layer:
wms-path: geoserver/cd
wmts-path: geoserver/cd/gwc/service
workspace: cd
training-data:
geojson-dir: /kamco-nfs/dataset/request/

View File

@@ -88,12 +88,15 @@ file:
nfs: /data
pt-path: ${file.nfs}/ckpt/model/v6-cls-checkpoints/
pt-FileName: yolov8_6th-6m.pt
output-dir: /kamco-nfs/dataset/export/
inference:
nfs: /data
url: http://172.16.4.56:8000/jobs
batch-url: http://172.16.4.56:8000/batches
jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
# 0312: jar-path-v2 added (v2 shp-exporter jar) — date marker, presumably 2026-03-12
jar-path-v2: ${inference.nfs}/repo/jar/shp-exporter-v2.jar
inference-server-name: server1,server2,server3,server4
output-dir: ${inference.nfs}/model_output/export

View File

@@ -88,3 +88,6 @@ inference:
nfs: /kamco-nfs
geojson-dir: ${inference.nfs}/requests/ # 추론실행을 위한 파일생성경로
jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
# 0312: jar-path-v2 added (v2 shp-exporter jar) — date marker, presumably 2026-03-12
jar-path-v2: ${inference.nfs}/repo/jar/shp-exporter-v2.jar