354 Commits

Author SHA1 Message Date
0425a6486d Merge pull request 'shp 테스트' (#160) from feat/infer_dev_260211 into develop
Reviewed-on: #160
2026-03-25 14:07:21 +09:00
241c7222d1 shp 테스트 2026-03-25 14:07:06 +09:00
71e4ab14bd Merge pull request 'shp 테스트' (#159) from feat/infer_dev_260211 into develop
Reviewed-on: #159
2026-03-25 14:01:26 +09:00
52da4dafc3 shp 테스트 2026-03-25 14:01:02 +09:00
24d615174d Merge pull request '토큰 정보 변경' (#158) from feat/infer_dev_260211 into develop
Reviewed-on: #158
2026-03-19 13:48:56 +09:00
12b0f0867d 토큰 정보 변경 2026-03-19 13:48:14 +09:00
283d906da6 Merge pull request '토큰시간 변경' (#157) from feat/infer_dev_260211 into develop
Reviewed-on: #157
2026-03-19 10:31:05 +09:00
360b451c38 토큰시간 변경 2026-03-19 10:30:42 +09:00
80e281cb99 Merge pull request '개발서버 토큰 시간 변경' (#156) from feat/infer_dev_260211 into develop
Reviewed-on: #156
2026-03-18 14:02:29 +09:00
b07bc38ee8 개발서버 토큰 시간 변경 2026-03-18 14:01:56 +09:00
e4c1c76b2b Merge pull request '재할당 transactional 추가' (#155) from feat/infer_dev_260211 into develop
Reviewed-on: #155
2026-03-17 17:47:43 +09:00
01b64eeca7 재할당 transactional 추가 2026-03-17 17:47:15 +09:00
516f949a37 Merge pull request 'spotless 적용' (#154) from feat/infer_dev_260211 into develop
Reviewed-on: #154
2026-03-17 14:00:22 +09:00
b6ed3b57ef spotless 적용 2026-03-17 14:00:04 +09:00
9eebf23774 Merge pull request 'dev 토큰 유효시간 변경, 인증 Log 추가한거 삭제' (#153) from feat/infer_dev_260211 into develop
Reviewed-on: #153
2026-03-17 13:55:53 +09:00
4f742edf8b dev 토큰 유효시간 변경, 인증 Log 추가한거 삭제 2026-03-17 13:55:30 +09:00
0aa415cf3a Merge pull request '토큰 생성 로그 추가' (#152) from feat/infer_dev_260211 into develop
Reviewed-on: #152
2026-03-17 13:33:34 +09:00
884b635585 토큰 생성 로그 추가 2026-03-17 13:33:17 +09:00
6861f6b8b6 Merge pull request '메뉴 권한 Log 추가' (#151) from feat/infer_dev_260211 into develop
Reviewed-on: #151
2026-03-17 13:22:06 +09:00
a97af0d4dd 메뉴 권한 Log 추가 2026-03-17 13:21:46 +09:00
9297d19e24 Merge pull request 'refresh 토큰 인증 로그 추가' (#150) from feat/infer_dev_260211 into develop
Reviewed-on: #150
2026-03-17 12:46:12 +09:00
65c38b3083 refresh 토큰 인증 로그 추가 2026-03-17 12:45:56 +09:00
24dca652f0 Merge pull request 'refresh 토큰 재발급 로그 추가' (#149) from feat/infer_dev_260211 into develop
Reviewed-on: #149
2026-03-17 12:26:31 +09:00
193cd449a8 refresh 토큰 재발급 로그 추가 2026-03-17 12:23:45 +09:00
dean
0efde4e5bb access-token 2026-03-17 07:49:13 +09:00
dean
548d82da12 access-token 2026-03-17 07:20:08 +09:00
536ff8fc65 추론 종료 async 수정 2026-03-13 15:52:20 +09:00
1dc1ce741e 추론 종료 비동기 호출 2026-03-13 15:38:11 +09:00
d21ed61666 hello 2026-03-12 08:22:39 +09:00
c9a1007c21 hello 2026-03-12 08:08:59 +09:00
413afb0b7c shp파일 바꾸는로직정리 2026-03-12 08:08:11 +09:00
e69eccc82b shp파일 바꾸는로직정리 2026-03-12 07:57:14 +09:00
828a4c5dca shp파일 바꾸는로직정리 2026-03-12 07:31:54 +09:00
5d417d85ff Merge pull request '국유인연동 알럿 문구 변경' (#148) from feat/infer_dev_260211 into develop
Reviewed-on: #148
2026-03-09 10:52:25 +09:00
614d6da695 국유인연동 알럿 문구 변경 2026-03-09 10:51:57 +09:00
8d45e91982 Merge pull request '국유인연동 : 라벨링 진행중 회차 uuid 추가' (#147) from feat/infer_dev_260211 into develop
Reviewed-on: #147
2026-03-09 10:10:52 +09:00
e1febf5863 국유인연동 : 라벨링 진행중 회차 uuid 추가 2026-03-09 10:10:08 +09:00
97fb659f15 enum @notBlank제거 2026-03-07 01:43:07 +09:00
ebb48c3f57 enum @notBlank제거 2026-03-07 01:41:34 +09:00
a35b4b8f59 enum @notBlank제거 2026-03-07 01:24:55 +09:00
0a53e186dc enum @notBlank제거 2026-03-07 01:21:30 +09:00
f3c7c5e8e8 enum @notBlank제거 2026-03-07 01:13:28 +09:00
c505e9b740 @enumvalid제거 2026-03-07 01:11:56 +09:00
8c2f2eff1b enum @notBlank제거 2026-03-07 01:08:21 +09:00
ade8bfa76a Merge pull request 'feat/dean/polishing_2' (#146) from feat/dean/polishing_2 into develop
Reviewed-on: #146
2026-03-07 01:02:58 +09:00
3752b83292 인퍼런스실행 정리 2026-03-07 00:56:51 +09:00
9f31f661fc 원복 2026-03-06 22:12:54 +09:00
b477928261 Merge pull request 'testing 테이블 조회 조건 추가' (#145) from feat/infer_dev_260211 into develop
Reviewed-on: #145
2026-03-06 19:51:41 +09:00
f4f75f353c testing 테이블 조회 조건 추가 2026-03-06 19:51:08 +09:00
f977e4be7c Merge pull request 'shp 파일 생성중 오류 발생시 중지되게 수정' (#144) from feat/infer_dev_260211 into develop
Reviewed-on: #144
2026-03-06 19:29:27 +09:00
573da5b53a shp 파일 생성중 오류 발생시 중지되게 수정 2026-03-06 19:21:08 +09:00
bd72852556 Merge pull request '추론실행 오류 수정' (#143) from feat/infer_dev_260211 into develop
Reviewed-on: #143
2026-03-06 18:54:16 +09:00
e4b904606f 추론실행 오류 수정 2026-03-06 18:53:38 +09:00
0d14dafecc Merge pull request 'feat/infer_dev_260211' (#142) from feat/infer_dev_260211 into develop
Reviewed-on: #142
2026-03-06 18:05:00 +09:00
37f534abff spotless 2026-03-06 18:04:38 +09:00
3521a5fd3d trim, foundUnique.add 추가 2026-03-06 18:04:23 +09:00
cbae052338 Merge branch 'develop' into feat/infer_dev_260211
# Conflicts:
#	src/main/java/com/kamco/cd/kamcoback/common/inference/utils/GeoJsonValidator.java
2026-03-06 18:02:44 +09:00
b2c9c36d4c trim, log 추가 2026-03-06 17:58:23 +09:00
114088469e 추론실행 테스트 2026-03-06 17:53:01 +09:00
7d6dca8b24 Merge pull request '추론실행 테스트' (#138) from feat/infer_dev_260211 into develop
Reviewed-on: #138
2026-03-06 17:42:58 +09:00
2e7ad26528 추론실행 테스트 2026-03-06 17:42:40 +09:00
0353e172ed Merge pull request 'feat/infer_dev_260211' (#137) from feat/infer_dev_260211 into develop
Reviewed-on: #137
2026-03-06 17:15:20 +09:00
1d5b1343a9 file del 추가 2026-03-06 17:14:44 +09:00
65f9026922 학습서버배포정보 수정 2026-03-06 16:02:18 +09:00
9b79f31d7b 줄맞춤 2026-03-06 16:01:28 +09:00
de45bf47c5 review-to-geojson 로직 수정, 미사용 주석 추가 2026-03-06 13:16:08 +09:00
a413de4b93 학습데이터 상세 > 폴리곤 수 로직 수정 2026-03-06 13:12:33 +09:00
815675f112 Merge pull request 'inference_results 결과 저장 중복방지' (#136) from feat/infer_dev_260211 into develop
Reviewed-on: #136
2026-03-06 12:43:56 +09:00
b9f7e36175 inference_results 결과 저장 중복방지 2026-03-06 12:43:36 +09:00
855aca6e5a Merge pull request 'feat/infer_dev_260211' (#135) from feat/infer_dev_260211 into develop
Reviewed-on: #135
2026-03-06 12:25:04 +09:00
206dba6ff9 추론결과 저장 result 조회 조건 수정 2026-03-06 12:24:52 +09:00
5db9127f0c 추론결과 저장 result 조회 저장 테스트 2026-03-06 12:23:16 +09:00
132bad8c33 Merge pull request '추론결과 저장 result 조회 저장 테스트' (#134) from feat/infer_dev_260211 into develop
Reviewed-on: #134
2026-03-06 12:00:01 +09:00
6dde4cd891 추론결과 저장 result 조회 저장 테스트 2026-03-06 11:59:38 +09:00
ac248c2f30 Merge pull request '추론결과 저장 result 조회 조건 변경' (#133) from feat/infer_dev_260211 into develop
Reviewed-on: #133
2026-03-06 11:10:04 +09:00
15d082af0e 추론결과 저장 result 조회 조건 변경 2026-03-06 11:09:34 +09:00
3be536424a Merge pull request 'feat/infer_dev_260211' (#132) from feat/infer_dev_260211 into develop
Reviewed-on: #132
2026-03-05 11:33:24 +09:00
a3b2fd0c73 status 체크 후 pnu cnt update 로 수정 2026-03-05 11:32:43 +09:00
9b504396bc status 체크 후 pnu cnt update 추가 2026-03-05 09:39:00 +09:00
2cc490012e Merge pull request 'feat/infer_dev_260211' (#131) from feat/infer_dev_260211 into develop
Reviewed-on: #131
2026-03-04 23:01:14 +09:00
f99144eccc pnu cnt update 수동호출 추가 2026-03-04 23:00:46 +09:00
66b78022a9 변화탐지 selected polygon, point 조건 수정 2026-03-04 22:59:59 +09:00
199e302068 Merge pull request '실태조사 적합여부 값 수정' (#130) from feat/infer_dev_260211 into develop
Reviewed-on: #130
2026-03-04 21:01:52 +09:00
5a3370d5fb 실태조사 적합여부 값 수정 2026-03-04 21:01:21 +09:00
5d7cb18fb8 줄맞춤 2026-03-04 20:05:00 +09:00
57baf4d911 국유인의타입추가 2026-03-04 19:58:02 +09:00
2fee2143ba Merge pull request 'pnu 로 polygon, point 조회 API 수정' (#129) from feat/infer_dev_260211 into develop
Reviewed-on: #129
2026-03-04 17:34:46 +09:00
f4a410ba49 pnu 로 polygon, point 조회 API 수정 2026-03-04 17:34:12 +09:00
deba2b84f0 Merge pull request '추론결과 상세 조회 api 시간 log 추가' (#128) from feat/infer_dev_260211 into develop
Reviewed-on: #128
2026-03-04 17:04:25 +09:00
85d8efc493 추론결과 상세 조회 api 시간 log 추가
- 추론결과 기본정보
- 분류별 탐지건수 조회
- 추론관리 분석결과 geom 상세 목록
2026-03-04 17:03:50 +09:00
5a82a9507a Merge pull request 'unfit 업데이트 수정' (#127) from feat/infer_dev_260211 into develop
Reviewed-on: #127
2026-03-04 13:21:37 +09:00
b9a268d43a unfit 업데이트 수정 2026-03-04 13:20:55 +09:00
4d1ea53d8f 변화탐지 pnu로도 조회 가능하게 하기 2026-03-04 08:04:04 +09:00
278d5d20e8 restTemplate read timeout, connect timeout 늘리기 2026-03-03 17:07:19 +09:00
4d9fa2d6a5 좌표계 5179 추가 2026-03-03 15:03:44 +09:00
e28e5ba371 국유인 반영 파일 경로 수정 2026-03-03 14:23:24 +09:00
22cc996399 Merge pull request 'feat/infer_dev_260211' (#126) from feat/infer_dev_260211 into develop
Reviewed-on: #126
2026-02-28 00:34:35 +09:00
e32f5e33d9 추론 기본 정보에 상태 추가 2026-02-28 00:34:03 +09:00
f95b82bf7f 스케줄링에 log 추가 완료 2026-02-28 00:23:48 +09:00
060a0db126 Merge pull request 'feat/infer_dev_260211' (#125) from feat/infer_dev_260211 into develop
Reviewed-on: #125
2026-02-27 22:53:07 +09:00
0692456b5b 학습데이터제작 > 작업현황 > 폴리곤 수 추가 2026-02-27 22:52:45 +09:00
8fb9f89c8f 국유인 실태조사 적합여부 임의로 업데이트 API 2026-02-27 22:15:31 +09:00
1cfe6e923f Merge remote-tracking branch 'origin/feat/infer_dev_260211' into feat/infer_dev_260211 2026-02-27 21:04:24 +09:00
af2721949c shp 생성 컨트롤러 이동, 주석추가 2026-02-27 21:04:18 +09:00
e9b9075c72 국유인 실태조사 job 에 log 찍기 추가 2026-02-27 20:37:56 +09:00
2b28241110 Merge pull request '국유인 연동 전 파일 경로 확인 log 추가' (#124) from feat/infer_dev_260211 into develop
Reviewed-on: #124
2026-02-27 19:41:26 +09:00
8e002f5673 국유인 연동 전 파일 경로 확인 log 추가 2026-02-27 19:40:10 +09:00
972cf808a9 Merge pull request '학습데이터 관리 목록 순서 변경, 주석추가' (#123) from feat/infer_dev_260211 into develop
Reviewed-on: #123
2026-02-27 18:54:29 +09:00
086eb20e8d 학습데이터 관리 목록 순서 변경, 주석추가 2026-02-27 18:54:03 +09:00
c3c194c067 Merge pull request '국유인 실태조사 bulk 확인' (#122) from feat/infer_dev_260211 into develop
Reviewed-on: #122
2026-02-27 17:02:22 +09:00
0acaeaac09 국유인 실태조사 bulk 확인 2026-02-27 17:01:54 +09:00
53ce735bca Merge pull request '국유인 체크 url 수정' (#121) from feat/infer_dev_260211 into develop
Reviewed-on: #121
2026-02-27 16:24:11 +09:00
a341be7ed6 국유인 체크 url 수정 2026-02-27 16:23:50 +09:00
9f6dc2b3c6 Merge pull request '국유인 연동 url 수정' (#120) from feat/infer_dev_260211 into develop
Reviewed-on: #120
2026-02-27 16:01:32 +09:00
cc46315e3a 국유인 연동 url 수정 2026-02-27 16:01:05 +09:00
ca631d5d58 Merge pull request '변화지도 도엽 목록에 도엽별bbox 추가' (#119) from feat/infer_dev_260211 into develop
Reviewed-on: #119
2026-02-27 15:31:52 +09:00
5d0590bd3c 변화지도 도엽 목록에 도엽별bbox 추가 2026-02-27 15:31:33 +09:00
3aed3cf1ec Merge pull request 'feat/infer_dev_260211' (#118) from feat/infer_dev_260211 into develop
Reviewed-on: #118
2026-02-27 15:14:37 +09:00
396e76c362 국유인 연동 outputdir 경로 변경 2026-02-27 15:14:18 +09:00
9f379c6dc3 영상관리, 도엽관리 API만 우선 정리함 2026-02-27 14:07:44 +09:00
d069981c8f 패스워드 변경 권한 수정 2026-02-27 14:01:54 +09:00
ece70f1d68 Merge remote-tracking branch 'origin/feat/infer_dev_260211' into feat/infer_dev_260211 2026-02-27 14:01:21 +09:00
b7470d11d4 추론 주석 추가 2026-02-27 14:01:14 +09:00
a7108c44f4 Merge pull request '비밀번호 변경 security 설정 수정' (#117) from feat/infer_dev_260211 into develop
Reviewed-on: #117
2026-02-27 14:00:11 +09:00
5039dd0f51 비밀번호 변경 security 설정 수정 2026-02-27 13:59:47 +09:00
9cb3a100aa Merge pull request '주석 추가, 패스워드변경 권한 수정' (#116) from feat/infer_dev_260211 into develop
Reviewed-on: #116
2026-02-27 13:41:32 +09:00
dc42baf91a 주석 추가, 패스워드변경 권한 수정 2026-02-27 13:41:17 +09:00
9d36208845 Merge pull request '국유인 반영 부분 막기 해제' (#115) from feat/infer_dev_260211 into develop
Reviewed-on: #115
2026-02-27 11:41:57 +09:00
ae4c1c61e8 국유인 반영 부분 막기 해제 2026-02-27 11:40:59 +09:00
369f303f6c Merge pull request 'feat/infer_dev_260211' (#114) from feat/infer_dev_260211 into develop
Reviewed-on: #114
2026-02-27 10:09:04 +09:00
96d4bb4af3 주석 제거 2026-02-27 10:08:39 +09:00
7b55204ae1 추론 api 호출시 대용량이면 3초 timeout 문제
60초 long restTemplate 추가
2026-02-27 10:06:39 +09:00
f54655c191 추론 수정 2026-02-27 09:25:53 +09:00
f4cbb48aa2 Merge pull request '추론 수정' (#113) from feat/infer_dev_260211 into develop
Reviewed-on: #113
2026-02-27 09:15:56 +09:00
fbad8d1cd3 추론 수정 2026-02-27 09:15:38 +09:00
6e4682dad6 Merge pull request '추론 수정' (#112) from feat/infer_dev_260211 into develop
Reviewed-on: #112
2026-02-27 08:29:22 +09:00
4629715443 추론 수정 2026-02-27 08:28:59 +09:00
5ba7f9ccfc Merge pull request '추론 수정' (#111) from feat/infer_dev_260211 into develop
Reviewed-on: #111
2026-02-27 00:03:26 +09:00
264dae3ba9 추론 수정 2026-02-27 00:03:03 +09:00
fab5b02211 Merge pull request '추론 수정' (#110) from feat/infer_dev_260211 into develop
Reviewed-on: #110
2026-02-26 23:55:53 +09:00
38f70b933f 추론 수정 2026-02-26 23:55:29 +09:00
a4b1db462b Merge pull request '추론 수정' (#109) from feat/infer_dev_260211 into develop
Reviewed-on: #109
2026-02-26 23:42:09 +09:00
2bf7c42a3f 추론 수정 2026-02-26 23:41:49 +09:00
a1be4e9faf Merge pull request '추론 수정' (#108) from feat/infer_dev_260211 into develop
Reviewed-on: #108
2026-02-26 23:18:50 +09:00
8904de0e3d 추론 수정 2026-02-26 23:18:31 +09:00
a21df9d018 중복 경고 2026-02-26 18:38:56 +09:00
85cad2dd28 중복 경고 2026-02-26 18:34:29 +09:00
7b8bf8726b 중복 경고 2026-02-26 18:29:03 +09:00
c841d460aa 중복 경고 2026-02-26 18:26:40 +09:00
3a8ac3a24f Merge pull request 'ensureAccepted 함수 InferenceCommonService 생성후 공통으로 생성' (#107) from feat/infer_dev_260211 into develop
Reviewed-on: #107
2026-02-26 16:56:04 +09:00
046f4f06d3 기준년도 없을때 throw 추가 2026-02-26 16:53:29 +09:00
5c9f33d210 Merge branch 'develop' of https://kamco.git.gs.dabeeo.com/MVPTeam/kamco-cd-api into feat/infer_dev_260211 2026-02-26 16:31:33 +09:00
ea7e98d28e ensureAccepted 함수 InferenceCommonService 생성후 공통으로 생성 2026-02-26 16:30:52 +09:00
3e78f744a4 Merge pull request 'shp파일 a 링크로 다운로드할때 이력 저장 변경' (#106) from feat/infer_dev_260211 into develop
Reviewed-on: #106
2026-02-26 15:27:17 +09:00
cea1f01ed9 shp파일 a 링크로 다운로드할때 이력 저장 변경 2026-02-26 15:26:21 +09:00
d7f2d22b93 Merge branch 'develop' of https://kamco.git.gs.dabeeo.com/MVPTeam/kamco-cd-api into feat/infer_dev_260211 2026-02-26 15:25:21 +09:00
eccdfb17e6 shp파일 a 링크로 다운로드할때 이력 저장 변경 2026-02-26 15:25:10 +09:00
d2fa86a89f Merge pull request 'merge develop_add_log' (#105) from feat/dean/add_log into develop
Reviewed-on: #105
2026-02-26 13:22:05 +09:00
32d56cf8fe merge develop_add_log 2026-02-26 13:20:11 +09:00
c3b7daebb7 Merge pull request '미사용 쿼리 삭제, 추론 주석 추가, api/inference/download/ 권한 제거 수정' (#104) from feat/infer_dev_260211 into develop
Reviewed-on: #104
2026-02-26 12:36:40 +09:00
2188d426d4 미사용 쿼리 삭제, 추론 주석 추가, api/inference/download/ 권한 제거 수정 2026-02-26 12:36:02 +09:00
5c2ee0974b Merge pull request '추론실행 수정, develop pull 반영, 국유인 파일경로 dir 하드코딩 수정' (#103) from feat/infer_dev_260211 into develop
Reviewed-on: #103
2026-02-26 12:21:06 +09:00
7980fe1d42 InferenceRunService 미사용 삭제 및 추론 실행
추론제외, 이전연도 사용 별로 분기처리
2026-02-26 12:16:48 +09:00
c10141e915 Merge branch 'develop' of https://kamco.git.gs.dabeeo.com/MVPTeam/kamco-cd-api into feat/infer_dev_260211
# Conflicts:
#	src/main/java/com/kamco/cd/kamcoback/gukyuin/service/GukYuinApiService.java
2026-02-26 12:12:50 +09:00
97565c5369 Merge pull request 'feat/dean/test2_temp' (#102) from feat/dean/test2_temp into develop
Reviewed-on: #102
2026-02-26 11:58:48 +09:00
30f0e1a885 merge develop 2026-02-26 11:57:52 +09:00
ba562261c3 /api/inference/download 로그인 없이 접근 가능하도록 수정 2026-02-26 11:56:21 +09:00
a084c80715 Merge remote-tracking branch 'origin/feat/infer_dev_260211' into feat/infer_dev_260211 2026-02-26 11:53:01 +09:00
a44e93c234 추론 실행 수정 및 주석 추가 작업중 2026-02-26 11:52:56 +09:00
a63b81008a inference_hard_coding 2026-02-26 11:52:51 +09:00
2309357c0d 파일경로를 application.yml에서 가져올수있게 동적으로 처리 (#100)
Reviewed-on: #100
Co-authored-by: dean[백병남] <byungnam.baek@dabeeo.com>
Co-committed-by: dean[백병남] <byungnam.baek@dabeeo.com>
2026-02-26 11:49:49 +09:00
ee76389d6c 파일경로를 application.yml에서 가져올수있게 동적으로 처리 2026-02-26 11:46:17 +09:00
7b15e5bb8c Merge branch 'develop' into feat/infer_dev_260211
# Conflicts:
#	src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceRunService.java
2026-02-26 11:36:40 +09:00
001ad73de7 국유인 파일경로 dir 하드코딩 수정 2026-02-26 11:34:02 +09:00
2508f59a72 운영환경일때 ai팀경로수정 2026-02-26 10:36:10 +09:00
f2307ff0f4 운영환경일때 ai팀경로수정 2026-02-26 10:29:10 +09:00
6f44319d33 운영환경일때 ai팀경로수정 2026-02-26 10:24:29 +09:00
cefacb291b develop pull 2026-02-26 09:54:43 +09:00
744cbb55a9 Merge branch 'develop' of https://kamco.git.gs.dabeeo.com/MVPTeam/kamco-cd-api into feat/infer_dev_260211
# Conflicts:
#	src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceSendDto.java
#	src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceRunService.java
2026-02-26 09:53:11 +09:00
4a120ae5fd 운영환경일때 ai팀경로수정 2026-02-26 09:23:00 +09:00
7c200b057a 운영환경일때 ai팀경로수정 2026-02-26 08:36:53 +09:00
8ac0a00311 운영환경일때 ai팀경로수정 2026-02-26 08:33:53 +09:00
4863091406 추론 run 수정 2026-02-25 22:50:38 +09:00
70c28e0b54 추론 run 수정 2026-02-25 22:40:30 +09:00
9197819340 추론 run 수정 2026-02-25 22:39:49 +09:00
f2500c33e6 Merge remote-tracking branch 'origin/feat/infer_dev_260211' into feat/infer_dev_260211 2026-02-25 22:30:46 +09:00
18dc831b05 추론 run 수정 2026-02-25 22:30:36 +09:00
48b46035fd Merge branch 'develop' of https://kamco.git.gs.dabeeo.com/MVPTeam/kamco-cd-api into develop 2026-02-25 21:45:44 +09:00
1b9c7faf22 aibabo 2026-02-25 21:45:11 +09:00
fcdba49430 Merge pull request '추론 run 추가' (#98) from feat/infer_dev_260211 into develop
Reviewed-on: #98
2026-02-25 19:15:19 +09:00
7599c99025 추론 run 추가 2026-02-25 19:15:02 +09:00
8fd1948d7c Merge pull request '추론 run 추가' (#97) from feat/infer_dev_260211 into develop
Reviewed-on: #97
2026-02-25 18:44:24 +09:00
2c1047a014 추론 run 추가 2026-02-25 18:44:04 +09:00
8c54e5c176 Merge pull request '추론 run 추가' (#96) from feat/infer_dev_260211 into develop
Reviewed-on: #96
2026-02-25 18:41:52 +09:00
d3faa87d4f 추론 run 추가 2026-02-25 18:39:32 +09:00
8d8d9d7a9f Merge pull request '추론 run 추가' (#95) from feat/infer_dev_260211 into develop
Reviewed-on: #95
2026-02-25 18:03:42 +09:00
9c3d6c01f7 추론 run 추가 2026-02-25 18:02:54 +09:00
02b9a97ee8 Merge pull request 'feat/infer_dev_260211' (#94) from feat/infer_dev_260211 into develop
Reviewed-on: #94
2026-02-25 17:50:15 +09:00
438fb3ec9b Merge remote-tracking branch 'origin/feat/infer_dev_260211' into feat/infer_dev_260211 2026-02-25 17:49:09 +09:00
3105b60759 추론 run 추가 2026-02-25 17:49:00 +09:00
5dddafbe0c inference_hard_coding 2026-02-25 13:34:58 +09:00
c2872c7748 inference_hard_coding 2026-02-25 13:28:26 +09:00
7128eb007e polishing 2026-02-24 20:14:47 +09:00
815ee57e06 Merge branch 'develop' of https://kamco.git.gs.dabeeo.com/MVPTeam/kamco-cd-api into develop 2026-02-24 20:09:36 +09:00
ab52256c05 add cdms nginx information for production 2026-02-24 20:08:21 +09:00
3ee3cf8425 Merge pull request '변화탐지 분류 API 보안 해제' (#93) from feat/infer_dev_260211 into develop
Reviewed-on: #93
2026-02-24 19:45:55 +09:00
ba11e4c801 변화탐지 분류 API 보안 해제 2026-02-24 19:45:36 +09:00
14248b29e7 Merge pull request '영상관리 자동추론제외 로직 수정' (#92) from feat/infer_dev_260211 into develop
Reviewed-on: #92
2026-02-24 18:21:23 +09:00
e95bea7d29 영상관리 자동추론제외 로직 수정 2026-02-24 18:21:00 +09:00
a4c3fc5185 Merge pull request 'feat/infer_dev_260211' (#91) from feat/infer_dev_260211 into develop
Reviewed-on: #91
2026-02-24 18:00:55 +09:00
d391a73197 년도 타일 url 수정 2026-02-24 18:00:32 +09:00
fdbda7d945 스케줄 어노테이션 주석, 로컬에서도 실행되도록 수정 2026-02-24 15:32:52 +09:00
d36703fd84 Merge pull request '스케줄링 수동 호출, 영상관리 싱크 자동추론제외 수정' (#90) from feat/infer_dev_260211 into develop
Reviewed-on: #90
2026-02-24 15:06:44 +09:00
9ffab423c8 스케줄링 수동 호출, 영상관리 싱크 자동추론제외 수정 2026-02-24 15:05:59 +09:00
496f9c562d Merge pull request '선택 폴리곤조회 api 수정' (#89) from feat/infer_dev_260211 into develop
Reviewed-on: #89
2026-02-24 14:57:26 +09:00
2720cc3766 선택 폴리곤조회 api 수정 2026-02-24 14:56:56 +09:00
72778d6996 Merge pull request '회차 Uid로 Uuid 조회 api 추가' (#88) from feat/infer_dev_260211 into develop
Reviewed-on: #88
2026-02-24 12:37:19 +09:00
514b07356e 회차 Uid로 Uuid 조회 api 추가 2026-02-24 12:37:01 +09:00
85834f2221 Merge pull request '선택 폴리곤, 포인트 정보 api 추가' (#87) from feat/infer_dev_260211 into develop
Reviewed-on: #87
2026-02-24 12:18:25 +09:00
c93d40f3f3 선택 폴리곤, 포인트 정보 api 추가 2026-02-24 12:17:20 +09:00
74e6485930 dd 2026-02-24 11:42:38 +09:00
8cb8632a51 Merge pull request 'feat/infer_dev_260211' (#86) from feat/infer_dev_260211 into develop
Reviewed-on: #86
2026-02-23 19:59:37 +09:00
190ba525d5 Merge remote-tracking branch 'origin/feat/infer_dev_260211' into feat/infer_dev_260211 2026-02-23 19:59:21 +09:00
70e01a2044 변화지도 uuid 조회 기능 추가 2026-02-23 19:59:15 +09:00
fad797eea4 Merge pull request '국유인 배치일 때 reqEpno BATCH로 하기' (#85) from feat/infer_dev_260211 into develop
Reviewed-on: #85
2026-02-23 19:35:44 +09:00
9ee1ec94c0 국유인 배치일 때 reqEpno BATCH로 하기 2026-02-23 19:29:17 +09:00
670cedda59 Merge pull request '추론실행 에러로그 추가' (#84) from feat/infer_dev_260211 into develop
Reviewed-on: #84
2026-02-23 16:36:26 +09:00
3683c193d4 Merge remote-tracking branch 'origin/feat/infer_dev_260211' into feat/infer_dev_260211 2026-02-23 16:35:58 +09:00
a2293ad1ab 추론실행 에러로그 추가 2026-02-23 16:35:52 +09:00
78fe7f013b Merge pull request '파일 목록 한글,공백 조건 주석 처리' (#83) from feat/infer_dev_260211 into develop
Reviewed-on: #83
2026-02-23 15:07:11 +09:00
22c3b28237 파일 목록 한글,공백 조건 주석 처리 2026-02-23 15:06:22 +09:00
48fa13615e Merge pull request '라벨링 추가할당 API 추가, 라벨링툴 목록 도엽순으로 소팅' (#82) from feat/infer_dev_260211 into develop
Reviewed-on: #82
2026-02-20 18:25:18 +09:00
8d7ddc4c33 라벨링 추가할당 API 추가, 라벨링툴 목록 도엽순으로 소팅 2026-02-20 18:24:40 +09:00
1f9d6861a0 Merge pull request 'M->G 변환' (#81) from feat/infer_dev_260211 into develop
Reviewed-on: #81
2026-02-20 12:19:10 +09:00
b859a56ab0 Merge remote-tracking branch 'origin/feat/infer_dev_260211' into feat/infer_dev_260211 2026-02-20 12:18:51 +09:00
84b2149f78 M->G 변환 2026-02-20 12:18:20 +09:00
4b04fb64ec Merge branch 'develop' of https://kamco.git.gs.dabeeo.com/MVPTeam/kamco-cd-api into develop 2026-02-20 11:25:49 +09:00
df0c689243 dd 2026-02-20 11:25:06 +09:00
827f701186 Merge pull request 'feat/infer_dev_260211' (#80) from feat/infer_dev_260211 into develop
Reviewed-on: #80
2026-02-20 11:12:25 +09:00
db897268de 비밀번호 변경 API 시큐리티 로직 수정 2026-02-20 11:11:39 +09:00
4dc5c196ca 추론실행 변경 2026-02-20 09:46:47 +09:00
ea74203667 add log 2026-02-19 22:18:36 +09:00
9421df2b9b dd 2026-02-19 22:17:55 +09:00
2a3bf9852d Merge pull request '모델 M1,M2,M3를 G1,G2,G3 으로 변경(추론실행 포함)' (#79) from feat/infer_dev_260211 into develop
Reviewed-on: #79
2026-02-19 16:08:33 +09:00
3f1bb8f082 모델 M1,M2,M3를 G1,G2,G3 으로 변경(추론실행 포함) 2026-02-19 16:07:59 +09:00
21ac562fd5 Merge pull request '모델타입 M -> G로 수정 다시 원복' (#78) from feat/infer_dev_260211 into develop
Reviewed-on: #78
2026-02-18 16:56:53 +09:00
778e87383c 모델타입 M -> G로 수정 다시 원복 2026-02-18 16:56:37 +09:00
aac8c91cd0 Merge pull request '모델타입 M -> G로 수정' (#77) from feat/infer_dev_260211 into develop
Reviewed-on: #77
2026-02-18 16:49:27 +09:00
38c4fbf4e5 모델타입 M -> G로 수정 2026-02-18 16:48:56 +09:00
b8fc314bff Merge pull request '라벨링툴 count 종료된 회차는 count하지 않게 조건 추가' (#76) from feat/infer_dev_260206 into develop
Reviewed-on: #76
2026-02-12 18:28:38 +09:00
a2bb1b2442 라벨링툴 count 종료된 회차는 count하지 않게 조건 추가 2026-02-12 18:03:34 +09:00
4e2e5c0b1d Merge pull request 'open-in-view: false' (#75) from feat/infer_dev_260211 into develop
Reviewed-on: #75
2026-02-11 16:33:50 +09:00
fd1ba1ef3b 대용량 다운로드 테스트 2026-02-11 16:33:21 +09:00
6b65dbdc75 Merge pull request 'feat/infer_dev_260211' (#74) from feat/infer_dev_260211 into develop
Reviewed-on: #74
2026-02-11 16:23:42 +09:00
2d2b55efcd 대용량 다운로드 테스트 2026-02-11 16:23:24 +09:00
ac13f36663 대용량 다운로드 테스트 2026-02-11 15:55:27 +09:00
82f08c4240 Merge pull request '대용량 다운로드 테스트' (#73) from feat/infer_dev_260211 into develop
Reviewed-on: #73
2026-02-11 15:52:37 +09:00
e15b35943b 대용량 다운로드 테스트 2026-02-11 15:52:19 +09:00
8bdccfdce6 Merge pull request '대용량 다운로드 테스트' (#72) from feat/infer_dev_260211 into develop
Reviewed-on: #72
2026-02-11 15:34:38 +09:00
e209eeb826 대용량 다운로드 테스트 2026-02-11 15:34:10 +09:00
3aca011104 Merge pull request '대용량 다운로드 테스트' (#71) from feat/infer_dev_260211 into develop
Reviewed-on: #71
2026-02-11 15:18:04 +09:00
2c320194b4 대용량 다운로드 테스트 2026-02-11 15:17:43 +09:00
3f6737706a Merge pull request '대용량 다운로드 수정' (#70) from feat/infer_dev_260211 into develop
Reviewed-on: #70
2026-02-11 13:54:24 +09:00
0df7d7c5cf 대용량 다운로드 수정 2026-02-11 13:54:10 +09:00
3724528ea9 Merge pull request '대용량 다운로드 수정' (#69) from feat/infer_dev_260211 into develop
Reviewed-on: #69
2026-02-11 13:46:43 +09:00
9885c19b50 대용량 다운로드 수정 2026-02-11 13:46:28 +09:00
079a899822 Merge pull request '대용량 다운로드 수정' (#68) from feat/infer_dev_260211 into develop
Reviewed-on: #68
2026-02-11 12:35:04 +09:00
5b09b2e29a 대용량 다운로드 수정 2026-02-11 12:34:51 +09:00
58a73de9ab Merge pull request '대용량 다운로드 테스트 html 추가' (#67) from feat/infer_dev_260211 into develop
Reviewed-on: #67
2026-02-11 12:16:52 +09:00
4cbd2b8d76 대용량 다운로드 테스트 html 추가 2026-02-11 12:16:32 +09:00
f4a890bec8 Merge pull request '대용량 다운로드 테스트 html 추가' (#66) from feat/infer_dev_260211 into develop
Reviewed-on: #66
2026-02-11 12:01:47 +09:00
89504e4156 대용량 다운로드 테스트 html 추가 2026-02-11 12:01:18 +09:00
783609b015 Merge pull request '대용량 다운로드 타임아웃 설정' (#65) from feat/infer_dev_260211 into develop
Reviewed-on: #65
2026-02-11 11:47:02 +09:00
5d33190c31 대용량 다운로드 타임아웃 설정 2026-02-11 11:46:34 +09:00
92232e13f1 Merge pull request '라벨 다운로드 수정' (#64) from feat/infer_dev_260211 into develop
Reviewed-on: #64
2026-02-11 11:37:59 +09:00
81b0b55d57 라벨 다운로드 수정 2026-02-11 11:37:36 +09:00
83ef7e36ed shp 생성 profile 파라미터 추가 2026-02-11 10:46:02 +09:00
0d13e6989f Merge pull request '라벨링 다운로드 경로 추가' (#63) from feat/infer_dev_260206 into develop
Reviewed-on: #63
2026-02-11 09:57:36 +09:00
80b037a9cb 라벨링 다운로드 경로 추가 2026-02-11 09:57:02 +09:00
4342df9bf5 대용량 다운로드, 라벨링 다운로드 이력 기능 추가
Reviewed-on: #62
2026-02-11 09:54:57 +09:00
8f9585b516 dd 2026-02-11 06:54:46 +09:00
43b5a79031 Merge pull request 'change prod properties' (#61) from feat/dean/change_point into develop
Reviewed-on: #61
2026-02-11 06:33:02 +09:00
3ba3b05f2f change prod properties 2026-02-11 06:29:30 +09:00
298b90a289 라벨 다운로드 확인 API 추가 2026-02-10 16:50:02 +09:00
985e1789d2 파일다운로드 변경, 파일다운로드 로그 저장 변경, 라벨 다운로드 이력 추가, 라벨 다운로드 추가 2026-02-10 11:20:16 +09:00
fffc2efd96 change prod properties 2026-02-09 22:42:23 +09:00
2d86fab030 라벨링툴 > 검수자 상세 라벨러 이름 조건 수정, 이노팸 object DTO 주석 추가 2026-02-09 12:29:35 +09:00
82e3250fd4 Merge pull request '라벨링툴 탐지분류 명칭 추가' (#60) from feat/infer_dev_260206 into develop
Reviewed-on: #60
2026-02-06 16:46:28 +09:00
cf6b1323d8 라벨링툴 탐지분류 명칭 추가 2026-02-06 16:46:04 +09:00
470f2191b7 Merge pull request '국유인 배치 수정, 라벨링툴 적합여부 수정' (#59) from feat/infer_dev_260206 into develop
Reviewed-on: #59
2026-02-06 16:33:56 +09:00
5377294e6e 국유인 배치 수정, 라벨링툴 적합여부 수정 2026-02-06 16:32:46 +09:00
c127531412 Merge pull request '레이어명 추가' (#58) from feat/infer_dev_260206 into develop
Reviewed-on: #58
2026-02-06 16:00:27 +09:00
4e3e2a0181 레이어명 추가 2026-02-06 15:59:56 +09:00
61cfd8240a Merge pull request '스웨거 로그인 수정' (#57) from feat/infer_dev_260206 into develop
Reviewed-on: #57
2026-02-06 14:54:29 +09:00
57a2ec8367 스웨거 로그인 수정 2026-02-06 14:53:54 +09:00
54b6712273 Merge pull request '국유인 등록 로직 순서 변경' (#56) from feat/infer_dev_260206 into develop
Reviewed-on: #56
2026-02-06 14:25:51 +09:00
fe6edbb19f 국유인 등록 로직 순서 변경 2026-02-06 14:25:16 +09:00
b2141e98c0 Merge pull request '국유인 실태조사 적합여부 업데이트 로직 수정, 라벨링 건수 조건 수정' (#55) from feat/infer_dev_260206 into develop
Reviewed-on: #55
2026-02-06 11:12:56 +09:00
0e45adc52e 국유인 실태조사 적합여부 업데이트 로직 수정, 라벨링 건수 조건 수정 2026-02-06 11:12:15 +09:00
581b8c968e Merge pull request '국유인 수정, 라벨대상 건수 수정' (#54) from feat/infer_dev_260107 into develop
Reviewed-on: #54
2026-02-06 09:53:01 +09:00
bdce18119f Merge pull request '학습데이터 라벨링 현황 건수 조건 수정, 라벨러, 검수자 목록 수정' (#53) from feat/infer_dev_260107 into develop
Reviewed-on: #53
2026-02-05 18:01:45 +09:00
3b5536a57e Merge pull request '국유인 연동 경로 확인 TEST' (#52) from feat/infer_dev_260107 into develop
Reviewed-on: #52
2026-02-05 17:00:37 +09:00
9dd03f3c52 Merge pull request '국유인 API 수정 추가' (#51) from feat/infer_dev_260107 into develop
Reviewed-on: #51
2026-02-05 15:10:34 +09:00
796591eca6 Merge pull request '국유인 API 수정 추가' (#50) from feat/infer_dev_260107 into develop
Reviewed-on: #50
2026-02-05 15:03:06 +09:00
825e393e05 Merge pull request '국유인 API 수정 추가' (#49) from feat/infer_dev_260107 into develop
Reviewed-on: #49
2026-02-05 14:58:50 +09:00
1410333829 Merge pull request '국유인 API 수정' (#48) from feat/infer_dev_260107 into develop
Reviewed-on: #48
2026-02-05 14:55:43 +09:00
d63980476f Merge pull request '라벨링 할당 수정' (#47) from feat/infer_dev_260107 into develop
Reviewed-on: #47
2026-02-05 13:56:01 +09:00
ae1693a33c Merge pull request '라벨링 가능 건수 조건 수정' (#46) from feat/infer_dev_260107 into develop
Reviewed-on: #46
2026-02-05 13:49:44 +09:00
8dfae65bcc Merge pull request 'feat/infer_dev_260107' (#45) from feat/infer_dev_260107 into develop
Reviewed-on: #45
2026-02-04 17:56:37 +09:00
872df11844 Merge pull request '국유in 등록 로그 추가' (#44) from feat/infer_dev_260107 into develop
Reviewed-on: #44
2026-02-04 11:10:55 +09:00
f992bbe9ca Merge pull request '국유인, 라벨링 job 각각 분리 작업' (#43) from feat/infer_dev_260107 into develop
Reviewed-on: #43
2026-02-02 19:18:15 +09:00
643ea5cf9a Merge pull request 'spotless 적용' (#42) from feat/infer_dev_260107 into develop
Reviewed-on: #42
2026-02-02 17:17:45 +09:00
bc4b2dbac1 Merge pull request 'feat/infer_dev_260107' (#41) from feat/infer_dev_260107 into develop
Reviewed-on: #41
2026-02-02 17:16:21 +09:00
694b2fc31e Merge pull request '인증 예시 아이디 수정' (#40) from feat/infer_dev_260107 into develop
Reviewed-on: #40
2026-02-02 14:01:01 +09:00
fbdda6477c Merge pull request '사번 자리수 6자리 벨리데이션 수정' (#39) from feat/infer_dev_260107 into develop
Reviewed-on: #39
2026-02-02 13:37:13 +09:00
a572089dff Merge pull request '변화지도 레이어 조회 url 수정' (#38) from feat/infer_dev_260107 into develop
Reviewed-on: #38
2026-02-02 12:22:50 +09:00
c6abf7a935 Merge pull request 'feat/infer_dev_260107' (#37) from feat/infer_dev_260107 into develop
Reviewed-on: #37
2026-02-02 12:18:09 +09:00
a9348d9a66 Merge pull request '타일 url 시큐리티 추가' (#36) from feat/infer_dev_260107 into develop
Reviewed-on: #36
2026-02-02 10:31:59 +09:00
b877d2a8c9 Merge pull request 'feat/infer_dev_260107' (#35) from feat/infer_dev_260107 into develop
Reviewed-on: #35
2026-02-02 10:28:28 +09:00
151012ea28 Merge pull request 'feat/infer_dev_260107' (#34) from feat/infer_dev_260107 into develop
Reviewed-on: #34
2026-02-02 10:14:44 +09:00
68c68082cf Merge branch 'develop' of ssh://192.168.2.126:2222/MVPTeam/kamco-cd-api into develop 2026-02-01 22:27:49 +09:00
4ce96b72aa prod 2026-02-01 22:27:20 +09:00
0a5c5dfd7d Merge pull request 'crs 타입 수정하기' (#32) from feat/infer_dev_260107 into develop
Reviewed-on: #32
2026-01-30 21:43:05 +09:00
7442e4ee09 Merge pull request 'feat/infer_dev_260107' (#31) from feat/infer_dev_260107 into develop
Reviewed-on: #31
2026-01-30 21:32:17 +09:00
d278baed96 Merge pull request 'feat/infer_dev_260107' (#30) from feat/infer_dev_260107 into develop
Reviewed-on: #30
2026-01-30 21:05:04 +09:00
6b0074316f Merge pull request 'feat/infer_dev_260107' (#29) from feat/infer_dev_260107 into develop
Reviewed-on: #29
2026-01-30 19:44:42 +09:00
f921ef5d0d Merge pull request 'geoserver url 변경' (#28) from feat/infer_dev_260107 into develop
Reviewed-on: #28
2026-01-30 19:16:14 +09:00
7667620395 Merge pull request 'bbox 5186 transform' (#27) from feat/infer_dev_260107 into develop
Reviewed-on: #27
2026-01-30 18:20:03 +09:00
527acc9839 Merge pull request 'bbox 5186 으로 변환' (#26) from feat/infer_dev_260107 into develop
Reviewed-on: #26
2026-01-30 18:09:46 +09:00
407f14d230 Merge pull request 'wmts 수정' (#25) from feat/infer_dev_260107 into develop
Reviewed-on: #25
2026-01-30 17:56:40 +09:00
4a91d61b7d Merge pull request '추론실행 퍼센트 수정' (#24) from feat/infer_dev_260107 into develop
Reviewed-on: #24
2026-01-30 17:49:40 +09:00
9d7bbc1b63 Merge pull request 'wmts 수정, 국유인 어제완료된 라벨전송, 전송완료된 리스트 기능 추가' (#23) from feat/infer_dev_260107 into develop
Reviewed-on: #23
2026-01-30 17:44:26 +09:00
f46ea62761 Merge pull request 'RestTemplateConfig 수정' (#22) from feat/infer_dev_260107 into develop
Reviewed-on: #22
2026-01-30 17:21:11 +09:00
1abc0b93c0 Merge pull request 'RestTemplateConfig 수정, wmts 수정' (#21) from feat/infer_dev_260107 into develop
Reviewed-on: #21
2026-01-30 17:13:30 +09:00
4204e48d88 Merge pull request 'feat/infer_dev_260107' (#20) from feat/infer_dev_260107 into develop
Reviewed-on: #20
2026-01-30 16:53:04 +09:00
fa41d41739 Merge pull request 'RestTemplateConfig 수정, 추론실행 수정' (#19) from feat/infer_dev_260107 into develop
Reviewed-on: #19
2026-01-30 16:19:02 +09:00
ee28edd9d0 Merge pull request 'RestTemplateConfig 수정' (#18) from feat/infer_dev_260107 into develop
Reviewed-on: #18
2026-01-30 14:44:05 +09:00
8555897b77 Merge pull request 'feat/infer_dev_260107' (#17) from feat/infer_dev_260107 into develop
Reviewed-on: #17
2026-01-30 14:19:34 +09:00
fe7b1ed0bd Merge pull request '레이어관리 수정' (#16) from feat/infer_dev_260107 into develop
Reviewed-on: #16
2026-01-30 11:54:34 +09:00
064c02e21b Merge pull request '년도 1개만 조회하는 타일 API' (#15) from feat/infer_dev_260107 into develop
Reviewed-on: #15
2026-01-30 11:17:59 +09:00
fd3499a5ec Merge pull request 'feat/infer_dev_260107' (#14) from feat/infer_dev_260107 into develop
Reviewed-on: #14
2026-01-30 11:05:30 +09:00
686cf03524 Merge pull request 'geoserver 등록 수정' (#13) from feat/infer_dev_260107 into develop
Reviewed-on: #13
2026-01-30 10:23:20 +09:00
ee9914a5f3 Merge pull request 'geoserver 등록 수정' (#12) from feat/infer_dev_260107 into develop
Reviewed-on: #12
2026-01-30 10:21:25 +09:00
b3e90c9f2b Merge pull request 'feat/infer_dev_260107' (#11) from feat/infer_dev_260107 into develop
Reviewed-on: #11
2026-01-30 09:50:59 +09:00
156b7a312d Merge pull request '레이어관리 - 변화지도,라벨링툴 맵 리스트' (#10) from feat/infer_dev_260107 into develop
Reviewed-on: #10
2026-01-29 20:56:34 +09:00
cfed31656a Merge pull request 'ai 주소 변경' (#9) from feat/infer_dev_260107 into develop
Reviewed-on: #9
2026-01-29 20:12:17 +09:00
14e8a6476f Merge pull request '영상관리 등록 년도 tile 추가' (#8) from feat/infer_dev_260107 into develop
Reviewed-on: #8
2026-01-29 19:03:39 +09:00
ae6de0c030 Merge pull request 'ai 주소 변경' (#7) from feat/infer_dev_260107 into develop
Reviewed-on: #7
2026-01-29 17:34:32 +09:00
4036f88296 Merge pull request 'feat/infer_dev_260107' (#6) from feat/infer_dev_260107 into develop
Reviewed-on: #6
2026-01-29 16:25:08 +09:00
28718c4218 Merge pull request 'feat/infer_dev_260107' (#5) from feat/infer_dev_260107 into develop
Reviewed-on: #5
2026-01-29 14:42:55 +09:00
54c92842d4 Merge pull request '헬스체크 시큐리티 설정 추가' (#4) from feat/infer_dev_260107 into develop
Reviewed-on: #4
2026-01-29 12:30:48 +09:00
c83c540dfb Merge pull request 'feat/infer_dev_260107' (#3) from feat/infer_dev_260107 into develop
Reviewed-on: #3
2026-01-29 12:16:10 +09:00
dd1284f5c0 Merge pull request '추론실행 수정' (#2) from feat/infer_dev_260107 into develop
Reviewed-on: #2
2026-01-29 10:35:15 +09:00
385ada3291 Merge pull request 'feat/infer_dev_260107' (#1) from feat/infer_dev_260107 into develop
Reviewed-on: #1
2026-01-29 10:31:31 +09:00
165 changed files with 6436 additions and 1600 deletions

6
.gitignore vendored
View File

@@ -60,6 +60,7 @@ Thumbs.db
.env.*.local .env.*.local
application-local.yml application-local.yml
application-secret.yml application-secret.yml
metrics-collector/.env
### Docker (local testing) ### ### Docker (local testing) ###
.dockerignore .dockerignore
@@ -72,3 +73,8 @@ docker-compose.override.yml
*.swo *.swo
*~ *~
!/CLAUDE.md !/CLAUDE.md
### Metrics Collector ###
metrics-collector/venv/
metrics-collector/*.pid
metrics-collector/wheels/

29
Dockerfile-prod Normal file
View File

@@ -0,0 +1,29 @@
# Stage 1: Build stage (gradle build는 Jenkins에서 이미 수행)
FROM eclipse-temurin:21-jre-jammy
# GDAL 설치
RUN apt-get update && apt-get install -y \
gdal-bin \
libgdal-dev \
&& rm -rf /var/lib/apt/lists/*
ARG UID=1000
ARG GID=1000
RUN groupadd -g ${GID} kcomu \
&& useradd -u ${UID} -g ${GID} -m kcomu
USER kcomu
# 작업 디렉토리 설정
WORKDIR /app
# JAR 파일 복사 (Jenkins에서 빌드된 ROOT.jar)
COPY build/libs/ROOT.jar app.jar
# 포트 노출
EXPOSE 8080
# 애플리케이션 실행
# dev 프로파일로 실행
ENTRYPOINT ["java", "-jar", "-Dspring.profiles.active=prod", "app.jar"]

23
Dockerfile-prod_bak Normal file
View File

@@ -0,0 +1,23 @@
# Stage 1: Build stage (gradle build는 Jenkins에서 이미 수행)
FROM kamco-java-gdal:21
ARG UID=1000
ARG GID=1000
RUN groupadd -g ${GID} kcomu \
&& useradd -u ${UID} -g ${GID} -m kcomu
USER kcomu
# 작업 디렉토리 설정
WORKDIR /app
# JAR 파일 복사 (Jenkins에서 빌드된 ROOT.jar)
COPY build/libs/ROOT.jar app.jar
# 포트 노출
EXPOSE 8080
# 애플리케이션 실행
# dev 프로파일로 실행
ENTRYPOINT ["java", "-jar", "-Dspring.profiles.active=prod", "app.jar"]

View File

@@ -15,11 +15,7 @@ services:
- SPRING_PROFILES_ACTIVE=dev - SPRING_PROFILES_ACTIVE=dev
- TZ=Asia/Seoul - TZ=Asia/Seoul
volumes: volumes:
- /mnt/nfs_share/images:/app/original-images - /data:/kamco-nfs
- /mnt/nfs_share/model_output:/app/model-outputs
- /mnt/nfs_share/train_dataset:/app/train-dataset
- /mnt/nfs_share/tmp:/app/tmp
- /kamco-nfs:/kamco-nfs
networks: networks:
- kamco-cds - kamco-cds
restart: unless-stopped restart: unless-stopped

44
docker-compose-prod.yml Normal file
View File

@@ -0,0 +1,44 @@
services:
nginx:
image: nginx:alpine
container_name: kamco-cd-api-nginx
ports:
- "12013:443"
volumes:
- ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
- ./nginx/conf.d:/etc/nginx/conf.d:ro
- /etc/ssl/certs/globalsign:/etc/ssl/certs/globalsign:ro
networks:
- kamco-cds
restart: unless-stopped
depends_on:
- kamco-cd-api
healthcheck:
test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost/health"]
interval: 10s
timeout: 5s
retries: 5
start_period: 10s
kamco-cd-api:
image: kamco-api-app:260219
container_name: kamco-cd-api
user: "1000:1000"
environment:
- SPRING_PROFILES_ACTIVE=prod
- TZ=Asia/Seoul
volumes:
- /data:/kamco-nfs
networks:
- kamco-cds
restart: unless-stopped
healthcheck:
test: [ "CMD", "curl", "-f", "http://localhost:8080/monitor/health" ]
interval: 10s
timeout: 5s
retries: 5
start_period: 40s
networks:
kamco-cds:
external: true

47
inference-table-index.sh Normal file
View File

@@ -0,0 +1,47 @@
#!/bin/bash
#############################################
# PostgreSQL INDEX CREATE SCRIPT
# 에러 발생해도 계속 진행
#############################################
# set -e 제거 (중단 안함)
# ===== 환경변수 체크 =====
if [ -z "$DB_HOST" ] || [ -z "$DB_PORT" ] || [ -z "$DB_NAME" ] || [ -z "$DB_USER" ]; then
echo "DB 환경변수가 설정되지 않았습니다."
exit 1
fi
echo "========================================"
echo "START INDEX CREATE"
echo "TIME: $(date)"
echo "========================================"
run_index() {
echo "----------------------------------------"
echo "Running: $1"
psql -h $DB_HOST -p $DB_PORT -U $DB_USER -d $DB_NAME -c "$1"
echo "----------------------------------------"
}
run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_files_uid_ext_state
ON tb_map_sheet_mng_files (hst_uid, file_ext, file_state);"
run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_mng_files_hstuid_ext
ON tb_map_sheet_mng_files (hst_uid, file_ext);"
run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_inkx_no_use
ON tb_map_inkx_5k (mapidcd_no, use_inference);"
run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_inkx5k_mapidcd
ON tb_map_inkx_5k (mapidcd_no);"
run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_hst_exec_sheetnum_yyyy_desc
ON tb_map_sheet_mng_hst (map_sheet_num, mng_yyyy DESC);"
echo "========================================"
echo "END INDEX CREATE"
echo "TIME: $(date)"
echo "========================================"
echo "모든 인덱스 시도 완료"

122
nginx/README.md Normal file
View File

@@ -0,0 +1,122 @@
# Nginx HTTPS Configuration for KAMCO Change Detection API
## SSL Certificate Setup
### Required Files
GlobalSign SSL 인증서 파일들을 서버의 `/etc/ssl/certs/globalsign/` 디렉토리에 배치해야 합니다:
```
/etc/ssl/certs/globalsign/
├── certificate.crt # SSL 인증서 파일
├── private.key # 개인 키 파일
└── ca-bundle.crt # CA 번들 파일 (중간 인증서)
```
### Certificate Installation Steps
1. **디렉토리 생성**
```bash
sudo mkdir -p /etc/ssl/certs/globalsign
sudo chmod 755 /etc/ssl/certs/globalsign
```
2. **인증서 파일 복사**
```bash
sudo cp your-certificate.crt /etc/ssl/certs/globalsign/certificate.crt
sudo cp your-private.key /etc/ssl/certs/globalsign/private.key
sudo cp ca-bundle.crt /etc/ssl/certs/globalsign/ca-bundle.crt
```
3. **파일 권한 설정**
```bash
sudo chmod 644 /etc/ssl/certs/globalsign/certificate.crt
sudo chmod 600 /etc/ssl/certs/globalsign/private.key
sudo chmod 644 /etc/ssl/certs/globalsign/ca-bundle.crt
```
## Configuration Overview
### Service Architecture
```
Internet (HTTPS:12013)
nginx (443 in container)
kamco-changedetection-api (8080 in container)
```
### Key Features
- **HTTPS/TLS**: TLSv1.2, TLSv1.3 지원
- **Port**: 외부 12013 → 내부 443 (nginx)
- **Domain**: aicd-api.e-kamco.com:12013
- **Reverse Proxy**: kamco-changedetection-api:8080으로 프록시
- **Security Headers**: HSTS, X-Frame-Options, X-Content-Type-Options 등
- **Health Check**: /health 엔드포인트
## Deployment
### Start Services
```bash
docker-compose -f docker-compose-prod.yml up -d
```
### Check Logs
```bash
# Nginx logs
docker logs kamco-cd-nginx
# API logs
docker logs kamco-changedetection-api
```
### Verify Configuration
```bash
# Test nginx configuration
docker exec kamco-cd-nginx nginx -t
# Check SSL certificate
docker exec kamco-cd-nginx openssl s_client -connect localhost:443 -servername aicd-api.e-kamco.com
```
### Access Service
```bash
# HTTPS Access
curl -k https://aicd-api.e-kamco.com:12013/monitor/health
# Health Check
curl -k https://aicd-api.e-kamco.com:12013/health
```
## Troubleshooting
### Certificate Issues
인증서 파일이 제대로 마운트되었는지 확인:
```bash
docker exec kamco-cd-nginx ls -la /etc/ssl/certs/globalsign/
```
### Nginx Configuration Test
```bash
docker exec kamco-cd-nginx nginx -t
```
### Connection Test
```bash
# Check if nginx is listening
docker exec kamco-cd-nginx netstat -tlnp | grep 443
# Check backend connection
docker exec kamco-cd-nginx wget --spider http://kamco-changedetection-api:8080/monitor/health
```
## Configuration Files
- `nginx/nginx.conf`: Main nginx configuration
- `nginx/conf.d/default.conf`: Server block with SSL and proxy settings
- `docker-compose-prod.yml`: Docker compose with nginx service
## Notes
- 인증서 파일명이 다를 경우 `nginx/conf.d/default.conf`에서 경로를 수정하세요
- 인증서 갱신 시 nginx 컨테이너를 재시작하세요: `docker restart kamco-cd-nginx`
- 포트 12013이 방화벽에서 허용되어 있는지 확인하세요

60
nginx/conf.d/default.conf Normal file
View File

@@ -0,0 +1,60 @@
upstream kamco_api {
server kamco-cd-api:8080;
}
server {
listen 443 ssl http2;
server_name aicd-api.e-kamco.com;
# GlobalSign SSL Certificate
ssl_certificate /etc/ssl/certs/globalsign/certificate.crt;
ssl_certificate_key /etc/ssl/certs/globalsign/private.key;
# SSL Configuration
ssl_protocols TLSv1.2 TLSv1.3;
ssl_ciphers HIGH:!aNULL:!MD5;
ssl_prefer_server_ciphers on;
ssl_session_cache shared:SSL:10m;
ssl_session_timeout 10m;
# Security Headers
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
add_header X-Frame-Options "SAMEORIGIN" always;
add_header X-Content-Type-Options "nosniff" always;
add_header X-XSS-Protection "1; mode=block" always;
# Client Body Size
client_max_body_size 100M;
# Proxy Settings
location / {
proxy_pass http://kamco_api;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Port $server_port;
# Timeouts
proxy_connect_timeout 60s;
proxy_send_timeout 60s;
proxy_read_timeout 60s;
# WebSocket Support (if needed)
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
}
# Health Check Endpoint
location /health {
access_log off;
return 200 "OK";
add_header Content-Type text/plain;
}
# Access and Error Logs
access_log /var/log/nginx/kamco-api-access.log;
error_log /var/log/nginx/kamco-api-error.log;
}

33
nginx/nginx.conf Normal file
View File

@@ -0,0 +1,33 @@
user nginx;
worker_processes auto;
error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;
events {
worker_connections 1024;
}
http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
gzip on;
gzip_vary on;
gzip_min_length 1024;
gzip_types text/plain text/css text/xml text/javascript application/x-javascript application/xml+rss application/json;
include /etc/nginx/conf.d/*.conf;
}

View File

@@ -18,23 +18,25 @@ import org.springframework.web.filter.OncePerRequestFilter;
@RequiredArgsConstructor @RequiredArgsConstructor
public class JwtAuthenticationFilter extends OncePerRequestFilter { public class JwtAuthenticationFilter extends OncePerRequestFilter {
private final JwtTokenProvider jwtTokenProvider;
private final UserDetailsService userDetailsService;
private static final AntPathMatcher PATH_MATCHER = new AntPathMatcher(); private static final AntPathMatcher PATH_MATCHER = new AntPathMatcher();
// JWT 필터를 타지 않게 할 URL 패턴들
private static final String[] EXCLUDE_PATHS = { private static final String[] EXCLUDE_PATHS = {
"/api/auth/signin", "/api/auth/refresh", "/api/auth/logout", "/api/members/*/password" "/api/auth/signin", "/api/auth/refresh", "/api/auth/logout", "/api/members/*/password"
}; };
private final JwtTokenProvider jwtTokenProvider;
private final UserDetailsService userDetailsService;
@Override @Override
protected void doFilterInternal( protected void doFilterInternal(
HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) HttpServletRequest request, HttpServletResponse response, FilterChain filterChain)
throws ServletException, IOException { throws ServletException, IOException {
// HTTP 요청 헤더에서 JWT 토큰 꺼내기
String token = resolveToken(request); String token = resolveToken(request);
// JWT 토큰을 검증하고, 인증된 사용자로 SecurityContext에 등록
if (token != null && jwtTokenProvider.isValidToken(token)) { if (token != null && jwtTokenProvider.isValidToken(token)) {
String username = jwtTokenProvider.getSubject(token); String username = jwtTokenProvider.getSubject(token);
UserDetails userDetails = userDetailsService.loadUserByUsername(username); UserDetails userDetails = userDetailsService.loadUserByUsername(username);
UsernamePasswordAuthenticationToken authentication = UsernamePasswordAuthenticationToken authentication =
new UsernamePasswordAuthenticationToken(userDetails, null, userDetails.getAuthorities()); new UsernamePasswordAuthenticationToken(userDetails, null, userDetails.getAuthorities());
@@ -57,7 +59,7 @@ public class JwtAuthenticationFilter extends OncePerRequestFilter {
return false; return false;
} }
// /api/members/{memberId}/password // HTTP 요청 헤더에서 JWT 토큰 꺼내기
private String resolveToken(HttpServletRequest request) { private String resolveToken(HttpServletRequest request) {
String bearer = request.getHeader("Authorization"); String bearer = request.getHeader("Authorization");
if (bearer != null && bearer.startsWith("Bearer ")) { if (bearer != null && bearer.startsWith("Bearer ")) {

View File

@@ -8,10 +8,13 @@ import jakarta.annotation.PostConstruct;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Date; import java.util.Date;
import javax.crypto.SecretKey; import javax.crypto.SecretKey;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
/** 토큰 생성 */
@Component @Component
@Log4j2
public class JwtTokenProvider { public class JwtTokenProvider {
@Value("${jwt.secret}") @Value("${jwt.secret}")
@@ -31,11 +34,15 @@ public class JwtTokenProvider {
this.key = Keys.hmacShaKeyFor(secret.getBytes(StandardCharsets.UTF_8)); this.key = Keys.hmacShaKeyFor(secret.getBytes(StandardCharsets.UTF_8));
} }
// Access Token 생성
public String createAccessToken(String subject) { public String createAccessToken(String subject) {
log.info("TOKEN VALIDITY = {}", accessTokenValidityInMs);
return createToken(subject, accessTokenValidityInMs); return createToken(subject, accessTokenValidityInMs);
} }
// Refresh Token 생성
public String createRefreshToken(String subject) { public String createRefreshToken(String subject) {
log.info("REFRESH TOKEN VALIDITY = {}", refreshTokenValidityInMs);
return createToken(subject, refreshTokenValidityInMs); return createToken(subject, refreshTokenValidityInMs);
} }

View File

@@ -58,6 +58,7 @@ public class MenuAuthorizationManager implements AuthorizationManager<RequestAut
for (MenuEntity menu : allowedMenus) { for (MenuEntity menu : allowedMenus) {
String baseUri = menu.getMenuUrl(); String baseUri = menu.getMenuUrl();
if (baseUri == null || baseUri.isBlank()) { if (baseUri == null || baseUri.isBlank()) {
continue; continue;
} }

View File

@@ -6,6 +6,7 @@ import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.core.ValueOperations; import org.springframework.data.redis.core.ValueOperations;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
/** redis token handler */
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
public class RefreshTokenService { public class RefreshTokenService {
@@ -13,16 +14,33 @@ public class RefreshTokenService {
private final StringRedisTemplate redisTemplate; private final StringRedisTemplate redisTemplate;
private static final String PREFIX = "RT:"; private static final String PREFIX = "RT:";
/**
* Refresh Token 저장
*
* @param username 사용자 식별값 (보통 username or userId)
* @param refreshToken 발급된 Refresh Token
* @param ttlMillis 토큰 만료 시간 (밀리초 단위)
*/
public void save(String username, String refreshToken, long ttlMillis) { public void save(String username, String refreshToken, long ttlMillis) {
ValueOperations<String, String> ops = redisTemplate.opsForValue(); ValueOperations<String, String> ops = redisTemplate.opsForValue();
ops.set(PREFIX + username, refreshToken, Duration.ofMillis(ttlMillis)); ops.set(PREFIX + username, refreshToken, Duration.ofMillis(ttlMillis));
} }
/**
* Refresh Token 검증
*
* <p>1. Redis에 저장된 값 조회 2. 클라이언트가 보낸 refreshToken과 비교 3. 동일하면 true
*/
public boolean validate(String username, String refreshToken) { public boolean validate(String username, String refreshToken) {
String stored = redisTemplate.opsForValue().get(PREFIX + username); String stored = redisTemplate.opsForValue().get(PREFIX + username);
return stored != null && stored.equals(refreshToken); return stored != null && stored.equals(refreshToken);
} }
/**
* Refresh Token 삭제
*
* <p>로그아웃 시 호출 Redis에서 해당 사용자 토큰 제거
*/
public void delete(String username) { public void delete(String username) {
redisTemplate.delete(PREFIX + username); redisTemplate.delete(PREFIX + username);
} }

View File

@@ -117,16 +117,18 @@ public class ChangeDetectionApiController {
@Parameter(description = "도곽/일반(MAPSHEET/ADDRESS) 검색 타입", required = true) @Parameter(description = "도곽/일반(MAPSHEET/ADDRESS) 검색 타입", required = true)
@RequestParam(defaultValue = "MAPSHEET") @RequestParam(defaultValue = "MAPSHEET")
DetectSearchType type, DetectSearchType type,
@Parameter(description = "5k/50k 구분(SCALE_5K/SCALE_50K))", required = true) @Parameter(description = "5k/50k 구분(SCALE_5K/SCALE_50K))")
@RequestParam(defaultValue = "SCALE_50K") @RequestParam(defaultValue = "SCALE_50K", required = false)
MapScaleType scale, MapScaleType scale,
@Parameter( @Parameter(
description = "변화탐지 년도(차수) /year-list 의 uuid", description = "변화탐지 년도(차수) /year-list 의 uuid",
example = "8584e8d4-53b3-4582-bde2-28a81495a626") example = "8584e8d4-53b3-4582-bde2-28a81495a626")
UUID uuid, UUID uuid,
@Parameter(description = "도엽번호", example = "34607") @RequestParam String mapSheetNum) { @Parameter(description = "도엽번호", example = "34607") @RequestParam(required = false)
String mapSheetNum,
@Parameter(description = "pnu") @RequestParam(required = false) String pnu) {
return ApiResponseDto.ok( return ApiResponseDto.ok(
changeDetectionService.getChangeDetectionPolygonList(type, scale, uuid, mapSheetNum)); changeDetectionService.getChangeDetectionPolygonList(type, scale, uuid, mapSheetNum, pnu));
} }
@Operation(summary = "변화탐지 결과 Point", description = "변화탐지 결과 Point") @Operation(summary = "변화탐지 결과 Point", description = "변화탐지 결과 Point")
@@ -135,15 +137,66 @@ public class ChangeDetectionApiController {
@Parameter(description = "도곽/일반(MAPSHEET/ADDRESS) 검색 타입", required = true) @Parameter(description = "도곽/일반(MAPSHEET/ADDRESS) 검색 타입", required = true)
@RequestParam(defaultValue = "MAPSHEET") @RequestParam(defaultValue = "MAPSHEET")
DetectSearchType type, DetectSearchType type,
@Parameter(description = "5k/50k 구분(SCALE_5K/SCALE_50K))", required = true) @Parameter(description = "5k/50k 구분(SCALE_5K/SCALE_50K))")
@RequestParam(defaultValue = "SCALE_50K") @RequestParam(defaultValue = "SCALE_50K", required = false)
MapScaleType scale, MapScaleType scale,
@Parameter( @Parameter(
description = "변화탐지 년도(차수) /year-list 의 uuid", description = "변화탐지 년도(차수) /year-list 의 uuid",
example = "8584e8d4-53b3-4582-bde2-28a81495a626") example = "8584e8d4-53b3-4582-bde2-28a81495a626")
UUID uuid, UUID uuid,
@Parameter(description = "도엽번호", example = "34607") @RequestParam String mapSheetNum) { @Parameter(description = "도엽번호", example = "34607") @RequestParam(required = false)
String mapSheetNum,
@Parameter(description = "pnu") @RequestParam(required = false) String pnu) {
return ApiResponseDto.ok( return ApiResponseDto.ok(
changeDetectionService.getChangeDetectionPointList(type, scale, uuid, mapSheetNum)); changeDetectionService.getChangeDetectionPointList(type, scale, uuid, mapSheetNum, pnu));
}
@Operation(summary = "선택 변화탐지 결과 uuid 조회", description = "선택 변화탐지 결과 uuid 조회")
@GetMapping("/selected/uuid")
public ApiResponseDto<UUID> getChnDtctIdUuid(
@Parameter(description = "회차 32자 uid", example = "98ABAA1FC4394F11885C302C19AE5E81")
@RequestParam
String chnDtctId) {
return ApiResponseDto.ok(changeDetectionService.getLearnUuid(chnDtctId));
}
@Operation(summary = "선택 변화탐지 결과 Polygon", description = "선택 변화탐지 결과 Polygon")
@GetMapping("/selected/polygon")
public ApiResponseDto<ChangeDetectionDto.PolygonFeatureList> getCdPolygonList(
@Parameter(description = "회차 32자 uid", example = "98ABAA1FC4394F11885C302C19AE5E81")
@RequestParam
String chnDtctId,
@Parameter(description = "polygon 32자 uid", example = "3B1A7E5F895A4D9698489540EE1BBE1E")
@RequestParam(required = false)
String cdObjectId,
@Parameter(
description = "polygon 32자 uids",
example =
"3B1A7E5F895A4D9698489540EE1BBE1E,3B221A2AF9614647A0903A972D56C574,3B22686A7ACE44FC9CB20F1B4FA6DEFD,3B376D94A183479BB5FBE3D7166E6E1A")
@RequestParam(required = false)
List<String> cdObjectIds,
@Parameter(description = "pnu") @RequestParam(required = false) String pnu) {
return ApiResponseDto.ok(
changeDetectionService.getPolygonListByCd(chnDtctId, cdObjectId, cdObjectIds, pnu));
}
@Operation(summary = "선택 변화탐지 결과 Point", description = "선택 변화탐지 결과 Point")
@GetMapping("/selected/point")
public ApiResponseDto<ChangeDetectionDto.PointFeatureList> getCdPointList(
@Parameter(description = "회차 32자 uid", example = "98ABAA1FC4394F11885C302C19AE5E81")
@RequestParam
String chnDtctId,
@Parameter(description = "polygon 32자 uid", example = "3B1A7E5F895A4D9698489540EE1BBE1E")
@RequestParam(required = false)
String cdObjectId,
@Parameter(
description = "polygon 32자 uids",
example =
"3B1A7E5F895A4D9698489540EE1BBE1E,3B221A2AF9614647A0903A972D56C574,3B22686A7ACE44FC9CB20F1B4FA6DEFD,3B376D94A183479BB5FBE3D7166E6E1A")
@RequestParam(required = false)
List<String> cdObjectIds,
@Parameter(description = "pnu") @RequestParam(required = false) String pnu) {
return ApiResponseDto.ok(
changeDetectionService.getPointListByCd(chnDtctId, cdObjectId, cdObjectIds, pnu));
} }
} }

View File

@@ -1,6 +1,9 @@
package com.kamco.cd.kamcoback.changedetection.dto; package com.kamco.cd.kamcoback.changedetection.dto;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose; import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose;
import com.kamco.cd.kamcoback.common.utils.enums.EnumType; import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
@@ -143,6 +146,23 @@ public class ChangeDetectionDto {
private String mapSheetNum; private String mapSheetNum;
private String mapSheetName; private String mapSheetName;
private String alias; private String alias;
@JsonIgnore private String bboxStr;
private JsonNode bbox;
public MapSheetList(String mapSheetNum, String mapSheetName, String alias, String bboxStr) {
this.mapSheetNum = mapSheetNum;
this.mapSheetName = mapSheetName;
this.alias = alias;
if (bboxStr != null) {
ObjectMapper mapper = new ObjectMapper();
try {
this.bbox = mapper.readTree(bboxStr);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}
} }
@Schema(name = "PolygonFeatureList", description = "Geometry 리턴 객체") @Schema(name = "PolygonFeatureList", description = "Geometry 리턴 객체")
@@ -197,6 +217,8 @@ public class ChangeDetectionDto {
private Double afterConfidence; // 비교 신뢰도(확률) private Double afterConfidence; // 비교 신뢰도(확률)
private String afterClass; private String afterClass;
private Double cdProb; // 탐지정확도 private Double cdProb; // 탐지정확도
private UUID uuid;
private String resultUid;
} }
@Schema(name = "PointFeature", description = "Geometry 리턴 객체") @Schema(name = "PointFeature", description = "Geometry 리턴 객체")
@@ -250,5 +272,22 @@ public class ChangeDetectionDto {
private Double afterConfidence; // 비교 신뢰도(확률) private Double afterConfidence; // 비교 신뢰도(확률)
private String afterClass; // 비교 분류 private String afterClass; // 비교 분류
private Double cdProb; // 탐지 정확도 private Double cdProb; // 탐지 정확도
private UUID uuid;
private String uid;
}
@Schema(name = "ChangeDetectionMapDto", description = "변화지도 팝업 검색조건")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class ChangeDetectionMapDto {
private Integer compareYyyy;
private Integer targetYyyy;
private String cdObjectId;
private List<String> cdObjectIds;
private String chnDtctId;
private String pnu;
} }
} }

View File

@@ -46,13 +46,13 @@ public class ChangeDetectionService {
} }
public ChangeDetectionDto.PolygonFeatureList getChangeDetectionPolygonList( public ChangeDetectionDto.PolygonFeatureList getChangeDetectionPolygonList(
DetectSearchType type, MapScaleType scale, UUID uuid, String mapSheetNum) { DetectSearchType type, MapScaleType scale, UUID uuid, String mapSheetNum, String pnu) {
switch (type) { switch (type) {
case MAPSHEET -> { case MAPSHEET -> {
return changeDetectionCoreService.getChangeDetectionPolygonList(scale, uuid, mapSheetNum); return changeDetectionCoreService.getChangeDetectionPolygonList(scale, uuid, mapSheetNum);
} }
case ADDRESS -> { case ADDRESS -> {
return new ChangeDetectionDto.PolygonFeatureList(); // TODO: 일반 주소 검색 로직 확인 후 작업 필요 return changeDetectionCoreService.getChangeDetectionPnuPolygonList(uuid, pnu);
} }
default -> throw new IllegalArgumentException("Unsupported type: " + type); default -> throw new IllegalArgumentException("Unsupported type: " + type);
} }
@@ -60,14 +60,14 @@ public class ChangeDetectionService {
// Geometry 객체 순환 참조 문제로 캐싱 불가 // Geometry 객체 순환 참조 문제로 캐싱 불가
public ChangeDetectionDto.PointFeatureList getChangeDetectionPointList( public ChangeDetectionDto.PointFeatureList getChangeDetectionPointList(
DetectSearchType type, MapScaleType scale, UUID uuid, String mapSheetNum) { DetectSearchType type, MapScaleType scale, UUID uuid, String mapSheetNum, String pnu) {
switch (type) { switch (type) {
case MAPSHEET -> { case MAPSHEET -> {
return changeDetectionCoreService.getChangeDetectionPointList(scale, uuid, mapSheetNum); return changeDetectionCoreService.getChangeDetectionPointList(scale, uuid, mapSheetNum);
} }
case ADDRESS -> { case ADDRESS -> {
return new ChangeDetectionDto.PointFeatureList(); // TODO: 일반 주소 검색 로직 확인 후 작업 필요 return changeDetectionCoreService.getChangeDetectionPnuPointList(uuid, pnu);
} }
default -> throw new IllegalArgumentException("Unsupported type: " + type); default -> throw new IllegalArgumentException("Unsupported type: " + type);
} }
@@ -89,4 +89,42 @@ public class ChangeDetectionService {
default -> throw new IllegalArgumentException("Unsupported type: " + type); default -> throw new IllegalArgumentException("Unsupported type: " + type);
} }
} }
/**
* 선택 폴리곤 정보 조회
*
* @param chnDtctId
* @param cdObjectId
* @param cdObjectIds
* @param pnu
* @return
*/
public ChangeDetectionDto.PolygonFeatureList getPolygonListByCd(
String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu) {
return changeDetectionCoreService.getPolygonListByCd(chnDtctId, cdObjectId, cdObjectIds, pnu);
}
/**
* 선택 Point 조회
*
* @param chnDtctId
* @param cdObjectId
* @param cdObjectIds
* @param pnu
* @return
*/
public ChangeDetectionDto.PointFeatureList getPointListByCd(
String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu) {
return changeDetectionCoreService.getPointListByCd(chnDtctId, cdObjectId, cdObjectIds, pnu);
}
/**
* Learn uuid 조회
*
* @param chnDtctId
* @return uuid
*/
public UUID getLearnUuid(String chnDtctId) {
return changeDetectionCoreService.getLearnUuid(chnDtctId);
}
} }

View File

@@ -0,0 +1,109 @@
package com.kamco.cd.kamcoback.common.download;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.common.download.dto.DownloadAuditEvent;
import com.kamco.cd.kamcoback.menu.dto.MenuDto;
import com.kamco.cd.kamcoback.menu.service.MenuService;
import com.kamco.cd.kamcoback.postgres.entity.AuditLogEntity;
import com.kamco.cd.kamcoback.postgres.entity.MemberEntity;
import com.kamco.cd.kamcoback.postgres.repository.log.AuditLogRepository;
import com.kamco.cd.kamcoback.postgres.repository.members.MembersRepository;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Objects;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.event.EventListener;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@Slf4j
@Component
@RequiredArgsConstructor
public class DownloadAuditEventListener {
private final AuditLogRepository auditLogRepository;
private final MembersRepository membersRepository;
private final MenuService menuService;
private final ObjectMapper objectMapper;
@Async("auditLogExecutor")
@Transactional(propagation = Propagation.REQUIRES_NEW)
@EventListener
public void onDownloadAudit(DownloadAuditEvent ev) {
try {
String menuUid = resolveMenuUid(ev.normalizedUri());
if (menuUid == null) {
// menuUid null 불가 -> 스킵
log.warn(
"MenuUid not resolved. skip audit. uri={}, normalized={}",
ev.requestUri(),
ev.normalizedUri());
return;
}
Long userId = ev.userId();
if (userId == null) {
// a 링크로 들어온 download는 사번으로 파라미터가 전달 되므로 사번으로 user id 조회 하기
MemberEntity memberEntity =
membersRepository.findByEmployeeNo(ev.employeeNo()).orElse(null);
if (memberEntity == null) {
return; // 매핑 실패 시 로그 저장 안 함
}
userId = memberEntity.getId();
}
AuditLogEntity logEntity =
AuditLogEntity.forFileDownload(
userId, ev.requestUri(), menuUid, ev.ip(), ev.status(), ev.downloadUuid());
auditLogRepository.save(logEntity);
} catch (Exception e) {
// 본 요청과 분리되어야 함
log.warn("Download audit save failed. uri={}, err={}", ev.requestUri(), e.toString());
}
}
private String resolveMenuUid(String normalizedUri) {
try {
List<?> list = menuService.getFindAll();
List<MenuDto.Basic> basics =
list.stream()
.map(
item -> {
if (item instanceof LinkedHashMap<?, ?> map) {
return objectMapper.convertValue(map, MenuDto.Basic.class);
} else if (item instanceof MenuDto.Basic dto) {
return dto;
}
return null;
})
.filter(Objects::nonNull)
.toList();
MenuDto.Basic basic =
basics.stream()
.filter(m -> m.getMenuUrl() != null && normalizedUri.startsWith(m.getMenuUrl()))
.max(Comparator.comparingInt(m -> m.getMenuUrl().length()))
.orElse(null);
if (basic == null) return null;
String menuUidStr = basic.getMenuUid(); // ← String
if (menuUidStr == null || menuUidStr.isBlank()) return null;
return menuUidStr; // ← Long 변환
} catch (Exception e) {
return null;
}
}
}

View File

@@ -0,0 +1,19 @@
package com.kamco.cd.kamcoback.common.download;
import org.springframework.util.AntPathMatcher;
public final class DownloadPaths {
private DownloadPaths() {}
public static final String[] PATTERNS = {
"/api/inference/download/**", "/api/training-data/stage/download/**"
};
public static boolean matches(String uri) {
AntPathMatcher m = new AntPathMatcher();
for (String p : PATTERNS) {
if (m.match(p, uri)) return true;
}
return false;
}
}

View File

@@ -0,0 +1,133 @@
package com.kamco.cd.kamcoback.common.download;
import jakarta.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.ResourceRegion;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpRange;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
/**
* Range(부분 다운로드) 지원 파일 다운로드 응답 생성기.
*
* <p>브라우저/다운로드 매니저가 Range 헤더를 보내면 206 Partial Content로 일부 구간만 내려주고, Range 헤더가 없으면 200 OK로 전체 파일을
* 내려준다.
*
* <p>대용량 ZIP(또는 바이너리) 파일 다운로드 시: - 메모리에 파일 전체를 올리지 않고(Resource/FileSystemResource 스트리밍) -
* 이어받기(Resume) 및 병렬 다운로드(일부 클라이언트) 지원 - 잘못된 Range에 대해 416 Range Not Satisfiable 처리
*/
@Component
public class RangeDownloadResponder {

  /** Nginx hint: disable proxy buffering so large files stream instead of being spooled. */
  private static final String X_ACCEL_BUFFERING = "X-Accel-Buffering";

  /**
   * Builds a download response for a ZIP (or arbitrary binary) file with HTTP Range support.
   *
   * <p>Without a {@code Range} header the whole file is streamed with 200 OK. With a valid
   * {@code Range} header only the requested region is streamed with 206 Partial Content
   * (enables resume and partial downloads). Malformed or unsatisfiable ranges produce
   * 416 Range Not Satisfiable with {@code Content-Range: bytes *}/{total} per RFC 9110.
   * The file is never loaded fully into memory (Resource/ResourceRegion streaming).
   *
   * @param filePath actual file location on the server
   * @param downloadFileName file name shown to the user; encoded per RFC 6266 so quotes and
   *     non-ASCII characters cannot corrupt the header
   * @param request used only to read the {@code Range} header
   * @return 200 (full), 206 (partial), 404 (missing/irregular file) or 416 (bad range)
   * @throws IOException if the file size cannot be determined
   */
  public ResponseEntity<?> buildZipResponse(
      Path filePath, String downloadFileName, HttpServletRequest request) throws IOException {
    // Reject anything that is not a regular file (missing path, directory, special file).
    if (!Files.isRegularFile(filePath)) {
      return ResponseEntity.notFound().build();
    }

    long totalSize = Files.size(filePath);
    // FileSystemResource streams the file body; nothing is buffered in memory.
    Resource resource = new FileSystemResource(filePath);

    // RFC 6266-compliant Content-Disposition. The builder (instead of naive string
    // concatenation) escapes quotes and encodes non-ASCII names as filename*=UTF-8''...
    String disposition =
        ContentDisposition.attachment()
            .filename(downloadFileName, StandardCharsets.UTF_8)
            .build()
            .toString();

    String rangeHeader = request.getHeader(HttpHeaders.RANGE);

    // No Range header -> full download with 200 OK.
    if (rangeHeader == null || rangeHeader.isBlank()) {
      return fullDownload(resource, totalSize, disposition);
    }

    // Syntactically invalid Range header -> 416 (per RFC).
    List<HttpRange> ranges;
    try {
      ranges = HttpRange.parseRanges(rangeHeader);
    } catch (IllegalArgumentException ex) {
      return rangeNotSatisfiable(totalSize);
    }
    // Defensive: avoid get(0) blowing up on an (unexpected) empty parse result.
    if (ranges.isEmpty()) {
      return fullDownload(resource, totalSize, disposition);
    }

    // Only the first range is honored; browsers send a single range. Serving multiple
    // ranges would require a multipart/byteranges implementation.
    HttpRange range = ranges.get(0);

    // HttpRange resolves open-ended (bytes=500-) and suffix (bytes=-500) forms against totalSize.
    long start = range.getRangeStart(totalSize);
    long end = range.getRangeEnd(totalSize);

    // Start beyond EOF is unsatisfiable.
    if (start >= totalSize) {
      return rangeNotSatisfiable(totalSize);
    }

    // start/end are inclusive, hence the +1.
    long regionLength = end - start + 1;
    // ResourceRegion streams only [start, start + regionLength) of the file.
    ResourceRegion region = new ResourceRegion(resource, start, regionLength);

    return ResponseEntity.status(HttpStatus.PARTIAL_CONTENT)
        .contentType(MediaType.APPLICATION_OCTET_STREAM)
        .header(HttpHeaders.CONTENT_DISPOSITION, disposition)
        .header(HttpHeaders.ACCEPT_RANGES, "bytes")
        .header(X_ACCEL_BUFFERING, "no")
        .header(HttpHeaders.CONTENT_RANGE, "bytes " + start + "-" + end + "/" + totalSize)
        .contentLength(regionLength)
        .body(region);
  }

  /** 200 OK response streaming the whole file. */
  private ResponseEntity<?> fullDownload(Resource resource, long totalSize, String disposition) {
    return ResponseEntity.ok()
        .contentType(MediaType.APPLICATION_OCTET_STREAM)
        .header(HttpHeaders.CONTENT_DISPOSITION, disposition)
        .header(HttpHeaders.ACCEPT_RANGES, "bytes")
        .header(X_ACCEL_BUFFERING, "no")
        .contentLength(totalSize)
        .body(resource);
  }

  /** 416 response advertising the total size via {@code Content-Range: bytes *}/{total}. */
  private ResponseEntity<?> rangeNotSatisfiable(long totalSize) {
    return ResponseEntity.status(HttpStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
        .header(HttpHeaders.CONTENT_RANGE, "bytes */" + totalSize)
        .header(X_ACCEL_BUFFERING, "no")
        .build();
  }
}

View File

@@ -0,0 +1,12 @@
package com.kamco.cd.kamcoback.common.download.dto;
import java.util.UUID;
/**
 * Audit payload for one file-download request, published as a Spring application event and
 * consumed by the download audit listener.
 *
 * @param userId id of the authenticated user; NOTE(review): appears nullable for anonymous
 *     link downloads — confirm against the publishing interceptor
 * @param employeeNo employee number supplied when no authenticated user id is available
 * @param requestUri raw request URI as received
 * @param normalizedUri request URI with the "/api" prefix removed
 * @param ip client IP address resolved from the request
 * @param status HTTP response status code of the download
 * @param downloadUuid download identifier parsed from the last URI segment
 */
public record DownloadAuditEvent(
    Long userId,
    String employeeNo,
    String requestUri,
    String normalizedUri,
    String ip,
    int status,
    UUID downloadUuid) {}

View File

@@ -11,7 +11,8 @@ import lombok.Getter;
public enum CrsType implements EnumType { public enum CrsType implements EnumType {
EPSG_3857("Web Mercator, 웹지도 미터(EPSG:900913 동일)"), EPSG_3857("Web Mercator, 웹지도 미터(EPSG:900913 동일)"),
EPSG_4326("WGS84 위경도, GeoJSON/OSM 기본"), EPSG_4326("WGS84 위경도, GeoJSON/OSM 기본"),
EPSG_5186("Korea 2000 중부 TM, 한국 SHP"); EPSG_5186("5186::Korea 2000 중부 TM, 한국 SHP"),
EPSG_5179("5179::Korea 2000 중부 TM, 한국 SHP");
private final String desc; private final String desc;

View File

@@ -13,7 +13,9 @@ public enum LayerType implements EnumType {
TILE("배경지도"), TILE("배경지도"),
GEOJSON("객체데이터"), GEOJSON("객체데이터"),
WMTS("타일레이어"), WMTS("타일레이어"),
WMS("지적도"); WMS("지적도"),
KAMCO_WMS("국유인WMS"),
KAMCO_WMTS("국유인WMTS");
private final String desc; private final String desc;

View File

@@ -27,4 +27,10 @@ public class CustomApiException extends RuntimeException {
this.codeName = errorCode.getCode(); this.codeName = errorCode.getCode();
this.status = errorCode.getStatus(); this.status = errorCode.getStatus();
} }
public CustomApiException(String codeName, HttpStatus status, Throwable cause) {
super(codeName, cause);
this.codeName = codeName;
this.status = status;
}
} }

View File

@@ -0,0 +1,111 @@
package com.kamco.cd.kamcoback.common.inference.service;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient.ExternalCallResult;
import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto;
import java.util.List;
import java.util.Map;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@Log4j2
@Transactional(readOnly = true)
@RequiredArgsConstructor
public class InferenceCommonService {

  /** Active Spring profile (kept for environment-specific request handling). */
  @Value("${spring.profiles.active}")
  private String profile;

  /** Inference AI endpoint accepting prediction batch requests. */
  @Value("${inference.url}")
  private String inferenceUrl;

  private final ObjectMapper objectMapper;
  private final ExternalHttpClient externalHttpClient;

  /**
   * Calls the inference AI API and returns the accepted batch id.
   *
   * <p>Expected response shape: a JSON array whose first element contains {@code batch_id}.
   *
   * @param dto inference request payload; must not be {@code null}
   * @return batch id parsed from the first element of the response array
   * @throws CustomApiException BAD_REQUEST when dto is null; BAD_GATEWAY on non-2xx responses
   *     or unparseable bodies; NOT_FOUND when the response is empty or lacks {@code batch_id}
   */
  public Long ensureAccepted(InferenceSendDto dto) {
    if (dto == null) {
      log.warn("not InferenceSendDto dto");
      throw new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST);
    }

    log.info("========================================================");
    log.info("[SEND INFERENCE] Inference request dto= {}", dto);
    log.info("========================================================");

    // Debug-level dump of the serialized payload; serialization failure is non-fatal.
    try {
      log.debug("Inference request dto={}", objectMapper.writeValueAsString(dto));
    } catch (JsonProcessingException e) {
      log.warn("Failed to serialize inference dto", e);
    }

    // Call the inference execution API.
    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);
    headers.setAccept(List.of(MediaType.APPLICATION_JSON));

    ExternalCallResult<String> result =
        externalHttpClient.callLong(inferenceUrl, HttpMethod.POST, dto, headers, String.class);

    if (result.statusCode() < 200 || result.statusCode() >= 300) {
      log.error("Inference API failed. status={}, body={}", result.statusCode(), result.body());
      throw new CustomApiException("BAD_GATEWAY", HttpStatus.BAD_GATEWAY);
    }

    // Parse the response and extract batch_id.
    try {
      List<Map<String, Object>> list =
          objectMapper.readValue(result.body(), new TypeReference<>() {});
      if (list.isEmpty()) {
        throw new CustomApiException(
            "NOT_FOUND", HttpStatus.NOT_FOUND, "Inference response is empty");
      }
      Object batchIdObj = list.get(0).get("batch_id");
      if (batchIdObj == null) {
        throw new CustomApiException(
            "NOT_FOUND", HttpStatus.NOT_FOUND, "batch_id not found in response");
      }
      return Long.valueOf(batchIdObj.toString());
    } catch (CustomApiException e) {
      // Bug fix: the intentional NOT_FOUND errors thrown above were previously caught by the
      // generic handler below and re-wrapped as BAD_GATEWAY, losing the intended status.
      throw e;
    } catch (Exception e) {
      log.error("Failed to parse inference response. body={}", result.body(), e);
      throw new CustomApiException("INVALID_INFERENCE_RESPONSE", HttpStatus.BAD_GATEWAY);
    }
  }
}

View File

@@ -0,0 +1,274 @@
package com.kamco.cd.kamcoback.common.inference.utils;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.http.HttpStatus;
import org.springframework.web.server.ResponseStatusException;
/**
* GeoJSON 파일의 "features[].properties.scene_id" 값들이 "요청한 도엽번호 목록(requestedMapSheetNums)"과 정확히 일치하는지
* 검증하는 유틸.
*
* <p>핵심 목적: - 요청한 도엽번호를 기반으로 GeoJSON을 생성했는데, 실제 결과 파일에 누락/추가/중복/빈값(scene_id 없음) 등이 발생했는지 빠르게 잡아내기.
*
* <p>검증 실패 시: - 404: 파일 자체가 없음 - 400: 파일이 비어있거나(0 byte), features 구조가 이상하거나, 요청 목록이 비어있음 - 500: 파일
* IO/파싱 자체가 실패(읽기 실패 등) - 422: 정합성(요청 vs 결과)이 맞지 않음 (누락/추가/중복/빈 scene_id 존재)
*/
public class GeoJsonValidator {
/** GeoJSON 파싱용 ObjectMapper (정적 1개로 재사용) */
private static final ObjectMapper om = new ObjectMapper();
/** 로그 출력용 */
private static final Logger log = LogManager.getLogger(GeoJsonValidator.class);
/**
* @param geojsonPath GeoJSON 파일 경로(문자열)
* @param requestedMapSheetNums "요청한 도엽번호" 리스트 (중복/공백/NULL 포함 가능)
* <p>동작 개요: 1) 파일 존재/크기 검증 2) 요청 도엽번호 목록 정리(Trim + 공백 제거 + 중복 제거) 3) GeoJSON 파싱 후 features 배열
* 확보 4) features에서 scene_id 추출하여 유니크 set 구성 5) requested vs found 비교: - missing: requested -
* found - extra : found - requested - duplicates: GeoJSON 내부에서 scene_id 중복 등장 - nullIdCount:
* scene_id가 null/blank 인 feature 개수 6) 이상 있으면 422로 실패 처리
*/
public static void validateWithRequested(String geojsonPath, List<String> requestedMapSheetNums) {
// 문자열 경로를 Path로 변환 (Files API 사용 목적)
Path path = Path.of(geojsonPath);
// =========================================================
// 1) 파일 기본 검증
// - 파일이 존재하는지
// - 파일 크기가 0인지(비어있으면 생성 실패/오류 가능성)
// =========================================================
try {
// 파일 존재 여부 체크 (없으면 404)
if (!Files.exists(path)) {
throw new ResponseStatusException(
HttpStatus.NOT_FOUND, "GeoJSON 파일이 존재하지 않습니다: " + geojsonPath);
}
// 파일 사이즈 체크 (0 byte면 400)
if (Files.size(path) == 0) {
throw new ResponseStatusException(
HttpStatus.BAD_REQUEST, "GeoJSON 파일이 비어있습니다: " + geojsonPath);
}
} catch (IOException e) {
// 파일 사이즈/상태 확인 중 IO 오류면 서버오류로 처리
log.error("GeoJSON 파일 상태 확인 실패: path={}", path, e);
throw new ResponseStatusException(
HttpStatus.INTERNAL_SERVER_ERROR, "GeoJSON 파일 상태 확인 실패: " + geojsonPath, e);
}
// =========================================================
// 2) 요청 도엽 리스트 유효성 검증
// - 요청 목록 자체가 null/empty면 검증할 기준이 없으므로 400
// =========================================================
if (requestedMapSheetNums == null || requestedMapSheetNums.isEmpty()) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "requestedMapSheetNums 가 비어있습니다.");
}
// =========================================================
// 2-1) 요청 도엽 Set 정리 (중복/공백/NULL 제거)
// - null 제거
// - trim 적용
// - 빈 문자열 제거
// - LinkedHashSet 사용: "중복 제거 + 원래 입력 순서 유지"
// =========================================================
Set<String> requested =
requestedMapSheetNums.stream()
.filter(Objects::nonNull) // null 제거
.map(String::trim) // 앞뒤 공백 제거
.filter(s -> !s.isEmpty()) // "" 제거
.collect(Collectors.toCollection(LinkedHashSet::new)); // 중복 제거 + 순서 유지
// 정리 결과가 비어있으면(전부 null/공백)이므로 400
if (requested.isEmpty()) {
throw new ResponseStatusException(
HttpStatus.BAD_REQUEST, "requestedMapSheetNums 가 공백/NULL만 포함합니다.");
}
// =========================================================
// 3) GeoJSON 파싱
// 기대 구조:
// {
// "type": "FeatureCollection",
// "features": [ ... ]
// }
//
// - features가 없거나 배열이 아니면 "유효하지 않은 GeoJSON" (400)
// - 파일 읽기/파싱 IO 문제는 500
// - JSON 자체가 깨진 경우는 400
// =========================================================
final JsonNode features;
try {
// JSON 파일을 트리 형태로 파싱
JsonNode root = om.readTree(path.toFile());
// GeoJSON FeatureCollection의 핵심은 features 배열
features = root.get("features");
// features가 없거나 배열이 아니면 GeoJSON 구조가 이상한 것
if (features == null || !features.isArray()) {
throw new ResponseStatusException(
HttpStatus.BAD_REQUEST, "유효하지 않은 GeoJSON: features가 없거나 배열이 아닙니다.");
}
} catch (ResponseStatusException e) {
// 위에서 직접 던진 에러는 그대로 전달
throw e;
} catch (IOException e) {
// 읽기/파싱 과정에서 IO 문제가 터지면 서버오류
log.error("GeoJSON 파일 읽기/파싱 실패: path={}", path, e);
throw new ResponseStatusException(
HttpStatus.INTERNAL_SERVER_ERROR, "GeoJSON 파일 읽기/파싱 실패: " + geojsonPath, e);
} catch (Exception e) {
// JSON 문법 오류/예상치 못한 파싱 오류는 클라이언트 입력/파일 자체 문제로 400 처리
log.error("GeoJSON 파싱 오류(비정상 JSON): path={}", path, e);
throw new ResponseStatusException(
HttpStatus.BAD_REQUEST, "GeoJSON 파싱 오류(비정상 JSON): " + geojsonPath, e);
}
// =========================================================
// 4) 검증 로직
// - featureCount: 전체 feature 수 (중복 포함)
// - foundUnique: GeoJSON에 등장한 유니크 scene_id 집합
// - duplicates: GeoJSON 내부에서 scene_id가 중복된 목록(샘플 출력용)
// - nullIdCount: scene_id가 없거나 빈 값인 feature 개수
// =========================================================
int featureCount = features.size();
// 유니크 scene_id를 담는 Set (중복 판단을 위해 add 결과를 사용)
Set<String> foundUnique = new HashSet<>();
// 중복된 scene_id 목록 (샘플 로그 출력용이라 순서 유지 가능한 LinkedHashSet 사용)
Set<String> duplicates = new LinkedHashSet<>();
// scene_id가 null 또는 blank인 feature의 개수 (데이터 이상)
int nullIdCount = 0;
// ---------------------------------------------------------
// features를 돌면서 feature.properties.scene_id를 추출한다.
//
// 기대 구조(일반적):
// features[i] = {
// "type": "Feature",
// "properties": {
// "scene_id": "도엽번호"
// },
// "geometry": {...}
// }
// ---------------------------------------------------------
for (JsonNode feature : features) {
JsonNode props = feature.get("properties");
// properties가 있고 scene_id가 null이 아니면 텍스트로 읽음
// 없으면 null 처리
String sceneId =
(props != null && props.hasNonNull("scene_id"))
? props.get("scene_id").asText().trim()
: null;
// scene_id가 없거나 빈값이면 "정상적으로 도엽번호가 들어오지 않은 feature"로 카운트
if (sceneId == null || sceneId.isBlank()) {
nullIdCount++; // 도엽번호가 없으면 증가
continue;
}
// foundUnique.add(sceneId)가 false면 "이미 같은 값이 있었다"는 뜻 => 중복
if (!foundUnique.add(sceneId)) {
duplicates.add(sceneId);
}
}
// =========================================================
// 4-1) requested vs found 비교(set 차집합)
//
// missing = requested - found
// : 요청은 했는데 결과 GeoJSON에 없는 도엽번호
//
// extra = found - requested
// : 요청하지 않았는데 결과 GeoJSON에 들어간 도엽번호
// =========================================================
// missing: requested를 복사한 뒤(foundUnique에 있는 값들을 제거) => 남은 것이 누락분
Set<String> missing = new LinkedHashSet<>(requested);
missing.removeAll(foundUnique);
// extra: foundUnique를 복사한 뒤(requested에 있는 값들을 제거) => 남은 것이 추가분
Set<String> extra = new LinkedHashSet<>(foundUnique);
extra.removeAll(requested);
// =========================================================
// 5) 로그 출력
// - 운영에서 문제 생겼을 때 "요청 vs 생성 결과"를 한 눈에 보게
// - sample 로그는 너무 길어질 수 있으므로 limit 걸어줌
// =========================================================
log.info(
"""
===== GeoJSON Validation =====
file: {}
features(total): {}
requested(unique): {}
found(unique scene_id): {}
scene_id null/blank: {}
duplicates(scene_id): {}
missing(requested - found): {}
extra(found - requested): {}
==============================
""",
geojsonPath,
featureCount, // 중복 포함한 전체 feature 수
requested.size(), // 요청 도엽 유니크 수
foundUnique.size(), // GeoJSON에서 발견된 scene_id 유니크 수
nullIdCount, // scene_id가 비어있는 feature 수
duplicates.size(), // 중복 scene_id 종류 수
missing.size(), // 요청했지만 빠진 도엽 수
extra.size()); // 요청하지 않았는데 들어온 도엽 수
// 중복/누락/추가 항목은 전체를 다 찍으면 로그 폭발하므로 샘플만
// if (!duplicates.isEmpty())
// log.warn("duplicates sample: {}", duplicates.stream().limit(20).toList());
if (!missing.isEmpty()) {
log.warn("missing sample: {}", missing.stream().limit(50).toList());
}
if (!extra.isEmpty()) {
log.warn("extra sample: {}", extra.stream().limit(50).toList());
}
// =========================================================
// 6) 실패 조건 판정
//
// 아래 중 하나라도 있으면 "요청 대비 결과 정합성이 깨졌다"로 보고 실패 처리(422):
// - missing 존재: 요청했는데 결과에 없음
// - extra 존재 : 요청 안했는데 결과에 있음
// - duplicates 존재: 동일 도엽이 중복 생성됨
// - nullIdCount > 0: scene_id가 비어있는 feature가 있음(데이터 이상)
//
// 422(Unprocessable Entity):
// - 요청 문법은 맞지만(파일은 있고 JSON도 읽힘),
// 내용(정합성)이 요구사항을 만족하지 못하는 경우에 적합.
// =========================================================
if (!missing.isEmpty() || !extra.isEmpty() || !duplicates.isEmpty() || nullIdCount > 0) {
throw new ResponseStatusException(
HttpStatus.UNPROCESSABLE_ENTITY,
String.format(
"GeoJSON validation failed: missing=%d, extra=%d, duplicates=%d, nullId=%d",
missing.size(), extra.size(), duplicates.size(), nullIdCount));
}
// 모든 조건을 통과하면 정상
log.info("GeoJSON validation OK");
}
}

View File

@@ -1,20 +1,45 @@
package com.kamco.cd.kamcoback.common.service; package com.kamco.cd.kamcoback.common.service;
import com.kamco.cd.kamcoback.config.InferenceProperties;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2; import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@Log4j2 @Log4j2
// 0312
@RequiredArgsConstructor
@Component @Component
public class ExternalJarRunner { public class ExternalJarRunner {
@Value("${spring.profiles.active}")
private String profile;
// 0312
private final InferenceProperties inferenceProperties;
private static final long TIMEOUT_MINUTES = TimeUnit.DAYS.toMinutes(3); private static final long TIMEOUT_MINUTES = TimeUnit.DAYS.toMinutes(3);
// java -jar build/libs/shp-exporter.jar --batch --geoserver.enabled=true
// --converter.inference-id=qq99999 --converter.batch-ids[0]=111
// 0312 shp 파일 배치를 통해 생성
public void run(String inferenceLearningId, List<Long> batchIds) {
  // JAR used to generate shapefiles in batch mode.
  String jarPathV2 = inferenceProperties.getJarPathV2();

  List<String> args = new ArrayList<>();
  // ProcessBuilder passes each element verbatim as one argv entry, so options must not carry
  // a leading space ("--batch", not " --batch") or the command-line parser of the launched
  // app will not recognize them as options.
  args.add("--spring.profiles.active=" + profile);
  args.add("--batch");
  args.add("--geoserver.enabled=true");
  args.add("--converter.inference-id=" + inferenceLearningId);
  // Bug fix: indexed list binding needs contiguous indices starting at 0. The previous code
  // used args.size() as the index, which started at 4 and broke converter.batch-ids binding.
  for (int i = 0; i < batchIds.size(); i++) {
    args.add("--converter.batch-ids[" + i + "]=" + batchIds.get(i));
  }

  execJar(jarPathV2, args);
}
/** /**
* shp 파일 생성 * shp 파일 생성
* *
@@ -25,7 +50,8 @@ public class ExternalJarRunner {
* @param mode * @param mode
* <p>MERGED - batch-ids 에 해당하는 **모든 데이터를 하나의 Shapefile로 병합 생성, * <p>MERGED - batch-ids 에 해당하는 **모든 데이터를 하나의 Shapefile로 병합 생성,
* <p>MAP_IDS - 명시적으로 전달한 map-ids만 대상으로 Shapefile 생성, * <p>MAP_IDS - 명시적으로 전달한 map-ids만 대상으로 Shapefile 생성,
* <p>RESOLVE - batch-ids 기준으로 **JAR 내부에서 map_ids를 조회**한 뒤 Shapefile 생성 * <p>RESOLVE - batch-ids 기준으로 **JAR 내부에서 map_ids를 조회**한 뒤 Shapefile 생성 java -jar
* build/libs/shp-exporter.jar --spring.profiles.active=prod
*/ */
public void run(String jarPath, String batchIds, String inferenceId, String mapIds, String mode) { public void run(String jarPath, String batchIds, String inferenceId, String mapIds, String mode) {
List<String> args = new ArrayList<>(); List<String> args = new ArrayList<>();
@@ -40,7 +66,7 @@ public class ExternalJarRunner {
if (mode != null && !mode.isEmpty()) { if (mode != null && !mode.isEmpty()) {
addArg(args, "converter.mode", mode); addArg(args, "converter.mode", mode);
} }
addArg(args, "spring.profiles.active", profile);
execJar(jarPath, args); execJar(jarPath, args);
} }
@@ -57,6 +83,7 @@ public class ExternalJarRunner {
addArg(args, "upload-shp", register); addArg(args, "upload-shp", register);
// addArg(args, "layer", layer); // addArg(args, "layer", layer);
addArg(args, "spring.profiles.active", profile);
execJar(jarPath, args); execJar(jarPath, args);
} }
@@ -69,7 +96,8 @@ public class ExternalJarRunner {
cmd.add("-jar"); cmd.add("-jar");
cmd.add(jarPath); cmd.add(jarPath);
cmd.addAll(args); cmd.addAll(args);
// 0312
log.info("exec jar command: {}", cmd);
ProcessBuilder pb = new ProcessBuilder(cmd); ProcessBuilder pb = new ProcessBuilder(cmd);
pb.redirectErrorStream(true); pb.redirectErrorStream(true);
@@ -99,6 +127,7 @@ public class ExternalJarRunner {
} catch (Exception e) { } catch (Exception e) {
log.error("jar execution error. output=\n{}", out, e); log.error("jar execution error. output=\n{}", out, e);
throw new RuntimeException("jar execution error\n" + out, e);
} }
} }

View File

@@ -279,18 +279,28 @@ public class FIleChecker {
return true; return true;
} }
public static List<Folder> getFolderAll(String dirPath, String sortType, int maxDepth) { // kamco-nfs를 확인하는곳이 있어서 파라미터 추가 사용용도확인후 처리
public static List<Folder> getFolderAll(
String dirPath, String sortType, int maxDepth, String nfsRootDir) {
Path startPath = Paths.get(dirPath); Path startPath = Paths.get(dirPath);
List<Folder> folderList = List.of(); List<Folder> folderList = List.of();
try (Stream<Path> stream = Files.walk(startPath, maxDepth)) { log.info("[FIND_FOLDER] DIR : {} {} {} {}", dirPath, sortType, maxDepth, startPath);
int childDirCount = getChildFolderCount(startPath.toFile());
log.info("[FIND_FOLDER] START_PATH_CHILD_DIR_COUNT : {}", childDirCount);
try (Stream<Path> stream = Files.walk(startPath, maxDepth)) {
folderList = folderList =
stream stream
.filter(Files::isDirectory) .filter(Files::isDirectory)
.filter(p -> !p.toString().equals(dirPath)) .filter(
p ->
!p.toAbsolutePath()
.normalize()
.equals(startPath.toAbsolutePath().normalize()))
.map( .map(
path -> { path -> {
int depth = path.getNameCount(); int depth = path.getNameCount();
@@ -300,11 +310,12 @@ public class FIleChecker {
String parentPath = path.getParent().toString(); String parentPath = path.getParent().toString();
String fullPath = path.toAbsolutePath().toString(); String fullPath = path.toAbsolutePath().toString();
boolean isValid = // 이것이 필요한건가?
!NameValidator.containsKorean(folderNm) // boolean isShowHide =
&& !NameValidator.containsWhitespaceRegex(folderNm) // !parentFolderNm.equals("kamco-nfs"); // 폴더 리스트에
&& !parentFolderNm.equals("kamco-nfs"); // kamco-nfs 하위만 나오도록 처리
boolean isShowHide =
!parentFolderNm.equals(nfsRootDir); // 폴더 리스트에 nfsRootDir 하위만 나오도록 처리
File file = new File(fullPath); File file = new File(fullPath);
int childCnt = getChildFolderCount(file); int childCnt = getChildFolderCount(file);
String lastModified = getLastModified(file); String lastModified = getLastModified(file);
@@ -317,7 +328,7 @@ public class FIleChecker {
depth, depth,
childCnt, childCnt,
lastModified, lastModified,
isValid); isShowHide);
}) })
.collect(Collectors.toList()); .collect(Collectors.toList());
@@ -352,24 +363,8 @@ public class FIleChecker {
return folderList; return folderList;
} }
public static List<Folder> getFolderAll(String dirPath) { public static List<Folder> getFolderAll(String dirPath, String nfsRootDir) {
return getFolderAll(dirPath, "name", 1); return getFolderAll(dirPath, "name", 1, nfsRootDir);
}
public static List<Folder> getFolderAll(String dirPath, String sortType) {
return getFolderAll(dirPath, sortType, 1);
}
public static int getChildFolderCount(String dirPath) {
File directory = new File(dirPath);
File[] childFolders = directory.listFiles(File::isDirectory);
int childCnt = 0;
if (childFolders != null) {
childCnt = childFolders.length;
}
return childCnt;
} }
public static int getChildFolderCount(File directory) { public static int getChildFolderCount(File directory) {
@@ -383,11 +378,6 @@ public class FIleChecker {
return childCnt; return childCnt;
} }
public static String getLastModified(String dirPath) {
File file = new File(dirPath);
return dttmFormat.format(new Date(file.lastModified()));
}
public static String getLastModified(File file) { public static String getLastModified(File file) {
return dttmFormat.format(new Date(file.lastModified())); return dttmFormat.format(new Date(file.lastModified()));
} }

View File

@@ -1,92 +1,74 @@
package com.kamco.cd.kamcoback.config; package com.kamco.cd.kamcoback.config;
import com.fasterxml.jackson.databind.ObjectMapper; import com.kamco.cd.kamcoback.common.download.dto.DownloadAuditEvent;
import com.kamco.cd.kamcoback.auth.CustomUserDetails; import com.kamco.cd.kamcoback.common.utils.UserUtil;
import com.kamco.cd.kamcoback.common.utils.HeaderUtil;
import com.kamco.cd.kamcoback.config.api.ApiLogFunction; import com.kamco.cd.kamcoback.config.api.ApiLogFunction;
import com.kamco.cd.kamcoback.menu.dto.MenuDto; import jakarta.servlet.DispatcherType;
import com.kamco.cd.kamcoback.menu.service.MenuService;
import com.kamco.cd.kamcoback.postgres.entity.AuditLogEntity;
import com.kamco.cd.kamcoback.postgres.repository.log.AuditLogRepository;
import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse; import jakarta.servlet.http.HttpServletResponse;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Objects;
import java.util.UUID; import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationEventPublisher;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import org.springframework.web.servlet.HandlerInterceptor; import org.springframework.web.servlet.HandlerInterceptor;
/** 파일 다운로드 log 저장 */
@Slf4j @Slf4j
@Component @Component
@RequiredArgsConstructor
public class FileDownloadInteceptor implements HandlerInterceptor { public class FileDownloadInteceptor implements HandlerInterceptor {
private final AuditLogRepository auditLogRepository; private final ApplicationEventPublisher publisher;
private final MenuService menuService; private final UserUtil userUtil;
@Autowired private ObjectMapper objectMapper;
public FileDownloadInteceptor(AuditLogRepository auditLogRepository, MenuService menuService) {
this.auditLogRepository = auditLogRepository;
this.menuService = menuService;
}
@Override @Override
public void afterCompletion( public void afterCompletion(
HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) { HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) {
// 파일 다운로드 API만 필터링 String uri = request.getRequestURI();
if (!request.getRequestURI().contains("/download")) { if (uri == null || !uri.contains("/download")) return;
if (request.getDispatcherType() != DispatcherType.REQUEST) return;
Long userId;
String employeeNo = "";
try {
// a 링크 다운로드일경우 userId가 없으므로 전달받은 사번을 넣는다
userId = userUtil.getId();
if (userId == null) {
employeeNo = request.getParameter("employeeNo");
if (employeeNo == null) {
return;
}
}
} catch (Exception e) {
log.warn("Download audit userId resolve failed. uri={}, err={}", uri, e.toString());
return; return;
} }
Long userId = extractUserId(request);
String ip = ApiLogFunction.getClientIp(request); String ip = ApiLogFunction.getClientIp(request);
int status = response.getStatus();
String normalizedUri = uri.replace("/api", "");
List<?> list = menuService.getFindAll(); UUID downloadUuid = extractUuidFromUri(uri);
List<MenuDto.Basic> result = if (downloadUuid == null) {
list.stream() log.warn("Download UUID parse failed. uri={}", uri);
.map( return; // downloadUuid null 불가 -> 스킵
item -> { }
if (item instanceof LinkedHashMap<?, ?> map) {
return objectMapper.convertValue(map, MenuDto.Basic.class);
} else if (item instanceof MenuDto.Basic dto) {
return dto;
} else {
throw new IllegalStateException("Unsupported cache type: " + item.getClass());
}
})
.toList();
String normalizedUri = request.getRequestURI().replace("/api", ""); // log저장 DownloadAuditEventListener 클래스 호출
MenuDto.Basic basic = publisher.publishEvent(
result.stream() new DownloadAuditEvent(userId, employeeNo, uri, normalizedUri, ip, status, downloadUuid));
.filter(
menu -> menu.getMenuUrl() != null && normalizedUri.startsWith(menu.getMenuUrl()))
.max(Comparator.comparingInt(m -> m.getMenuUrl().length()))
.orElse(null);
AuditLogEntity log =
AuditLogEntity.forFileDownload(
userId,
request.getRequestURI(),
Objects.requireNonNull(basic).getMenuUid(),
ip,
response.getStatus(),
UUID.fromString(HeaderUtil.get(request, "kamco-download-uuid")));
auditLogRepository.save(log);
} }
private Long extractUserId(HttpServletRequest request) { private UUID extractUuidFromUri(String uri) {
if (request.getUserPrincipal() instanceof UsernamePasswordAuthenticationToken auth try {
&& auth.getPrincipal() instanceof CustomUserDetails userDetails) { String[] parts = uri.split("/");
return userDetails.getMember().getId(); String last = parts[parts.length - 1];
return UUID.fromString(last);
} catch (Exception e) {
return null;
} }
return null;
} }
} }

View File

@@ -0,0 +1,35 @@
package com.kamco.cd.kamcoback.config;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
 * Type-safe binding of {@code file.*} configuration properties.
 *
 * <p>Field names must match the configuration keys (relaxed binding), so the misspelled
 * {@code *Extention} fields are kept as-is — renaming them would break the binding.
 * NOTE(review): individual path semantics are inferred from the names; confirm against the
 * application-*.yml profiles.
 */
@Getter
@Setter
@Component
@ConfigurationProperties(prefix = "file")
public class FileProperties {

  private String root; // file.root
  private String nfs; // file.nfs
  private String syncRootDir;
  private String syncTmpDir;
  private String syncFileExtention; // (sic) bound to file.sync-file-extention
  private String datasetDir;
  private String datasetTmpDir;
  private String modelDir;
  private String modelTmpDir;
  private String modelFileExtention; // (sic) bound to file.model-file-extention
  private String ptPath;
  private String datasetResponse;
  private TrainingData trainingData; // nested file.training-data.* group
  private String outputDir;

  /** Nested binding for {@code file.training-data.*}. */
  @Getter
  @Setter
  public static class TrainingData {
    private String geojsonDir; // file.training-data.geojson-dir
  }
}

View File

@@ -0,0 +1,22 @@
package com.kamco.cd.kamcoback.config;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
 * Type-safe binding of {@code inference.*} configuration properties for the inference AI
 * integration (endpoints, NFS paths, and shapefile-exporter JAR locations).
 * NOTE(review): individual meanings inferred from names/usages — confirm against configuration.
 */
@Getter
@Setter
@Component
@ConfigurationProperties(prefix = "inference")
public class InferenceProperties {

  private String nfs; // inference.nfs
  private String url; // inference.url — prediction request endpoint
  private String batchUrl;
  private String geojsonDir;
  private String jarPath; // original shp-exporter JAR
  // Newer shp-exporter JAR used by the batch-mode shapefile run (added 0312).
  private String jarPathV2;
  private String inferenceServerName;
}

View File

@@ -24,7 +24,7 @@ public class OpenApiConfig {
@Value("${swagger.dev-url:https://kamco.dev-api.gs.dabeeo.com}") @Value("${swagger.dev-url:https://kamco.dev-api.gs.dabeeo.com}")
private String devUrl; private String devUrl;
@Value("${swagger.prod-url:https://api.kamco.com}") @Value("${swagger.prod-url:https://aicd-api.e-kamco.com:12013}")
private String prodUrl; private String prodUrl;
@Bean @Bean
@@ -51,9 +51,9 @@ public class OpenApiConfig {
servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버")); servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버"));
// servers.add(new Server().url(prodUrl).description("운영 서버")); // servers.add(new Server().url(prodUrl).description("운영 서버"));
} else if ("prod".equals(profile)) { } else if ("prod".equals(profile)) {
// servers.add(new Server().url(prodUrl).description("운영 서버")); servers.add(new Server().url(prodUrl).description("운영 서버"));
servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버")); servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버"));
servers.add(new Server().url(devUrl).description("개발 서버"));
} else { } else {
servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버")); servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버"));
servers.add(new Server().url(devUrl).description("개발 서버")); servers.add(new Server().url(devUrl).description("개발 서버"));

View File

@@ -3,6 +3,8 @@ package com.kamco.cd.kamcoback.config;
import com.kamco.cd.kamcoback.auth.CustomAuthenticationProvider; import com.kamco.cd.kamcoback.auth.CustomAuthenticationProvider;
import com.kamco.cd.kamcoback.auth.JwtAuthenticationFilter; import com.kamco.cd.kamcoback.auth.JwtAuthenticationFilter;
import com.kamco.cd.kamcoback.auth.MenuAuthorizationManager; import com.kamco.cd.kamcoback.auth.MenuAuthorizationManager;
import com.kamco.cd.kamcoback.common.download.DownloadPaths;
import jakarta.servlet.DispatcherType;
import java.util.List; import java.util.List;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
@@ -44,9 +46,11 @@ public class SecurityConfig {
.authorizeHttpRequests( .authorizeHttpRequests(
auth -> auth ->
auth auth
// .requestMatchers("/chunk_upload_test.html").authenticated() // .requestMatchers("/chunk_upload_test.html").authenticated()
.requestMatchers("/monitor/health", "/monitor/health/**") .requestMatchers("/monitor/health", "/monitor/health/**")
.permitAll() .permitAll()
// 맵시트 영역 전체 허용 (우선순위 최상단) // 맵시트 영역 전체 허용 (우선순위 최상단)
.requestMatchers("/api/mapsheet/**") .requestMatchers("/api/mapsheet/**")
.permitAll() .permitAll()
@@ -67,47 +71,59 @@ public class SecurityConfig {
.requestMatchers("/api/test/review") .requestMatchers("/api/test/review")
.hasAnyRole("ADMIN", "REVIEWER") .hasAnyRole("ADMIN", "REVIEWER")
// shapefile 생성 테스트 API - 인증 없이 접근 가능
.requestMatchers("/api/test/make-shapefile")
.permitAll()
// ASYNC/ERROR 재디스패치는 막지 않기 (다운로드/스트리밍에서 필수)
.dispatcherTypeMatchers(DispatcherType.ASYNC, DispatcherType.ERROR)
.permitAll()
// 다운로드는 인증 필요
.requestMatchers(HttpMethod.GET, DownloadPaths.PATTERNS)
.permitAll()
// 메뉴 등록 ADMIN만 가능 // 메뉴 등록 ADMIN만 가능
.requestMatchers(HttpMethod.POST, "/api/menu/auth") .requestMatchers(HttpMethod.POST, "/api/menu/auth")
.hasAnyRole("ADMIN") .hasAnyRole("ADMIN")
// 에러 경로는 항상 허용 (이미 있지만 유지)
.requestMatchers("/error") .requestMatchers("/error")
.permitAll() .permitAll()
// preflight 허용
.requestMatchers(HttpMethod.OPTIONS, "/**") .requestMatchers(HttpMethod.OPTIONS, "/**")
.permitAll() // preflight 허용 .permitAll()
.requestMatchers( .requestMatchers(
"/api/auth/signin", "/api/auth/signin",
"/api/auth/refresh", "/api/auth/refresh",
"/api/auth/logout", "/api/auth/logout",
"/swagger-ui/**", "/swagger-ui/**",
"/api/members/*/password",
"/v3/api-docs/**", "/v3/api-docs/**",
"/chunk_upload_test.html", "/chunk_upload_test.html",
"/download_progress_test.html",
"/api/model/file-chunk-upload", "/api/model/file-chunk-upload",
"/api/upload/file-chunk-upload", "/api/upload/file-chunk-upload",
"/api/upload/chunk-upload-complete", "/api/upload/chunk-upload-complete",
"/api/change-detection/**", "/api/change-detection/**",
"/api/members/*/password",
"/api/layer/map/**", "/api/layer/map/**",
"/api/layer/tile-url", "/api/layer/tile-url",
"/api/layer/tile-url-year") "/api/layer/tile-url-year",
"/api/common-code/clazz")
.permitAll() .permitAll()
// 로그인한 사용자만 가능 IAM // 로그인한 사용자만 가능 IAM
.requestMatchers( .requestMatchers(
"/api/user/**", "/api/user/**",
"/api/my/menus", "/api/my/menus",
"/api/common-code/**",
"/api/training-data/label/**", "/api/training-data/label/**",
"/api/training-data/review/**") "/api/training-data/review/**")
.authenticated() .authenticated()
.anyRequest()
.access(menuAuthorizationManager)
// .authenticated() // 나머지는 메뉴권한
) .anyRequest()
.addFilterBefore( .access(menuAuthorizationManager))
jwtAuthenticationFilter, .addFilterBefore(jwtAuthenticationFilter, UsernamePasswordAuthenticationFilter.class);
UsernamePasswordAuthenticationFilter
.class) // 요청 들어오면 먼저 JWT 토큰 검사 후 security context 에 사용자 정보 저장.
;
return http.build(); return http.build();
} }
@@ -118,23 +134,18 @@ public class SecurityConfig {
return configuration.getAuthenticationManager(); return configuration.getAuthenticationManager();
} }
/** /** CORS 설정 */
* CORS 설정
*
* @return
*/
@Bean @Bean
public CorsConfigurationSource corsConfigurationSource() { public CorsConfigurationSource corsConfigurationSource() {
CorsConfiguration config = new CorsConfiguration(); // CORS 객체 생성 CorsConfiguration config = new CorsConfiguration();
config.setAllowedOriginPatterns(List.of("*")); // 도메인 허용 config.setAllowedOriginPatterns(List.of("*"));
config.setAllowedMethods(List.of("GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS")); config.setAllowedMethods(List.of("GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"));
config.setAllowedHeaders(List.of("*")); // 헤더요청 Authorization, Content-Type, X-Custom-Header config.setAllowedHeaders(List.of("*"));
config.setAllowCredentials(true); // 쿠키, Authorization 헤더, Bearer Token 등 자격증명 포함 요청을 허용할지 설정 config.setAllowCredentials(true);
config.setExposedHeaders(List.of("Content-Disposition")); config.setExposedHeaders(List.of("Content-Disposition"));
UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource(); UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
/** "/**" → 모든 API 경로에 대해 이 CORS 규칙을 적용 /api/** 같이 특정 경로만 지정 가능. */ source.registerCorsConfiguration("/**", config);
source.registerCorsConfiguration("/**", config); // CORS 정책을 등록
return source; return source;
} }

View File

@@ -16,6 +16,8 @@ public class StartupLogger {
private final Environment environment; private final Environment environment;
private final DataSource dataSource; private final DataSource dataSource;
private final FileProperties fileProperties;
private final InferenceProperties inferenceProperties;
@EventListener(ApplicationReadyEvent.class) @EventListener(ApplicationReadyEvent.class)
public void logStartupInfo() { public void logStartupInfo() {
@@ -79,6 +81,26 @@ public class StartupLogger {
│ DDL Auto : %s │ DDL Auto : %s
│ JDBC Batch Size : %s │ JDBC Batch Size : %s
│ Fetch Batch Size : %s │ Fetch Batch Size : %s
╠════════════════════════════════════════════════════════════════════════════════╣
║ FILE CONFIGURATION ║
╠────────────────────────────────────────────────────────────────────────────────╣
│ Root Directory : %s
│ NFS Mount Path : %s
│ Sync Root Dir : %s
│ Sync Tmp Dir : %s
│ Dataset Dir : %s
│ Model Dir : %s
│ PT Path : %s
│ Output Dir : %s
╠════════════════════════════════════════════════════════════════════════════════╣
║ INFERENCE CONFIGURATION ║
╠────────────────────────────────────────────────────────────────────────────────╣
│ NFS Mount Path : %s
│ Inference URL : %s
│ Batch URL : %s
│ GeoJSON Dir : %s
│ JAR Path : %s
│ Server Names : %s
╚════════════════════════════════════════════════════════════════════════════════╝ ╚════════════════════════════════════════════════════════════════════════════════╝
""", """,
profileInfo, profileInfo,
@@ -89,7 +111,25 @@ public class StartupLogger {
showSql, showSql,
ddlAuto, ddlAuto,
batchSize, batchSize,
batchFetchSize); batchFetchSize,
fileProperties.getRoot() != null ? fileProperties.getRoot() : "N/A",
fileProperties.getNfs() != null ? fileProperties.getNfs() : "N/A",
fileProperties.getSyncRootDir() != null ? fileProperties.getSyncRootDir() : "N/A",
fileProperties.getSyncTmpDir() != null ? fileProperties.getSyncTmpDir() : "N/A",
fileProperties.getDatasetDir() != null ? fileProperties.getDatasetDir() : "N/A",
fileProperties.getModelDir() != null ? fileProperties.getModelDir() : "N/A",
fileProperties.getPtPath() != null ? fileProperties.getPtPath() : "N/A",
fileProperties.getOutputDir() != null ? fileProperties.getOutputDir() : "N/A",
inferenceProperties.getNfs() != null ? inferenceProperties.getNfs() : "N/A",
inferenceProperties.getUrl() != null ? inferenceProperties.getUrl() : "N/A",
inferenceProperties.getBatchUrl() != null ? inferenceProperties.getBatchUrl() : "N/A",
inferenceProperties.getGeojsonDir() != null
? inferenceProperties.getGeojsonDir()
: "N/A",
inferenceProperties.getJarPath() != null ? inferenceProperties.getJarPath() : "N/A",
inferenceProperties.getInferenceServerName() != null
? inferenceProperties.getInferenceServerName()
: "N/A");
log.info(startupMessage); log.info(startupMessage);
} }

View File

@@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.config;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.module.SimpleModule;
import com.kamco.cd.kamcoback.common.download.DownloadPaths;
import com.kamco.cd.kamcoback.common.utils.geometry.GeometryDeserializer; import com.kamco.cd.kamcoback.common.utils.geometry.GeometryDeserializer;
import com.kamco.cd.kamcoback.common.utils.geometry.GeometrySerializer; import com.kamco.cd.kamcoback.common.utils.geometry.GeometrySerializer;
import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Geometry;
@@ -39,9 +40,6 @@ public class WebConfig implements WebMvcConfigurer {
@Override @Override
public void addInterceptors(InterceptorRegistry registry) { public void addInterceptors(InterceptorRegistry registry) {
registry registry.addInterceptor(fileDownloadInteceptor).addPathPatterns(DownloadPaths.PATTERNS);
.addInterceptor(fileDownloadInteceptor)
.addPathPatterns("/api/inference/download/**") // 추론 파일 다운로드
.addPathPatterns("/api/training-data/stage/download/**"); // 학습데이터 다운로드
} }
} }

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.config.api; package com.kamco.cd.kamcoback.config.api;
import com.kamco.cd.kamcoback.common.download.DownloadPaths;
import jakarta.servlet.FilterChain; import jakarta.servlet.FilterChain;
import jakarta.servlet.ServletException; import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletRequest;
@@ -16,6 +17,14 @@ public class ApiLogFilter extends OncePerRequestFilter {
protected void doFilterInternal( protected void doFilterInternal(
HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) HttpServletRequest request, HttpServletResponse response, FilterChain filterChain)
throws ServletException, IOException { throws ServletException, IOException {
String uri = request.getRequestURI();
if (DownloadPaths.matches(uri)) {
filterChain.doFilter(request, response);
return;
}
ContentCachingRequestWrapper wrappedRequest = new ContentCachingRequestWrapper(request); ContentCachingRequestWrapper wrappedRequest = new ContentCachingRequestWrapper(request);
ContentCachingResponseWrapper wrappedResponse = new ContentCachingResponseWrapper(response); ContentCachingResponseWrapper wrappedResponse = new ContentCachingResponseWrapper(response);

View File

@@ -173,8 +173,9 @@ public class ApiResponseDto<T> {
+ "To reset your password again, please submit a new request through \"Forgot" + "To reset your password again, please submit a new request through \"Forgot"
+ " Password.\""), + " Password.\""),
PAYLOAD_TOO_LARGE("업로드 용량 제한을 초과했습니다."), PAYLOAD_TOO_LARGE("업로드 용량 제한을 초과했습니다."),
NOT_FOUND_TARGET_YEAR("기준도 도엽을 찾을 수 없습니다."), NOT_FOUND_TARGET_YEAR("기준도 도엽을 찾을 수 없습니다."),
NOT_FOUND_COMPARE_YEAR("비교도 도엽을 찾을 수 없습니다."), NOT_FOUND_COMPARE_YEAR("비교도 도엽을 찾을 수 없습니다."),
NOT_FOUND_MAP_SHEET_NUM("추론 가능한 도엽이 없습니다."),
FAIL_SAVE_MAP_SHEET("도엽 저장 중 오류가 발생했습니다."), FAIL_SAVE_MAP_SHEET("도엽 저장 중 오류가 발생했습니다."),
FAIL_CREATE_MAP_SHEET_FILE("도엽 설정파일 생성 중 오류가 발생했습니다."), FAIL_CREATE_MAP_SHEET_FILE("도엽 설정파일 생성 중 오류가 발생했습니다."),
; ;

View File

@@ -3,8 +3,8 @@ package com.kamco.cd.kamcoback.config.resttemplate;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.List; import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2; import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.http.HttpEntity; import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod; import org.springframework.http.HttpMethod;
@@ -14,54 +14,86 @@ import org.springframework.stereotype.Component;
import org.springframework.web.client.HttpStatusCodeException; import org.springframework.web.client.HttpStatusCodeException;
import org.springframework.web.client.RestTemplate; import org.springframework.web.client.RestTemplate;
@RequiredArgsConstructor
@Component @Component
@Log4j2 @Log4j2
public class ExternalHttpClient { public class ExternalHttpClient {
private final RestTemplate restTemplate; private final RestTemplate restTemplate; // short (@Primary)
private final RestTemplate restTemplateLong; // long
private final ObjectMapper objectMapper; private final ObjectMapper objectMapper;
public ExternalHttpClient(
RestTemplate restTemplate,
@Qualifier("restTemplateLong") RestTemplate restTemplateLong,
ObjectMapper objectMapper) {
this.restTemplate = restTemplate;
this.restTemplateLong = restTemplateLong;
this.objectMapper = objectMapper;
}
/** 기본(짧은 timeout) 호출 */
public <T> ExternalCallResult<T> call( public <T> ExternalCallResult<T> call(
String url, HttpMethod method, Object body, HttpHeaders headers, Class<T> responseType) { String url, HttpMethod method, Object body, HttpHeaders headers, Class<T> responseType) {
// responseType 기반으로 Accept 동적 세팅 return doCall(restTemplate, url, method, body, headers, responseType);
}
/** 추론/대용량 전용 (긴 timeout) */
public <T> ExternalCallResult<T> callLong(
String url, HttpMethod method, Object body, HttpHeaders headers, Class<T> responseType) {
return doCall(restTemplateLong, url, method, body, headers, responseType);
}
private <T> ExternalCallResult<T> doCall(
RestTemplate rt,
String url,
HttpMethod method,
Object body,
HttpHeaders headers,
Class<T> responseType) {
HttpHeaders resolvedHeaders = resolveHeaders(headers, responseType); HttpHeaders resolvedHeaders = resolveHeaders(headers, responseType);
logRequestBody(body); logRequestBody(body);
HttpEntity<Object> entity = new HttpEntity<>(body, resolvedHeaders); HttpEntity<Object> entity = new HttpEntity<>(body, resolvedHeaders);
try { try {
// String: raw bytes -> UTF-8 string
// String 응답은 raw byte로 받아 UTF-8 변환
if (responseType == String.class) { if (responseType == String.class) {
ResponseEntity<byte[]> res = restTemplate.exchange(url, method, entity, byte[].class); ResponseEntity<byte[]> res = rt.exchange(url, method, entity, byte[].class);
String raw = String raw =
(res.getBody() == null) ? null : new String(res.getBody(), StandardCharsets.UTF_8); (res.getBody() == null) ? null : new String(res.getBody(), StandardCharsets.UTF_8);
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
T casted = (T) raw; T casted = (T) raw;
return new ExternalCallResult<>(res.getStatusCodeValue(), true, casted, null); return new ExternalCallResult<>(res.getStatusCodeValue(), true, casted, null);
} }
// byte[]: raw bytes로 받고, JSON이면 에러로 처리 // byte[] 응답 처리
if (responseType == byte[].class) { if (responseType == byte[].class) {
ResponseEntity<byte[]> res = restTemplate.exchange(url, method, entity, byte[].class); ResponseEntity<byte[]> res = rt.exchange(url, method, entity, byte[].class);
MediaType ct = res.getHeaders().getContentType(); MediaType ct = res.getHeaders().getContentType();
byte[] bytes = res.getBody(); byte[] bytes = res.getBody();
// JSON이면 에러로 간주
if (isJsonLike(ct)) { if (isJsonLike(ct)) {
String err = (bytes == null) ? null : new String(bytes, StandardCharsets.UTF_8); String err = (bytes == null) ? null : new String(bytes, StandardCharsets.UTF_8);
return new ExternalCallResult<>(res.getStatusCodeValue(), false, null, err); return new ExternalCallResult<>(res.getStatusCodeValue(), false, null, err);
} }
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
T casted = (T) bytes; T casted = (T) bytes;
return new ExternalCallResult<>(res.getStatusCodeValue(), true, casted, null); return new ExternalCallResult<>(res.getStatusCodeValue(), true, casted, null);
} }
// DTO 등: 일반 역직렬화 // DTO 응답
ResponseEntity<T> res = restTemplate.exchange(url, method, entity, responseType); ResponseEntity<T> res = rt.exchange(url, method, entity, responseType);
return new ExternalCallResult<>(res.getStatusCodeValue(), true, res.getBody(), null); return new ExternalCallResult<>(res.getStatusCodeValue(), true, res.getBody(), null);
} catch (HttpStatusCodeException e) { } catch (HttpStatusCodeException e) {
@@ -70,29 +102,28 @@ public class ExternalHttpClient {
} }
} }
// 기존 resolveJsonHeaders를 "동적"으로 교체 /** Accept / Content-Type 자동 처리 */
private HttpHeaders resolveHeaders(HttpHeaders headers, Class<?> responseType) { private HttpHeaders resolveHeaders(HttpHeaders headers, Class<?> responseType) {
// 원본 headers를 그대로 쓰면 외부에서 재사용할 때 사이드이펙트 날 수 있어서 복사 권장
HttpHeaders h = (headers == null) ? new HttpHeaders() : new HttpHeaders(headers); HttpHeaders h = (headers == null) ? new HttpHeaders() : new HttpHeaders(headers);
// 요청 바디 기본은 JSON이라고 가정 (필요하면 호출부에서 덮어쓰기) // 기본 Content-Type
if (h.getContentType() == null) { if (h.getContentType() == null) {
h.setContentType(MediaType.APPLICATION_JSON); h.setContentType(MediaType.APPLICATION_JSON);
} }
// 호출부에서 Accept를 명시했으면 존중 // Accept 이미 있으면 존중
if (h.getAccept() != null && !h.getAccept().isEmpty()) { if (h.getAccept() != null && !h.getAccept().isEmpty()) {
return h; return h;
} }
// responseType Accept 자동 지정 // 응답 타입 Accept 자동 지정
if (responseType == byte[].class) { if (responseType == byte[].class) {
h.setAccept( h.setAccept(
List.of( List.of(
MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_OCTET_STREAM,
MediaType.valueOf("application/zip"), MediaType.valueOf("application/zip"),
MediaType.APPLICATION_JSON // 실패(JSON 에러 바디) 대비 MediaType.APPLICATION_JSON));
));
} else { } else {
h.setAccept(List.of(MediaType.APPLICATION_JSON)); h.setAccept(List.of(MediaType.APPLICATION_JSON));
} }
@@ -100,12 +131,15 @@ public class ExternalHttpClient {
return h; return h;
} }
/** JSON 응답 여부 체크 */
private boolean isJsonLike(MediaType ct) { private boolean isJsonLike(MediaType ct) {
if (ct == null) return false; if (ct == null) return false;
return ct.includes(MediaType.APPLICATION_JSON) return ct.includes(MediaType.APPLICATION_JSON)
|| "application/problem+json".equalsIgnoreCase(ct.toString()); || "application/problem+json".equalsIgnoreCase(ct.toString());
} }
/** 요청 바디 로그 */
private void logRequestBody(Object body) { private void logRequestBody(Object body) {
try { try {
if (body != null) { if (body != null) {

View File

@@ -4,6 +4,7 @@ import lombok.extern.log4j.Log4j2;
import org.springframework.boot.web.client.RestTemplateBuilder; import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.http.client.BufferingClientHttpRequestFactory; import org.springframework.http.client.BufferingClientHttpRequestFactory;
import org.springframework.http.client.SimpleClientHttpRequestFactory; import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.web.client.RestTemplate; import org.springframework.web.client.RestTemplate;
@@ -13,10 +14,20 @@ import org.springframework.web.client.RestTemplate;
public class RestTemplateConfig { public class RestTemplateConfig {
@Bean @Bean
@Primary
public RestTemplate restTemplate(RestTemplateBuilder builder) { public RestTemplate restTemplate(RestTemplateBuilder builder) {
return build(builder, 20000, 60000);
}
@Bean("restTemplateLong")
public RestTemplate restTemplateLong(RestTemplateBuilder builder) {
return build(builder, 20000, 60000);
}
private RestTemplate build(RestTemplateBuilder builder, int connectTimeoutMs, int readTimeoutMs) {
SimpleClientHttpRequestFactory baseFactory = new SimpleClientHttpRequestFactory(); SimpleClientHttpRequestFactory baseFactory = new SimpleClientHttpRequestFactory();
baseFactory.setConnectTimeout(2000); baseFactory.setConnectTimeout(connectTimeoutMs);
baseFactory.setReadTimeout(3000); baseFactory.setReadTimeout(readTimeoutMs);
RestTemplate rt = RestTemplate rt =
builder builder
@@ -24,10 +35,8 @@ public class RestTemplateConfig {
.additionalInterceptors(new RetryInterceptor()) .additionalInterceptors(new RetryInterceptor())
.build(); .build();
// byte[] 응답은 무조건 raw로 읽게 강제 (Jackson이 끼어들 여지 제거)
rt.getMessageConverters() rt.getMessageConverters()
.add(0, new org.springframework.http.converter.ByteArrayHttpMessageConverter()); .add(0, new org.springframework.http.converter.ByteArrayHttpMessageConverter());
return rt; return rt;
} }
} }

View File

@@ -1,4 +1,4 @@
package com.kamco.cd.kamcoback.config; package com.kamco.cd.kamcoback.config.swagger;
import io.swagger.v3.oas.annotations.enums.SecuritySchemeType; import io.swagger.v3.oas.annotations.enums.SecuritySchemeType;
import io.swagger.v3.oas.annotations.security.SecurityScheme; import io.swagger.v3.oas.annotations.security.SecurityScheme;

View File

@@ -0,0 +1,97 @@
package com.kamco.cd.kamcoback.config.swagger;
import jakarta.servlet.http.HttpServletRequest;
import java.nio.charset.StandardCharsets;
import org.springdoc.core.properties.SwaggerUiConfigProperties;
import org.springdoc.core.properties.SwaggerUiOAuthProperties;
import org.springdoc.core.providers.ObjectMapperProvider;
import org.springdoc.webmvc.ui.SwaggerIndexPageTransformer;
import org.springdoc.webmvc.ui.SwaggerIndexTransformer;
import org.springdoc.webmvc.ui.SwaggerWelcomeCommon;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.Profile;
import org.springframework.core.io.Resource;
import org.springframework.web.servlet.resource.ResourceTransformerChain;
import org.springframework.web.servlet.resource.TransformedResource;
@Profile({"local", "dev"})
@Configuration
public class SwaggerUiAutoAuthConfig {
@Bean
@Primary
public SwaggerIndexTransformer swaggerIndexTransformer(
SwaggerUiConfigProperties swaggerUiConfigProperties,
SwaggerUiOAuthProperties swaggerUiOAuthProperties,
SwaggerWelcomeCommon swaggerWelcomeCommon,
ObjectMapperProvider objectMapperProvider) {
SwaggerIndexPageTransformer delegate =
new SwaggerIndexPageTransformer(
swaggerUiConfigProperties,
swaggerUiOAuthProperties,
swaggerWelcomeCommon,
objectMapperProvider);
return new SwaggerIndexTransformer() {
private static final String TOKEN_KEY = "SWAGGER_ACCESS_TOKEN";
@Override
public Resource transform(
HttpServletRequest request, Resource resource, ResourceTransformerChain chain) {
try {
// 1) springdoc 기본 변환 먼저 적용
Resource transformed = delegate.transform(request, resource, chain);
String html =
new String(transformed.getInputStream().readAllBytes(), StandardCharsets.UTF_8);
String loginPathContains = "/api/auth/signin";
String inject =
"""
tagsSorter: (a, b) => {
const TOP = '인증(Auth)';
if (a === TOP && b !== TOP) return -1;
if (b === TOP && a !== TOP) return 1;
return a.localeCompare(b);
},
requestInterceptor: (req) => {
const token = localStorage.getItem('%s');
if (token) {
req.headers = req.headers || {};
req.headers['Authorization'] = 'Bearer ' + token;
}
return req;
},
responseInterceptor: async (res) => {
try {
const isLogin = (res?.url?.includes('%s') && res?.status === 200);
if (isLogin) {
const text = (typeof res.data === 'string') ? res.data : JSON.stringify(res.data);
const json = JSON.parse(text);
const token = json?.data?.accessToken;
if (token) {
localStorage.setItem('%s', token);
}
}
} catch (e) {}
return res;
},
"""
.formatted(TOKEN_KEY, loginPathContains, TOKEN_KEY);
html = html.replace("SwaggerUIBundle({", "SwaggerUIBundle({\n" + inject);
return new TransformedResource(transformed, html.getBytes(StandardCharsets.UTF_8));
} catch (Exception e) {
// 실패 시 원본 반환(문서 깨짐 방지)
return resource;
}
}
};
}
}

View File

@@ -11,11 +11,6 @@ import com.kamco.cd.kamcoback.gukyuin.dto.DetectMastDto.Basic;
import com.kamco.cd.kamcoback.gukyuin.dto.DetectMastDto.DetectMastReq; import com.kamco.cd.kamcoback.gukyuin.dto.DetectMastDto.DetectMastReq;
import com.kamco.cd.kamcoback.gukyuin.dto.GukYuinDto.GukYuinLinkableRes; import com.kamco.cd.kamcoback.gukyuin.dto.GukYuinDto.GukYuinLinkableRes;
import com.kamco.cd.kamcoback.gukyuin.service.GukYuinApiService; import com.kamco.cd.kamcoback.gukyuin.service.GukYuinApiService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiLabelJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiPnuJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiStatusJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiStbltJobService;
import io.swagger.v3.oas.annotations.Hidden;
import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content; import io.swagger.v3.oas.annotations.media.Content;
@@ -43,10 +38,6 @@ import org.springframework.web.bind.annotation.RestController;
public class GukYuinApiController { public class GukYuinApiController {
private final GukYuinApiService gukYuinApiService; private final GukYuinApiService gukYuinApiService;
private final GukYuinApiPnuJobService gukYuinApiPnuJobService;
private final GukYuinApiStatusJobService gukYuinApiStatusJobService;
private final GukYuinApiLabelJobService gukYuinApiLabelJobService;
private final GukYuinApiStbltJobService gukYuinApiStbltJobService;
/** 탐지결과 등록 */ /** 탐지결과 등록 */
@Operation(summary = "탐지결과 등록", description = "탐지결과 등록") @Operation(summary = "탐지결과 등록", description = "탐지결과 등록")
@@ -128,7 +119,7 @@ public class GukYuinApiController {
}) })
public ApiResponseDto<ChngDetectMastDto.ResultDto> selectChangeDetectionDtctIdList( public ApiResponseDto<ChngDetectMastDto.ResultDto> selectChangeDetectionDtctIdList(
@RequestParam(required = false) String chnDtctId) { @RequestParam(required = false) String chnDtctId) {
return ApiResponseDto.ok(gukYuinApiService.listChnDtctId(chnDtctId)); return ApiResponseDto.ok(gukYuinApiService.listChnDtctId(chnDtctId, ""));
} }
@Operation(summary = "탐지결과 등록목록 조회(1건 조회)", description = "탐지결과 등록목록 조회") @Operation(summary = "탐지결과 등록목록 조회(1건 조회)", description = "탐지결과 등록목록 조회")
@@ -192,7 +183,7 @@ public class GukYuinApiController {
@PathVariable String chnDtctId, @PathVariable String chnDtctId,
@RequestParam(defaultValue = "0") Integer pageIndex, @RequestParam(defaultValue = "0") Integer pageIndex,
@RequestParam(defaultValue = "10") Integer pageSize) { @RequestParam(defaultValue = "10") Integer pageSize) {
return ApiResponseDto.ok(gukYuinApiService.findChnContList(chnDtctId, pageIndex, pageSize)); return ApiResponseDto.ok(gukYuinApiService.findChnContList(chnDtctId, pageIndex, pageSize, ""));
} }
@Operation(summary = "탐지객체 조회 (탐지객체 1건 조회)", description = "탐지객체 조회 (탐지객체 1건 조회)") @Operation(summary = "탐지객체 조회 (탐지객체 1건 조회)", description = "탐지객체 조회 (탐지객체 1건 조회)")
@@ -272,7 +263,8 @@ public class GukYuinApiController {
@PostMapping("/rlb/objt/{chnDtctObjtId}/lbl/{lblYn}") @PostMapping("/rlb/objt/{chnDtctObjtId}/lbl/{lblYn}")
public ApiResponseDto<ChngDetectContDto.ResultLabelDto> updateChnDtctObjtLabelingYn( public ApiResponseDto<ChngDetectContDto.ResultLabelDto> updateChnDtctObjtLabelingYn(
@PathVariable String chnDtctObjtId, @PathVariable String lblYn) { @PathVariable String chnDtctObjtId, @PathVariable String lblYn) {
return ApiResponseDto.ok(gukYuinApiService.updateChnDtctObjtLabelingYn(chnDtctObjtId, lblYn)); return ApiResponseDto.ok(
gukYuinApiService.updateChnDtctObjtLabelingYn(chnDtctObjtId, lblYn, ""));
} }
@Operation(summary = "국유in연동 등록", description = "국유in연동 등록") @Operation(summary = "국유in연동 등록", description = "국유in연동 등록")
@@ -309,7 +301,7 @@ public class GukYuinApiController {
@PathVariable String chnDtctId, @PathVariable String chnDtctId,
@Parameter(description = "날짜(기본은 어제 날짜)") @RequestParam(defaultValue = "20260205") @Parameter(description = "날짜(기본은 어제 날짜)") @RequestParam(defaultValue = "20260205")
String yyyymmdd) { String yyyymmdd) {
return ApiResponseDto.ok(gukYuinApiService.findRlbDtctList(chnDtctId, yyyymmdd)); return ApiResponseDto.ok(gukYuinApiService.findRlbDtctList(chnDtctId, yyyymmdd, ""));
} }
@Operation(summary = "탐지객체 적합여부 조회 (객체별 조회)", description = "탐지객체 적합여부 조회 (객체별 조회)") @Operation(summary = "탐지객체 적합여부 조회 (객체별 조회)", description = "탐지객체 적합여부 조회 (객체별 조회)")
@@ -331,35 +323,22 @@ public class GukYuinApiController {
return ApiResponseDto.ok(gukYuinApiService.findRlbDtctObject(chnDtctObjtId)); return ApiResponseDto.ok(gukYuinApiService.findRlbDtctObject(chnDtctObjtId));
} }
@Hidden @Operation(summary = "실태조사 적합여부 업데이트", description = "실태조사 적합여부 업데이트")
@Operation(summary = "job test pnu", description = "job test pnu") @ApiResponses(
@GetMapping("/job-test/pnu") value = {
public ApiResponseDto<Void> findGukYuinContListPnuUpdate() { @ApiResponse(
gukYuinApiPnuJobService.findGukYuinContListPnuUpdate(); responseCode = "201",
return ApiResponseDto.ok(null); description = "등록 성공",
} content =
@Content(
@Hidden mediaType = "application/json",
@Operation(summary = "job test status", description = "job test status") schema = @Schema(implementation = DetectMastReq.class))),
@GetMapping("/job-test/status") @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
public ApiResponseDto<Void> findGukYuinMastCompleteYn() { @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
gukYuinApiStatusJobService.findGukYuinMastCompleteYn(); })
return ApiResponseDto.ok(null); @PostMapping("/rlb/objt/{chnDtctObjtId}/stblt/{stbltYn}")
} public ApiResponseDto<ChngDetectContDto.ResultLabelDto> updateStbltObjtYn(
@PathVariable String chnDtctObjtId, @PathVariable String stbltYn) {
@Hidden return ApiResponseDto.ok(gukYuinApiService.updateStbltObjtYn(chnDtctObjtId, stbltYn, ""));
@Operation(summary = "job test label", description = "job test label")
@GetMapping("/job-test/label")
public ApiResponseDto<Void> findLabelingCompleteSend() {
gukYuinApiLabelJobService.findLabelingCompleteSend();
return ApiResponseDto.ok(null);
}
@Hidden
@Operation(summary = "job test stblt", description = "job test stblt")
@GetMapping("/job-test/stblt")
public ApiResponseDto<Void> findGukYuinEligibleForSurvey() {
gukYuinApiStbltJobService.findGukYuinEligibleForSurvey();
return ApiResponseDto.ok(null);
} }
} }

View File

@@ -137,4 +137,15 @@ public class ChngDetectContDto {
private String reqIp; private String reqIp;
private String reqEpno; private String reqEpno;
} }
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class StbltResult {
private String stbltYn;
private String incyCd;
private String incyCmnt;
}
} }

View File

@@ -83,7 +83,7 @@ public class ChngDetectMastDto {
@Schema( @Schema(
description = "탐지결과 절대경로명 /kamco_nas/export/{chnDtctId}", description = "탐지결과 절대경로명 /kamco_nas/export/{chnDtctId}",
example = "/kamco-nfs/dataset/export/D5F192EC76D34F6592035BE63A84F591") example = "{file.nfs}/dataset/export/D5F192EC76D34F6592035BE63A84F591")
private String pathNm; private String pathNm;
@Schema(description = "사원번호", example = "123456") @Schema(description = "사원번호", example = "123456")
@@ -247,45 +247,45 @@ public class ChngDetectMastDto {
@AllArgsConstructor @AllArgsConstructor
public static class RlbDtctMastDto { public static class RlbDtctMastDto {
private String pnuDtctId; private String pnuDtctId; // PNU탐지ID
private String pnu; private String pnu; // PNU코드(19자리)
private String lrmSyncYmd; private String lrmSyncYmd; // 지적도동기화일자(YYYYMMDD)
private String pnuSyncYmd; private String pnuSyncYmd; // PNU동기화일자(YYYYMMDD)
private String mpqdNo; // 도번호 private String mpqdNo; // 도번호
private String cprsYr; // 비교년도 private String cprsYr; // 비교년도
private String crtrYr; // 기준년도 private String crtrYr; // 기준년도
private String chnDtctSno; // 회차 private String chnDtctSno; // 회차, 변화탐지순번
private String chnDtctId; private String chnDtctId; // 변화탐지ID(UUID)
private String chnDtctMstId; private String chnDtctMstId; // 변화탐지마스터ID
private String chnDtctObjtId; private String chnDtctObjtId; // 변화탐지객체ID
private String chnDtctContId; private String chnDtctContId; // 변화탐지내용ID
private String chnCd; private String chnCd; // 변화코드
private String chnDtctProb; private String chnDtctProb; // 변화탐지정확도(0~1)
private String bfClsCd; // 이전분류코드 private String bfClsCd; // 이전분류코드
private String bfClsProb; // 이전분류정확도 private String bfClsProb; // 이전분류정확도(0~1)
private String afClsCd; // 이후분류코드 private String afClsCd; // 이후분류코드
private String afClsProb; // 이후분류정확도 private String afClsProb; // 이후분류정확도(0~1)
private String pnuSqms; private String pnuSqms; // PNU면적(㎡)
private String pnuDtctSqms; private String pnuDtctSqms; // PNU탐지면적(㎡)
private String chnDtctSqms; private String chnDtctSqms; // 변화탐지면적(㎡)
private String stbltYn; private String stbltYn; // 적합여부(Y/N) - 안정성 (Y:부적합, N:적합)
private String incyCd; private String incyCd; // 부적합코드
private String incyRsnCont; private String incyRsnCont; // 부적합사유내용
private String lockYn; private String lockYn; // 잠금여부(Y/N)
private String lblYn; private String lblYn; // 라벨여부(Y/N)
private String chgYn; private String chgYn; // 변경여부(Y/N)
private String rsatctNo; private String rsatctNo; // 부동산등기번호
private String rmk; private String rmk; // 비고
private String crtDt; // 생성일시 private String crtDt; // 생성일시
private String crtEpno; // 생성사원번호 private String crtEpno; // 생성사원번호
private String crtIp; // 생성사원아이피 private String crtIp; // 생성사원아이피
private String chgDt; private String chgDt; // 변경일시
private String chgEpno; private String chgEpno; // 변경자사번
private String chgIp; private String chgIp; // 변경자IP
private String delYn; // 삭제여부 private String delYn; // 삭제여부
} }

View File

@@ -14,9 +14,9 @@ public class GukYuinDto {
public enum GukYuinLinkFailCode implements EnumType { public enum GukYuinLinkFailCode implements EnumType {
OK("연동 가능"), OK("연동 가능"),
NOT_FOUND("대상 회차가 없습니다."), NOT_FOUND("대상 회차가 없습니다."),
SCOPE_PART_NOT_ALLOWED("부분 도엽은 연동 불가능 합니다."), SCOPE_PART_NOT_ALLOWED("부분 도엽 추론 결과는 연동 할 수 없습니다."),
HAS_RUNNING_INFERENCE("라벨링 진행 중 회차가 있습니다."), HAS_RUNNING_INFERENCE("라벨링 진행중 회차가 있습니다.\n진행중인 라벨링 작업을 종료하신 후 다시 연동해주세요."),
OTHER_GUKYUIN_IN_PROGRESS("국유in 연동 진행 중 회차가 있습니다."); OTHER_GUKYUIN_IN_PROGRESS("국유in 연동 진행중입니다. 선행 연동 작업이 종료된 후 진행할 수 있습니다.");
private final String desc; private final String desc;
@@ -36,8 +36,9 @@ public class GukYuinDto {
public static class GukYuinLinkableRes { public static class GukYuinLinkableRes {
private boolean linkable; private boolean linkable;
// private GukYuinLinkFailCode code; private GukYuinLinkFailCode code;
private String message; private String message;
private UUID inferenceUuid;
} }
// Repository가 반환할 Fact(조회 결과) // Repository가 반환할 Fact(조회 결과)
@@ -45,7 +46,8 @@ public class GukYuinDto {
boolean existsLearn, boolean existsLearn,
boolean isPartScope, boolean isPartScope,
boolean hasRunningInference, boolean hasRunningInference,
boolean hasOtherUnfinishedGukYuin) {} boolean hasOtherUnfinishedGukYuin,
UUID inferenceUuid) {}
@Getter @Getter
@Setter @Setter

View File

@@ -66,6 +66,12 @@ public class GukYuinApiService {
@Value("${gukyuin.cdi}") @Value("${gukyuin.cdi}")
private String gukyuinCdiUrl; private String gukyuinCdiUrl;
@Value("${file.nfs}")
private String nfs;
@Value("${file.output-dir}") // 국유인 반영 파일 경로
private String outputDir;
@Value("${file.dataset-dir}") @Value("${file.dataset-dir}")
private String datasetDir; private String datasetDir;
@@ -231,9 +237,12 @@ public class GukYuinApiService {
GukYuinLinkFailCode code = decideCode(f); GukYuinLinkFailCode code = decideCode(f);
GukYuinLinkableRes res = new GukYuinLinkableRes(); GukYuinLinkableRes res = new GukYuinLinkableRes();
// res.setCode(code); res.setCode(code);
res.setLinkable(code == GukYuinLinkFailCode.OK); res.setLinkable(code == GukYuinLinkFailCode.OK);
res.setMessage(code.getDesc()); res.setMessage(code.getDesc());
if (code == GukYuinLinkFailCode.HAS_RUNNING_INFERENCE) {
res.setInferenceUuid(f.inferenceUuid());
}
return res; return res;
} }
@@ -243,9 +252,9 @@ public class GukYuinApiService {
return GukYuinLinkFailCode.NOT_FOUND; return GukYuinLinkFailCode.NOT_FOUND;
} }
if (f.isPartScope()) { // if (f.isPartScope()) {
return GukYuinLinkFailCode.SCOPE_PART_NOT_ALLOWED; // return GukYuinLinkFailCode.SCOPE_PART_NOT_ALLOWED;
} // }
if (f.hasRunningInference()) { if (f.hasRunningInference()) {
return GukYuinLinkFailCode.HAS_RUNNING_INFERENCE; return GukYuinLinkFailCode.HAS_RUNNING_INFERENCE;
@@ -259,7 +268,8 @@ public class GukYuinApiService {
} }
// 탐지객체 리스트 조회 // 탐지객체 리스트 조회
public ResultContDto findChnContList(String chnDtctId, Integer pageIndex, Integer pageSize) { public ResultContDto findChnContList(
String chnDtctId, Integer pageIndex, Integer pageSize, String batchYn) {
String url = String url =
gukyuinCdiUrl gukyuinCdiUrl
@@ -272,8 +282,9 @@ public class GukYuinApiService {
+ "&reqIp=" + "&reqIp="
+ myip + myip
+ "&reqEpno=" + "&reqEpno="
+ userUtil.getEmployeeNo(); + ("Y".equals(batchYn) ? "BATCH" : userUtil.getEmployeeNo());
log.info("##### API 호출 URL : {}", url);
ExternalCallResult<ChngDetectContDto.ResultContDto> result = ExternalCallResult<ChngDetectContDto.ResultContDto> result =
externalHttpClient.call( externalHttpClient.call(
url, url,
@@ -282,6 +293,7 @@ public class GukYuinApiService {
netUtils.jsonHeaders(), netUtils.jsonHeaders(),
ChngDetectContDto.ResultContDto.class); ChngDetectContDto.ResultContDto.class);
log.info("##### API 호출 완료 : {}", result.toString());
List<ContBasic> contList = result.body().getResult(); List<ContBasic> contList = result.body().getResult();
if (contList == null || contList.isEmpty()) { if (contList == null || contList.isEmpty()) {
return new ResultContDto( return new ResultContDto(
@@ -334,13 +346,14 @@ public class GukYuinApiService {
} }
public ChngDetectContDto.ResultLabelDto updateChnDtctObjtLabelingYn( public ChngDetectContDto.ResultLabelDto updateChnDtctObjtLabelingYn(
String chnDtctObjtId, String lblYn) { String chnDtctObjtId, String lblYn, String batchYn) {
String url = gukyuinCdiUrl + "/rlb/objt/" + chnDtctObjtId + "/lbl/" + lblYn; String url = gukyuinCdiUrl + "/rlb/objt/" + chnDtctObjtId + "/lbl/" + lblYn;
ReqInfo info = new ReqInfo(); ReqInfo info = new ReqInfo();
info.setReqIp(myip); info.setReqIp(myip);
info.setReqEpno(userUtil.getEmployeeNo()); info.setReqEpno("Y".equals(batchYn) ? "BATCH" : userUtil.getEmployeeNo());
log.info("##### API 호출 URL : {}", url);
ExternalCallResult<ChngDetectContDto.ResultLabelDto> result = ExternalCallResult<ChngDetectContDto.ResultLabelDto> result =
externalHttpClient.call( externalHttpClient.call(
url, url,
@@ -348,6 +361,7 @@ public class GukYuinApiService {
info, info,
netUtils.jsonHeaders(), netUtils.jsonHeaders(),
ChngDetectContDto.ResultLabelDto.class); ChngDetectContDto.ResultLabelDto.class);
log.info("##### API 호출 완료 : {}", result.toString());
this.insertGukyuinAuditLog( this.insertGukyuinAuditLog(
EventType.MODIFIED.getId(), EventType.MODIFIED.getId(),
@@ -391,7 +405,7 @@ public class GukYuinApiService {
return result.body(); return result.body();
} }
public ResultDto listChnDtctId(String chnDtctId) { public ResultDto listChnDtctId(String chnDtctId, String batchYn) {
String url = String url =
gukyuinCdiUrl gukyuinCdiUrl
+ "/chn/mast/" + "/chn/mast/"
@@ -399,12 +413,14 @@ public class GukYuinApiService {
+ "?reqIp=" + "?reqIp="
+ myip + myip
+ "&reqEpno=" + "&reqEpno="
+ userUtil.getEmployeeNo(); + ("Y".equals(batchYn) ? "BATCH" : userUtil.getEmployeeNo());
log.info("##### API 호출 URL : {}", url);
ExternalCallResult<ChngDetectMastDto.ResultDto> result = ExternalCallResult<ChngDetectMastDto.ResultDto> result =
externalHttpClient.call( externalHttpClient.call(
url, HttpMethod.GET, null, netUtils.jsonHeaders(), ChngDetectMastDto.ResultDto.class); url, HttpMethod.GET, null, netUtils.jsonHeaders(), ChngDetectMastDto.ResultDto.class);
log.info("##### API 호출 완료 : {}", result.toString());
this.insertGukyuinAuditLog( this.insertGukyuinAuditLog(
EventType.DETAIL.getId(), EventType.DETAIL.getId(),
netUtils.getLocalIP(), netUtils.getLocalIP(),
@@ -452,10 +468,36 @@ public class GukYuinApiService {
return new ResponseObj(ApiResponseCode.DUPLICATE_DATA, "이미 국유인 연동을 한 회차입니다."); return new ResponseObj(ApiResponseCode.DUPLICATE_DATA, "이미 국유인 연동을 한 회차입니다.");
} }
// 추론 shp 파일 생성되는 위치
log.info(
"═════════════════════════════════[s] datasetDir : 추론 shp파일 생성되는 위치 ══════════════════════════════════");
log.info("datasetDir path : " + datasetDir + info.getUid());
log.info(
"═════════════════════════════════[e] datasetDir : 추론 shp파일 생성되는 위치 ════════════════════════════");
if (!Files.isDirectory(Path.of(datasetDir + info.getUid()))) {
return new ResponseObj(
ApiResponseCode.NOT_FOUND_DATA, "파일 경로에 회차 실행 파일이 생성되지 않았습니다. 확인 부탁드립니다.");
}
// 비교년도,기준년도로 전송한 데이터 있는지 확인 후 회차 번호 생성 // 비교년도,기준년도로 전송한 데이터 있는지 확인 후 회차 번호 생성
Integer maxStage = Integer maxStage =
gukyuinCoreService.findMapSheetLearnYearStage(info.getCompareYyyy(), info.getTargetYyyy()); gukyuinCoreService.findMapSheetLearnYearStage(info.getCompareYyyy(), info.getTargetYyyy());
// reqDto 셋팅
// 마운트된 추론 shp 파일 생성되는 위치
log.info(
"═════════════════════════════════[s] outputDir : 마운트된 추론 shp 파일 생성되는 위치 ══════════════════════════════════");
log.info("outputDir path : " + outputDir + info.getUid());
log.info(
"═════════════════════════════════[e] outputDir : 마운트된 추론 shp 파일 생성되는 위치 ════════════════════════════");
String kamconfsDatasetExportPathfsDatasetExportPath = outputDir;
ChnDetectMastReqDto reqDto = new ChnDetectMastReqDto();
reqDto.setCprsYr(String.valueOf(info.getCompareYyyy()));
reqDto.setCrtrYr(String.valueOf(info.getTargetYyyy()));
reqDto.setChnDtctSno(String.valueOf(maxStage + 1));
reqDto.setChnDtctId(info.getUid());
reqDto.setPathNm(kamconfsDatasetExportPathfsDatasetExportPath + info.getUid());
// 1회차를 종료 상태로 처리하고 2회차를 보내야 함 // 1회차를 종료 상태로 처리하고 2회차를 보내야 함
// 추론(learn), 학습데이터(inference) 둘 다 종료 처리 // 추론(learn), 학습데이터(inference) 둘 다 종료 처리
if (maxStage > 0) { if (maxStage > 0) {
@@ -466,19 +508,6 @@ public class GukYuinApiService {
gukyuinCoreService.updateMapSheetInferenceLabelEndStatus(learnId); gukyuinCoreService.updateMapSheetInferenceLabelEndStatus(learnId);
} }
// reqDto 셋팅
ChnDetectMastReqDto reqDto = new ChnDetectMastReqDto();
reqDto.setCprsYr(String.valueOf(info.getCompareYyyy()));
reqDto.setCrtrYr(String.valueOf(info.getTargetYyyy()));
reqDto.setChnDtctSno(String.valueOf(maxStage + 1));
reqDto.setChnDtctId(info.getUid());
reqDto.setPathNm("/kamco-nfs/dataset/export/" + info.getUid());
if (!Files.isDirectory(Path.of("/kamco-nfs/dataset/export/" + info.getUid()))) {
return new ResponseObj(
ApiResponseCode.NOT_FOUND_DATA, "파일 경로에 회차 실행 파일이 생성되지 않았습니다. 확인 부탁드립니다.");
}
// 국유인 /chn/mast/regist 전송 // 국유인 /chn/mast/regist 전송
ChngDetectMastDto.RegistResDto result = this.regist(reqDto); ChngDetectMastDto.RegistResDto result = this.regist(reqDto);
if (result.getSuccess()) { if (result.getSuccess()) {
@@ -528,7 +557,8 @@ public class GukYuinApiService {
return result.body(); return result.body();
} }
public ChngDetectMastDto.RlbDtctDto findRlbDtctList(String chnDtctId, String yyyymmdd) { public ChngDetectMastDto.RlbDtctDto findRlbDtctList(
String chnDtctId, String yyyymmdd, String batchYn) {
String url = String url =
gukyuinCdiUrl gukyuinCdiUrl
@@ -537,14 +567,16 @@ public class GukYuinApiService {
+ "?reqIp=" + "?reqIp="
+ myip + myip
+ "&reqEpno=" + "&reqEpno="
+ userUtil.getEmployeeNo() + ("Y".equals(batchYn) ? "BATCH" : userUtil.getEmployeeNo())
+ "&yyyymmdd=" + "&yyyymmdd="
+ yyyymmdd; + yyyymmdd;
log.info("##### API 호출 URL : {}", url);
ExternalCallResult<ChngDetectMastDto.RlbDtctDto> result = ExternalCallResult<ChngDetectMastDto.RlbDtctDto> result =
externalHttpClient.call( externalHttpClient.call(
url, HttpMethod.GET, null, netUtils.jsonHeaders(), ChngDetectMastDto.RlbDtctDto.class); url, HttpMethod.GET, null, netUtils.jsonHeaders(), ChngDetectMastDto.RlbDtctDto.class);
log.info("##### API 호출 완료 : {}", result.toString());
this.insertGukyuinAuditLog( this.insertGukyuinAuditLog(
EventType.LIST.getId(), EventType.LIST.getId(),
netUtils.getLocalIP(), netUtils.getLocalIP(),
@@ -578,4 +610,45 @@ public class GukYuinApiService {
result.body() != null && result.body().getSuccess()); result.body() != null && result.body().getSuccess());
return result.body(); return result.body();
} }
public ChngDetectContDto.ResultLabelDto updateStbltObjtYn(
String chnDtctObjtId, String stbltYn, String batchYn) {
String url = gukyuinCdiUrl + "/rlb/objt/" + chnDtctObjtId + "/stblt/" + stbltYn;
ReqInfo info = new ReqInfo();
info.setReqIp(myip);
info.setReqEpno("Y".equals(batchYn) ? "BATCH" : userUtil.getEmployeeNo());
ExternalCallResult<ChngDetectContDto.ResultLabelDto> result =
externalHttpClient.call(
url,
HttpMethod.POST,
info,
netUtils.jsonHeaders(),
ChngDetectContDto.ResultLabelDto.class);
this.insertGukyuinAuditLog(
EventType.MODIFIED.getId(),
netUtils.getLocalIP(),
userUtil.getId(),
url.replace(gukyuinUrl, ""),
null,
result.body().getSuccess());
return result.body();
}
public void stbltBulkUpdate(List<String> objectIds) {
for (String objectId : objectIds) {
this.updateStbltObjtYn(objectId, "Y", "");
}
}
public List<String> findStbltObjectIds(String uid, String mapSheetNum) {
return gukyuinCoreService.findStbltObjectIds(uid, mapSheetNum);
}
public Integer updateStbltRandomData(String uid, int updateCnt) {
return gukyuinCoreService.updateStbltRandomData(uid, updateCnt);
}
} }

View File

@@ -0,0 +1,43 @@
package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.service.InferenceManualService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@Tag(name = "추론결과 데이터 생성", description = "추론결과 데이터 생성 API")
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/inference/manual")
public class InferenceManualApiController {
private final InferenceManualService inferenceManualService;
@Operation(summary = "추론 결과로 추론 목록 및 shp 생성", description = "추론 결과로 추론 목록 및 shp 생성")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "데이터 저장 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(implementation = InferenceResultShpDto.InferenceCntDto.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/save")
public void saveTesting(List<Long> batchIds) {
inferenceManualService.saveResultsTesting(batchIds);
}
}

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.inference; package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.common.download.RangeDownloadResponder;
import com.kamco.cd.kamcoback.common.exception.CustomApiException; import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto; import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
@@ -7,6 +8,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
import com.kamco.cd.kamcoback.inference.service.InferenceAsyncService;
import com.kamco.cd.kamcoback.inference.service.InferenceResultService; import com.kamco.cd.kamcoback.inference.service.InferenceResultService;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto; import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto.DownloadReq; import com.kamco.cd.kamcoback.log.dto.AuditLogDto.DownloadReq;
@@ -23,6 +25,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag; import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.validation.Valid; import jakarta.validation.Valid;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files; import java.nio.file.Files;
@@ -32,11 +35,9 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID; import java.util.UUID;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.springframework.core.io.FileSystemResource; import lombok.extern.log4j.Log4j2;
import org.springframework.core.io.Resource;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.GetMapping;
@@ -48,16 +49,20 @@ import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController; import org.springframework.web.bind.annotation.RestController;
@Tag(name = "추론관리", description = "추론관리 API") @Tag(name = "추론관리", description = "추론관리 API")
@Log4j2
@RequestMapping("/api/inference") @RequestMapping("/api/inference")
@RequiredArgsConstructor @RequiredArgsConstructor
@RestController @RestController
public class InferenceResultApiController { public class InferenceResultApiController {
private final InferenceResultService inferenceResultService; private final InferenceResultService inferenceResultService;
private final InferenceAsyncService inferenceAsyncService;
private final MapSheetMngService mapSheetMngService; private final MapSheetMngService mapSheetMngService;
private final ModelMngService modelMngService; private final ModelMngService modelMngService;
private final RangeDownloadResponder rangeDownloadResponder;
@Operation(summary = "추론관리 목록", description = "어드민 홈 > 추론관리 > 추론관리 > 추론관리 목록") /** 추론관리 목록 화면에서 호출 */
@Operation(summary = "추론관리 목록", description = "추론관리 > 추론관리 목록 ")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -89,7 +94,8 @@ public class InferenceResultApiController {
return ApiResponseDto.ok(analResList); return ApiResponseDto.ok(analResList);
} }
@Operation(summary = "추론 진행 여부 확인", description = "어드민 홈 > 추론관리 > 추론관리 > 추론관리 목록") /** 추론관리 목록 화면에서 호출 */
@Operation(summary = "추론 진행 여부 확인", description = "추론관리 > 추론관리 목록")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -111,7 +117,8 @@ public class InferenceResultApiController {
return ApiResponseDto.ok(inferenceResultService.getProcessing()); return ApiResponseDto.ok(inferenceResultService.getProcessing());
} }
@Operation(summary = "년도 목록 조회", description = "어드민 홈 > 추론관리 > 추론목록 > 변화탐지 실행 정보 입력 > 년도 목록 조회") /** 추론관리 목록 화면에서 호출 */
@Operation(summary = "년도 목록 조회", description = "추론관리 > 추론목록 > 변화탐지 실행 정보 입력 > 년도 목록 조회")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -129,7 +136,8 @@ public class InferenceResultApiController {
return ApiResponseDto.ok(mapSheetMngService.findMapSheetMngDoneYyyyList()); return ApiResponseDto.ok(mapSheetMngService.findMapSheetMngDoneYyyyList());
} }
@Operation(summary = "변화탐지 실행 정보 입력", description = "어드민 홈 > 추론관리 > 추론목록 > 변화탐지 실행 정보 입력") /** 변화탐지 실행 정보 입력화면에서 호출 */
@Operation(summary = "변화탐지 실행 정보 입력, 추론실행", description = "추론관리 > 추론목록 > 변화탐지 실행 정보 입력")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -150,11 +158,12 @@ public class InferenceResultApiController {
@RequestBody @RequestBody
@Valid @Valid
InferenceResultDto.RegReq req) { InferenceResultDto.RegReq req) {
UUID uuid = inferenceResultService.saveInferenceInfo(req); UUID uuid = inferenceResultService.run(req);
return ApiResponseDto.ok(uuid); return ApiResponseDto.ok(uuid);
} }
@Operation(summary = "추론 종료", description = "추론 종료") /** 추론진행 현황 화면에서 호출 */
@Operation(summary = "추론 종료", description = "추론관리 > 추론목록 > 추론진행 현황")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -169,11 +178,13 @@ public class InferenceResultApiController {
}) })
@DeleteMapping("/end") @DeleteMapping("/end")
public ApiResponseDto<UUID> getInferenceGeomList() { public ApiResponseDto<UUID> getInferenceGeomList() {
UUID uuid = inferenceResultService.deleteInferenceEnd(); // UUID uuid = inferenceResultService.deleteInferenceEnd();
UUID uuid = inferenceAsyncService.asyncInferenceEnd();
return ApiResponseDto.ok(uuid); return ApiResponseDto.ok(uuid);
} }
@Operation(summary = "분석 모델 선택 조회", description = "변화탐지 실행 정보 입력 모델선택 팝업 ") /** 변화탐지 실행 정보 입력화면에서 호출 */
@Operation(summary = "분석 모델 선택 조회", description = "추론관리 > 추론목록 > 변화탐지 실행 정보 입력 > 모델선택 팝업 ")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -194,7 +205,7 @@ public class InferenceResultApiController {
LocalDate endDttm, LocalDate endDttm,
@Parameter(description = "키워드 (모델버전)", example = "M1.H1.E28") @RequestParam(required = false) @Parameter(description = "키워드 (모델버전)", example = "M1.H1.E28") @RequestParam(required = false)
String searchVal, String searchVal,
@Parameter(description = "타입", example = "M1") @RequestParam(required = false) @Parameter(description = "타입", example = "G1") @RequestParam(required = false)
String modelType, String modelType,
@RequestParam(defaultValue = "0") int page, @RequestParam(defaultValue = "0") int page,
@RequestParam(defaultValue = "20") int size) { @RequestParam(defaultValue = "20") int size) {
@@ -204,7 +215,8 @@ public class InferenceResultApiController {
return ApiResponseDto.ok(result); return ApiResponseDto.ok(result);
} }
@Operation(summary = "추론관리 추론진행 서버 현황", description = "추론관리 추론진행 서버 현황") /** 추론진행 현황 화면에서 호출 */
@Operation(summary = "추론관리 추론진행 서버 현황", description = "추론관리 > 추론목록 > 추론진행 현황")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -223,7 +235,8 @@ public class InferenceResultApiController {
return ApiResponseDto.ok(inferenceResultService.getInferenceServerStatusList()); return ApiResponseDto.ok(inferenceResultService.getInferenceServerStatusList());
} }
@Operation(summary = "추론관리 진행현황 상세", description = "어드민 홈 > 추론관리 > 추론관리 > 진행현황 상세") /** 추론진행 현황 화면에서 호출 */
@Operation(summary = "추론관리 진행현황 상세", description = "추론관리 > 추론진행 현황")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -247,7 +260,8 @@ public class InferenceResultApiController {
return ApiResponseDto.ok(inferenceResultService.getInferenceStatus(uuid)); return ApiResponseDto.ok(inferenceResultService.getInferenceStatus(uuid));
} }
@Operation(summary = "추론결과 기본정보", description = "추론결과 기본정보") /** 추론결과 화면에서 호출 */
@Operation(summary = "추론결과 기본정보", description = "추론관리 > 추론결과")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -268,7 +282,8 @@ public class InferenceResultApiController {
return ApiResponseDto.ok(inferenceResultService.getInferenceResultInfo(uuid)); return ApiResponseDto.ok(inferenceResultService.getInferenceResultInfo(uuid));
} }
@Operation(summary = "추론결과 분류별 탐지 건수", description = "추론결과 분류별 탐지 건수") /** 추론결과 화면에서 호출 */
@Operation(summary = "추론결과 분류별 탐지 건수", description = "추론관리 > 추론결과")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -289,6 +304,7 @@ public class InferenceResultApiController {
return ApiResponseDto.ok(inferenceResultService.getInferenceClassCountList(uuid)); return ApiResponseDto.ok(inferenceResultService.getInferenceClassCountList(uuid));
} }
/** 추론결과 화면에서 호출 */
@Operation(summary = "추론관리 분석결과 상세 목록", description = "추론관리 분석결과 상세 목록 geojson 데이터 조회") @Operation(summary = "추론관리 분석결과 상세 목록", description = "추론관리 분석결과 상세 목록 geojson 데이터 조회")
@ApiResponses( @ApiResponses(
value = { value = {
@@ -328,12 +344,14 @@ public class InferenceResultApiController {
return ApiResponseDto.ok(geomList); return ApiResponseDto.ok(geomList);
} }
/** 추론결과 화면에서 호출 */
/** 다운로드는 a 링크로 받는걸로 변경되어 사번을 파라미터로 받아서 로그에 저장하는걸로 변경함 */
@Operation(summary = "shp 파일 다운로드", description = "추론관리 분석결과 shp 파일 다운로드") @Operation(summary = "shp 파일 다운로드", description = "추론관리 분석결과 shp 파일 다운로드")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
responseCode = "200", responseCode = "200",
description = "shp zip파일 다운로드", description = "shp 파일 다운로드",
content = content =
@Content( @Content(
mediaType = "application/octet-stream", mediaType = "application/octet-stream",
@@ -341,16 +359,17 @@ public class InferenceResultApiController {
@ApiResponse(responseCode = "404", description = "파일 없음", content = @Content), @ApiResponse(responseCode = "404", description = "파일 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
}) })
@GetMapping(value = "/download/{uuid}", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE) @GetMapping("/download/{uuid}")
public ResponseEntity<Resource> downloadShp( public ResponseEntity<?> download(
@Parameter(description = "uuid", example = "0192efc6-9ec2-43ee-9a90-5b73e763c09f") @PathVariable UUID uuid,
@PathVariable @Parameter(description = "사번", example = "123456") @RequestParam String employeeNo,
UUID uuid) HttpServletRequest request)
throws IOException { throws IOException {
String path; String path;
String uid; String uid;
try { try {
// 추론결과 shp zip 파일 확인하여 다운로드 경로 생성
Map<String, Object> map = inferenceResultService.shpDownloadPath(uuid); Map<String, Object> map = inferenceResultService.shpDownloadPath(uuid);
path = String.valueOf(map.get("path")); path = String.valueOf(map.get("path"));
uid = String.valueOf(map.get("uid")); uid = String.valueOf(map.get("uid"));
@@ -359,25 +378,15 @@ public class InferenceResultApiController {
} }
Path zipPath = Path.of(path); Path zipPath = Path.of(path);
if (!Files.isRegularFile(zipPath)) {
if (!Files.exists(zipPath) || !Files.isReadable(zipPath)) { return ResponseEntity.status(HttpStatus.NOT_FOUND).body("다운로드 받을 파일이 없습니다.");
return ResponseEntity.notFound().build();
} }
FileSystemResource resource = new FileSystemResource(zipPath); return rangeDownloadResponder.buildZipResponse(zipPath, uid + ".zip", request);
String filename = uid + ".zip";
long fileSize = Files.size(zipPath);
return ResponseEntity.ok()
.contentType(MediaType.APPLICATION_OCTET_STREAM)
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + filename + "\"")
.contentLength(fileSize)
.body(resource);
} }
@Operation(summary = "shp 파일 다운로드 이력", description = "추론관리 분석결과 shp 파일 다운로드 이력") /** 추론결과 화면에서 호출 */
@Operation(summary = "shp 파일 다운로드 이력 조회", description = "추론관리 분석결과 shp 파일 다운로드 이력 조회")
@GetMapping(value = "/download-audit/{uuid}") @GetMapping(value = "/download-audit/{uuid}")
@ApiResponses( @ApiResponses(
value = { value = {
@@ -392,19 +401,20 @@ public class InferenceResultApiController {
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
}) })
public ApiResponseDto<Page<AuditLogDto.DownloadRes>> downloadAudit( public ApiResponseDto<Page<AuditLogDto.DownloadRes>> downloadAudit(
@Parameter(description = "UUID", example = "0192efc6-9ec2-43ee-9a90-5b73e763c09f") @Parameter(description = "UUID", example = "69c4e56c-e0bf-4742-9225-bba9aae39052")
@PathVariable @PathVariable
UUID uuid, UUID uuid,
@Parameter(description = "다운로드일 시작", example = "2025-01-01") @RequestParam(required = false) @Parameter(description = "다운로드일 시작", example = "2025-01-01") @RequestParam(required = false)
LocalDate strtDttm, LocalDate strtDttm,
@Parameter(description = "다운로드일 종료", example = "2026-01-01") @RequestParam(required = false) @Parameter(description = "다운로드일 종료", example = "2026-04-01") @RequestParam(required = false)
LocalDate endDttm, LocalDate endDttm,
@Parameter(description = "키워드", example = "관리자") @RequestParam(required = false) @Parameter(description = "키워드", example = "") @RequestParam(required = false)
String searchValue, String searchValue,
@Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0") @Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0")
int page, int page,
@Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20") @Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20")
int size) { int size) {
AuditLogDto.searchReq searchReq = new searchReq(); AuditLogDto.searchReq searchReq = new searchReq();
searchReq.setPage(page); searchReq.setPage(page);
searchReq.setSize(size); searchReq.setSize(size);
@@ -413,13 +423,13 @@ public class InferenceResultApiController {
downloadReq.setStartDate(strtDttm); downloadReq.setStartDate(strtDttm);
downloadReq.setEndDate(endDttm); downloadReq.setEndDate(endDttm);
downloadReq.setSearchValue(searchValue); downloadReq.setSearchValue(searchValue);
downloadReq.setMenuId("22"); downloadReq.setRequestUri("/api/inference/download/" + uuid);
downloadReq.setRequestUri("/api/inference/download-audit");
return ApiResponseDto.ok(inferenceResultService.getDownloadAudit(searchReq, downloadReq)); return ApiResponseDto.ok(inferenceResultService.getDownloadAudit(searchReq, downloadReq));
} }
@Operation(summary = "추론 실행중인 도엽 목록", description = "추론관리 실행중인 도엽명 5k 목록") /** 추론진행 현황 화면에서 호출, 분석도엽 부분 옵션일때 분석중인 도엽 확인용 */
@Operation(summary = "추론관리 분석중인 도엽명 5k 목록", description = "추론관리 분석중인 도엽명 50k 목록")
@ApiResponses({ @ApiResponses({
@ApiResponse( @ApiResponse(
responseCode = "200", responseCode = "200",

View File

@@ -1,90 +0,0 @@
package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.Scene;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@Tag(name = "추론결과 데이터 생성", description = "추론결과 데이터 생성 API")
@Log4j2
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/inference/shp")
public class InferenceResultShpApiController {

    private final InferenceResultShpService inferenceResultShpService;

    /** Swagger example body for the geojson endpoint: a "mapIds" list of 5k map-sheet ids. */
    public static final String MAP_ID =
        "{ \"mapIds\": [\"37716096\",\"37716095\",\"37716094\",\"37716091\",\"37716086\",\"37716085\",\"37716084\",\"37716083\",\"37716076\",\"37716066\",\"37716065\",\"37716064\",\"37716063\",\"37716061\",\"37716051\",\"37716011\"] }";

    /**
     * Persists inference result data for the given learn id.
     *
     * @param learnId learn-table id whose inference results are to be saved
     * @return row counts produced by the save, wrapped in the standard API envelope
     */
    @Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장")
    @ApiResponses(
        value = {
            @ApiResponse(
                responseCode = "201",
                description = "데이터 저장 성공",
                content =
                    @Content(
                        mediaType = "application/json",
                        schema =
                            @Schema(implementation = InferenceResultShpDto.InferenceCntDto.class))),
            @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
            @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
        })
    @PostMapping("/save/{learnId}")
    public ApiResponseDto<InferenceResultShpDto.InferenceCntDto> saveInferenceData(
        @PathVariable Long learnId) {
        return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(learnId));
    }

    /**
     * Generates the SHP output for one inference run.
     *
     * <p>NOTE(review): combined with the class-level mapping this resolves to
     * {@code /api/inference/shp/shp/{uuid}} (double "shp") — confirm the path is intended.
     *
     * @param uuid inference run uuid
     */
    @Operation(summary = "추론결과 shp 생성", description = "추론결과 shp 생성")
    @PostMapping("/shp/{uuid}")
    public ApiResponseDto<Void> createShp(
        @Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae") @PathVariable UUID uuid) {
        inferenceResultShpService.createShp(uuid);
        return ApiResponseDto.createOK(null);
    }

    /**
     * Creates the geojson file required to run an inference.
     *
     * @param yyyy target year
     * @param mapSheetScope ALL (전체) or PART (부분)
     * @param detectOption EXCL (추론제외) or PREV (이전 년도 도엽 사용)
     * @param body request body carrying a "mapIds" list of 5k map-sheet numbers
     * @return scene descriptor of the generated geojson file
     * @throws CustomApiException 400 when "mapIds" is missing or is not a JSON array
     */
    @Operation(summary = "추론실행에 필요한 geojson 파일 생성", description = "추론실행에 필요한 geojson 파일 생성")
    @PostMapping("/geojson/{yyyy}/{mapSheetScope}/{detectOption}")
    public ApiResponseDto<Scene> createGeojson(
        @Schema(description = "년도") @PathVariable String yyyy,
        @Schema(description = "전체(ALL),부분(PART)", example = "PART") @PathVariable
            String mapSheetScope,
        @Schema(description = "추론제외(EXCL),이전 년도 도엽 사용(PREV)", example = "EXCL") @PathVariable
            String detectOption,
        @Schema(description = "5k도엽번호", example = MAP_ID) @RequestBody Map<String, Object> body) {
        Object raw = body.get("mapIds");
        // Reject both a missing key and a non-list value. The previous unchecked
        // (List<String>) cast let a non-array payload surface later as a
        // ClassCastException (HTTP 500) instead of a clean 400 here.
        if (!(raw instanceof List<?> rawList)) {
            throw new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST);
        }
        // Deserialized JSON elements may arrive as non-String types; convert safely.
        List<String> mapIds = rawList.stream().map(String::valueOf).toList();
        Scene scene =
            inferenceResultShpService.createGeojson(yyyy, mapSheetScope, detectOption, mapIds);
        return ApiResponseDto.createOK(scene);
    }
}

View File

@@ -458,6 +458,7 @@ public class InferenceDetailDto {
private String bboxGeom; private String bboxGeom;
private String bboxCenterPoint; private String bboxCenterPoint;
private UUID inferenceUuid; private UUID inferenceUuid;
private String status;
public AnalResultInfo( public AnalResultInfo(
String analTitle, String analTitle,
@@ -474,7 +475,8 @@ public class InferenceDetailDto {
String subUid, String subUid,
Boolean applyYn, Boolean applyYn,
ZonedDateTime applyDttm, ZonedDateTime applyDttm,
UUID inferenceUuid) { UUID inferenceUuid,
String status) {
this.analTitle = analTitle; this.analTitle = analTitle;
this.modelVer1 = modelVer1; this.modelVer1 = modelVer1;
this.modelVer2 = modelVer2; this.modelVer2 = modelVer2;
@@ -489,6 +491,7 @@ public class InferenceDetailDto {
this.subUid = subUid; this.subUid = subUid;
this.applyYn = applyYn; this.applyYn = applyYn;
this.applyDttm = applyDttm; this.applyDttm = applyDttm;
this.status = status;
Duration elapsed = Duration elapsed =
(inferStartDttm != null && inferEndDttm != null) (inferStartDttm != null && inferEndDttm != null)
? Duration.between(inferStartDttm, inferEndDttm) ? Duration.between(inferStartDttm, inferEndDttm)
@@ -538,6 +541,10 @@ public class InferenceDetailDto {
public Boolean getApplyYn() { public Boolean getApplyYn() {
return this.applyYn != null && this.applyYn; return this.applyYn != null && this.applyYn;
} }
public String getStatusNm() {
return InferenceResultDto.Status.getDescByCode(this.status);
}
} }
@Getter @Getter

View File

@@ -18,6 +18,7 @@ import lombok.AllArgsConstructor;
import lombok.Getter; import lombok.Getter;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;
import lombok.Setter; import lombok.Setter;
import lombok.ToString;
import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
@@ -86,6 +87,7 @@ public class InferenceResultDto {
READY("대기"), READY("대기"),
IN_PROGRESS("진행중"), IN_PROGRESS("진행중"),
END("완료"), END("완료"),
END_FAIL("종료실패"),
FORCED_END("강제종료"); FORCED_END("강제종료");
private final String desc; private final String desc;
@@ -240,21 +242,22 @@ public class InferenceResultDto {
@Setter @Setter
@NoArgsConstructor @NoArgsConstructor
@AllArgsConstructor @AllArgsConstructor
@ToString
public static class RegReq { public static class RegReq {
@Schema(description = "제목", example = "2023-2024 변화탐지 테스트") @Schema(description = "제목", example = "2023-2024 변화탐지 테스트")
@NotBlank @NotBlank
private String title; private String title;
@Schema(description = "M1", example = "b40e0f68-c1d8-49fc-93f9-a36270093861") @Schema(description = "G1", example = "643adead-f3d2-4f10-9037-862bee919399")
@NotNull @NotNull
private UUID model1Uuid; private UUID model1Uuid;
@Schema(description = "M2", example = "ec92b7d2-b5a3-4915-9bdf-35fb3ca8ad27") @Schema(description = "G2", example = "dd86b4ef-28e3-4e3d-9ee4-f60d9cb54e13")
@NotNull @NotNull
private UUID model2Uuid; private UUID model2Uuid;
@Schema(description = "M3", example = "37f45782-8ccf-4cf6-911c-a055a1510d39") @Schema(description = "G3", example = "58c1153e-dec6-4424-82a1-189083a9d9dc")
@NotNull @NotNull
private UUID model3Uuid; private UUID model3Uuid;
@@ -272,11 +275,10 @@ public class InferenceResultDto {
private String mapSheetScope; private String mapSheetScope;
@Schema(description = "탐지 데이터 옵션 - 추론제외(EXCL), 이전 년도 도엽 사용(PREV)", example = "EXCL") @Schema(description = "탐지 데이터 옵션 - 추론제외(EXCL), 이전 년도 도엽 사용(PREV)", example = "EXCL")
@NotBlank // @EnumValid(
@EnumValid( // enumClass = DetectOption.class,
enumClass = DetectOption.class, // message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.")
message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.") private DetectOption detectOption;
private String detectOption;
@Schema(description = "5k 도협 번호 목록", example = "[33605,33606, 33610, 34802, 35603, 35611]") @Schema(description = "5k 도협 번호 목록", example = "[33605,33606, 33610, 34802, 35603, 35611]")
@NotNull @NotNull
@@ -297,6 +299,30 @@ public class InferenceResultDto {
@Schema(name = "InferenceStatusDetailDto", description = "추론(변화탐지) 진행상태") @Schema(name = "InferenceStatusDetailDto", description = "추론(변화탐지) 진행상태")
public static class InferenceStatusDetailDto { public static class InferenceStatusDetailDto {
@Schema(description = "모델1 사용시간 시작일시")
@JsonFormatDttm
ZonedDateTime m1ModelStartDttm;
@Schema(description = "모델2 사용시간 시작일시")
@JsonFormatDttm
ZonedDateTime m2ModelStartDttm;
@Schema(description = "모델3 사용시간 시작일시")
@JsonFormatDttm
ZonedDateTime m3ModelStartDttm;
@Schema(description = "모델1 사용시간 종료일시")
@JsonFormatDttm
ZonedDateTime m1ModelEndDttm;
@Schema(description = "모델2 사용시간 종료일시")
@JsonFormatDttm
ZonedDateTime m2ModelEndDttm;
@Schema(description = "모델3 사용시간 종료일시")
@JsonFormatDttm
ZonedDateTime m3ModelEndDttm;
@Schema(description = "탐지대상 도엽수") @Schema(description = "탐지대상 도엽수")
private Long detectingCnt; private Long detectingCnt;
@@ -336,30 +362,6 @@ public class InferenceResultDto {
@Schema(description = "모델3 분석 실패") @Schema(description = "모델3 분석 실패")
private Integer m3FailedJobs; private Integer m3FailedJobs;
@Schema(description = "모델1 사용시간 시작일시")
@JsonFormatDttm
ZonedDateTime m1ModelStartDttm;
@Schema(description = "모델2 사용시간 시작일시")
@JsonFormatDttm
ZonedDateTime m2ModelStartDttm;
@Schema(description = "모델3 사용시간 시작일시")
@JsonFormatDttm
ZonedDateTime m3ModelStartDttm;
@Schema(description = "모델1 사용시간 종료일시")
@JsonFormatDttm
ZonedDateTime m1ModelEndDttm;
@Schema(description = "모델2 사용시간 종료일시")
@JsonFormatDttm
ZonedDateTime m2ModelEndDttm;
@Schema(description = "모델3 사용시간 종료일시")
@JsonFormatDttm
ZonedDateTime m3ModelEndDttm;
@Schema(description = "변화탐지 제목") @Schema(description = "변화탐지 제목")
private String title; private String title;
@@ -496,19 +498,19 @@ public class InferenceResultDto {
return MapSheetScope.getDescByCode(this.mapSheetScope); return MapSheetScope.getDescByCode(this.mapSheetScope);
} }
@Schema(description = "M1 사용시간") @Schema(description = "G1 사용시간")
@JsonProperty("m1ElapsedTim") @JsonProperty("m1ElapsedTim")
public String getM1ElapsedTime() { public String getM1ElapsedTime() {
return formatElapsedTime(this.m1ModelStartDttm, this.m1ModelEndDttm); return formatElapsedTime(this.m1ModelStartDttm, this.m1ModelEndDttm);
} }
@Schema(description = "M2 사용시간") @Schema(description = "G2 사용시간")
@JsonProperty("m2ElapsedTim") @JsonProperty("m2ElapsedTim")
public String getM2ElapsedTime() { public String getM2ElapsedTime() {
return formatElapsedTime(this.m2ModelStartDttm, this.m2ModelEndDttm); return formatElapsedTime(this.m2ModelStartDttm, this.m2ModelEndDttm);
} }
@Schema(description = "M3 사용시간") @Schema(description = "G3 사용시간")
@JsonProperty("m3ElapsedTim") @JsonProperty("m3ElapsedTim")
public String getM3ElapsedTime() { public String getM3ElapsedTime() {
return formatElapsedTime(this.m3ModelStartDttm, this.m3ModelEndDttm); return formatElapsedTime(this.m3ModelStartDttm, this.m3ModelEndDttm);
@@ -676,4 +678,14 @@ public class InferenceResultDto {
private Long m2ModelBatchId; private Long m2ModelBatchId;
private Long m3ModelBatchId; private Long m3ModelBatchId;
} }
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class MapSheetFallbackYearDto {
private String mapSheetNum;
private Integer mngYyyy;
}
} }

View File

@@ -71,14 +71,16 @@ public class InferenceResultShpDto {
@NoArgsConstructor @NoArgsConstructor
public static class InferenceCntDto { public static class InferenceCntDto {
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 목록 저장 터이터 건수", example = "120") @Schema(
description = "추론 결과(inference_results_testing)를 기준으로 데이터 목록 저장 터이터 건수",
example = "120")
int sheetAnalDataCnt; int sheetAnalDataCnt;
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 데이터 건수", example = "120") @Schema(description = "추론 결과(inference_results_testing)를 기준으로 저장 Geom 데이터 건수", example = "120")
int inferenceCnt;
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 Geom 데이터 건수", example = "120")
int inferenceGeomCnt; int inferenceGeomCnt;
@Schema(description = "추론 결과(inference_results_testing)를 기준으로 저장 집계 데이터 건수", example = "120")
int inferenceSttcnt;
} }
@Setter @Setter

View File

@@ -1,10 +1,12 @@
package com.kamco.cd.kamcoback.inference.dto; package com.kamco.cd.kamcoback.inference.dto;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity; import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
import java.time.ZonedDateTime;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Getter; import lombok.Getter;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;
import lombok.Setter; import lombok.Setter;
import org.locationtech.jts.geom.Geometry;
public class InferenceResultsTestingDto { public class InferenceResultsTestingDto {
@@ -22,4 +24,31 @@ public class InferenceResultsTestingDto {
return new ShpDto(e.getBatchId(), e.getUid(), e.getMapId()); return new ShpDto(e.getBatchId(), e.getUid(), e.getMapId());
} }
} }
@Getter
@Setter
@AllArgsConstructor
@NoArgsConstructor
public static class Basic {
private Double probability;
private Long beforeYear;
private Long afterYear;
private String mapId;
private String modelVersion;
private String clsModelPath;
private String clsModelVersion;
private String cdModelType;
private Long id;
private String modelName;
private Long batchId;
private Double area;
private String beforeC;
private Double beforeP;
private String afterC;
private Double afterP;
private Long seq;
private ZonedDateTime createdDate;
private String uid;
private Geometry geometry;
}
} }

View File

@@ -5,8 +5,10 @@ import lombok.Getter;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;
import lombok.Setter; import lombok.Setter;
import lombok.ToString; import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
/** AI API 추론 실행 DTO */ /** AI API 추론 실행 DTO */
@Slf4j
@Getter @Getter
@Setter @Setter
@NoArgsConstructor @NoArgsConstructor

View File

@@ -0,0 +1,117 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.common.inference.service.InferenceCommonService;
import com.kamco.cd.kamcoback.common.utils.UserUtil;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient.ExternalCallResult;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
import com.kamco.cd.kamcoback.postgres.core.AuditLogCoreService;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService;
import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
 * Asynchronous termination of a running inference (change-detection) batch.
 *
 * <p>Looks up the currently processing inference, asks the external AI batch
 * server to stop it, then records either FORCED_END (success) or END_FAIL
 * (external call failed) on the inference row.
 */
@Service
@Slf4j
@RequiredArgsConstructor
@Transactional(readOnly = true)
public class InferenceAsyncService {

    private final InferenceResultCoreService inferenceResultCoreService;
    private final MapSheetMngCoreService mapSheetMngCoreService;
    private final ModelMngCoreService modelMngCoreService;
    private final AuditLogCoreService auditLogCoreService;
    private final InferenceCommonService inferenceCommonService;
    private final ExternalHttpClient externalHttpClient;
    private final UserUtil userUtil;

    // External AI batch server base URL; DELETE {batchUrl}/{batchId} stops a batch.
    @Value("${inference.batch-url}")
    private String batchUrl;

    @Value("${inference.inference-server-name}")
    private String inferenceServerName;

    @Value("${file.dataset-dir}")
    private String datasetDir;

    @Value("${spring.profiles.active}")
    private String activeEnv;

    @Value("${inference.geojson-dir}")
    private String inferenceDir;

    /**
     * Finds the inference currently in progress and triggers its termination.
     *
     * @return uuid of the inference being terminated
     * @throws CustomApiException NOT_FOUND (404) when no inference is processing
     */
    @Transactional
    public UUID asyncInferenceEnd() {
        SaveInferenceAiDto dto = inferenceResultCoreService.getProcessing();
        if (dto == null) {
            throw new CustomApiException("NOT_FOUND", HttpStatus.NOT_FOUND);
        }
        // NOTE(review): this is a self-invocation through `this`, which bypasses
        // the Spring proxy — the @Async("inferenceEndExecutor") and @Transactional
        // on deleteInferenceEndAsync will NOT apply, so the call runs synchronously
        // in the current thread/transaction. Confirm whether self-injection or
        // moving the async method to a separate bean is required.
        this.deleteInferenceEndAsync(dto); // intended asynchronous termination call
        return dto.getUuid();
    }

    /**
     * Calls the external AI server (DELETE) to stop the batch and records the outcome.
     *
     * <p>On call failure the inference is marked END_FAIL; on success it is marked
     * FORCED_END and geometry data is upserted for the related learn id. Any
     * exception is caught and logged so it never escapes the async executor thread.
     *
     * @param dto snapshot of the in-progress inference (uuid and batchId are read)
     */
    @Async("inferenceEndExecutor")
    @Transactional
    public void deleteInferenceEndAsync(SaveInferenceAiDto dto) {
        Long batchId = dto.getBatchId();
        String url = batchUrl + "/" + batchId;

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        headers.setAccept(List.of(MediaType.APPLICATION_JSON));

        try {
            log.info("[추론 종료 비동기 시작] uuid={}, batchId={}", dto.getUuid(), batchId);

            ExternalCallResult<String> result =
                externalHttpClient.callLong(url, HttpMethod.DELETE, dto, headers, String.class);

            if (!result.success()) {
                log.error("[추론 종료 실패] 외부 API 호출 실패. uuid={}, batchId={}", dto.getUuid(), batchId);
                // Record the failure so the UI can surface the terminal state.
                SaveInferenceAiDto failRequest = new SaveInferenceAiDto();
                failRequest.setUuid(dto.getUuid());
                failRequest.setStatus(Status.END_FAIL.getId()); // TODO: confirm END_FAIL is the right terminal status
                failRequest.setUpdateUid(userUtil.getId());
                failRequest.setInferEndDttm(ZonedDateTime.now());
                inferenceResultCoreService.update(failRequest);
                return;
            }

            // External stop accepted: mark the run as forcibly ended.
            SaveInferenceAiDto request = new SaveInferenceAiDto();
            request.setStatus(Status.FORCED_END.getId());
            request.setUuid(dto.getUuid());
            request.setUpdateUid(userUtil.getId());
            request.setInferEndDttm(ZonedDateTime.now());
            inferenceResultCoreService.update(request);

            // Persist whatever geometry results exist for this run's learn id.
            Long learnId = inferenceResultCoreService.getInferenceLearnIdByUuid(dto.getUuid());
            inferenceResultCoreService.upsertGeomData(learnId);

            log.info("[추론 종료 비동기 완료] uuid={}, batchId={}", dto.getUuid(), batchId);
        } catch (Exception e) {
            // Deliberately swallow after logging: nothing upstream can observe an
            // async failure, and the thread must not die with an unhandled exception.
            log.error("[추론 종료 비동기 예외] uuid={}, batchId={}", dto.getUuid(), batchId, e);
        }
    }
}

View File

@@ -0,0 +1,61 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service;
/** Manually triggered (ad-hoc) persistence of inference testing results. */
@Service
@RequiredArgsConstructor
public class InferenceManualService {

    private final InferenceResultCoreService inferenceResultCoreService;
    // Referenced only by the work-in-progress block below.
    private final ModelMngCoreService modelMngCoreService;

    /**
     * Loads grouped rows from the inference-results testing table for the given
     * batch ids; fails fast when nothing matches.
     *
     * @param batchIds batch ids to look up
     * @throws CustomApiException NOT_FOUND (404) when no rows match
     */
    public void saveResultsTesting(List<Long> batchIds) {
        // Query the testing table grouped by the supplied batch ids.
        List<InferenceResultsTestingDto.Basic> grouped =
            inferenceResultCoreService.getInferenceResultGroupList(batchIds);

        if (grouped.isEmpty()) {
            throw new CustomApiException("NOT_FOUND", HttpStatus.NOT_FOUND);
        }

        // WIP (kept from the original): build a RegReq from the first row's
        // before/after years and resolve model UUIDs by model-version prefix.
        //        Long compareYear = grouped.getFirst().getBeforeYear();
        //        Long targetYear = grouped.getFirst().getAfterYear();
        //        String title = compareYear + "-" + targetYear + "변화탐지";
        //
        //        InferenceResultDto.RegReq inferenceDto = new InferenceResultDto.RegReq();
        //        inferenceDto.setTitle(title);
        //        inferenceDto.setCompareYyyy(Integer.valueOf(compareYear));
        //        inferenceDto.setTargetYyyy(targetYear);
        //        // save base inference info
        //        for (InferenceResultsTestingDto.Basic result : grouped) {
        //            if (result.getModelVersion().startsWith(ModelType.G1.getId())
        //                    || result.getModelVersion().startsWith("M1")) {
        //                ModelMngDto.Basic model =
        //                    modelMngCoreService.findByModelVer(result.getModelVersion());
        //                inferenceDto.setModel1Uuid(model.getUuid());
        //            } else if (result.getModelVersion().startsWith("G2")
        //                    || result.getModelVersion().startsWith("M2")) {
        //                ModelMngDto.Basic model =
        //                    modelMngCoreService.findByModelVer(result.getModelVersion());
        //                inferenceDto.setModel2Uuid(model.getUuid());
        //            } else if (result.getModelVersion().startsWith("G3")
        //                    || result.getModelVersion().startsWith("M3")) {
        //                ModelMngDto.Basic model =
        //                    modelMngCoreService.findByModelVer(result.getModelVersion());
        //                inferenceDto.setModel3Uuid(model.getUuid());
        //            }
        //        }
    }
}

View File

@@ -1,11 +1,11 @@
package com.kamco.cd.kamcoback.inference.service; package com.kamco.cd.kamcoback.inference.service;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.common.exception.CustomApiException; import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter;
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature; import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.Scene; import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.Scene;
import com.kamco.cd.kamcoback.common.inference.service.InferenceCommonService;
import com.kamco.cd.kamcoback.common.inference.utils.GeoJsonValidator;
import com.kamco.cd.kamcoback.common.utils.UserUtil; import com.kamco.cd.kamcoback.common.utils.UserUtil;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient; import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient.ExternalCallResult; import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient.ExternalCallResult;
@@ -21,7 +21,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.DetectOption;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceLearnDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceLearnDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetNumDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
@@ -39,12 +39,15 @@ import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService; import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService;
import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService; import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.NotNull;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Comparator; import java.util.Comparator;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
@@ -53,7 +56,7 @@ import java.util.Set;
import java.util.UUID; import java.util.UUID;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2; import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpHeaders;
@@ -63,8 +66,9 @@ import org.springframework.http.MediaType;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
/** 추론 관리 */
@Service @Service
@Log4j2 @Slf4j
@RequiredArgsConstructor @RequiredArgsConstructor
@Transactional(readOnly = true) @Transactional(readOnly = true)
public class InferenceResultService { public class InferenceResultService {
@@ -73,14 +77,11 @@ public class InferenceResultService {
private final MapSheetMngCoreService mapSheetMngCoreService; private final MapSheetMngCoreService mapSheetMngCoreService;
private final ModelMngCoreService modelMngCoreService; private final ModelMngCoreService modelMngCoreService;
private final AuditLogCoreService auditLogCoreService; private final AuditLogCoreService auditLogCoreService;
private final InferenceCommonService inferenceCommonService;
private final ExternalHttpClient externalHttpClient; private final ExternalHttpClient externalHttpClient;
private final ObjectMapper objectMapper;
private final UserUtil userUtil; private final UserUtil userUtil;
@Value("${inference.url}")
private String inferenceUrl;
@Value("${inference.batch-url}") @Value("${inference.batch-url}")
private String batchUrl; private String batchUrl;
@@ -91,7 +92,10 @@ public class InferenceResultService {
private String datasetDir; private String datasetDir;
@Value("${spring.profiles.active}") @Value("${spring.profiles.active}")
private String profile; private String activeEnv;
@Value("${inference.geojson-dir}")
private String inferenceDir;
/** /**
* 추론관리 목록 * 추론관리 목록
@@ -104,7 +108,7 @@ public class InferenceResultService {
} }
/** /**
* 추론 진행중인지 확인 * 추론 진행중인지 확인, 변화탐지 설정 등록 버튼 활성화 여부에 필요함
* *
* @return * @return
*/ */
@@ -117,7 +121,343 @@ public class InferenceResultService {
} }
/** /**
* 변화탐지 실행 정보 생성 * 추론 실행 - 추론제외, 이전연도 도엽 사용 분기
*
* @param req
* @return
*/
@Transactional
public UUID run(InferenceResultDto.RegReq req) {
log.info("inference start request = {}", req);
DetectOption detectOption = req.getDetectOption();
if (detectOption == DetectOption.EXCL) {
// 추론 제외 일때 EXCL
return runExcl(req);
}
// 이전연도 도엽 사용 일때 PREV
return runPrev(req);
}
/**
* 변화탐지 [옵션 추론제외 실행]
*
* @param req
* @return
*/
public UUID runExcl(InferenceResultDto.RegReq req) {
// 기준연도 실행가능 도엽 조회
List<MngListDto> targetMngList =
mapSheetMngCoreService.getMapSheetMngHst(req.getTargetYyyy(), req.getMapSheetNum());
if (targetMngList == null || targetMngList.isEmpty()) {
throw new CustomApiException("NOT_FOUND_MAP_SHEET_NUM", HttpStatus.NOT_FOUND);
}
log.info("targetMngList size = {}", targetMngList.size());
// 비교연도 실행가능 도엽 조회
List<MngListDto> compareMngList =
mapSheetMngCoreService.getMapSheetMngHst(req.getCompareYyyy(), req.getMapSheetNum());
if (compareMngList == null || compareMngList.isEmpty()) {
throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND);
}
log.info("compareMngList size = {}", compareMngList.size());
// compare 도엽번호 Set 구성
Set<String> compareSet =
compareMngList.stream()
.map(MngListDto::getMapSheetNum)
.filter(Objects::nonNull)
.collect(Collectors.toSet());
// 기준년도 비교년도 동일한 도엽번호만 담기
List<MngListDto> intersectionList =
targetMngList.stream()
.filter(dto -> dto.getMapSheetNum() != null)
.filter(dto -> compareSet.contains(dto.getMapSheetNum()))
.toList();
log.info("target size = {}", targetMngList.size());
log.info("compare size = {}", compareMngList.size());
log.info("intersection size = {}", intersectionList.size());
// 비교 연도 도엽번호를 꺼내와서 최종 추론 대상 도엽번호를 담기
List<String> mapSheetNums =
intersectionList.stream()
.map(MngListDto::getMapSheetNum)
.filter(Objects::nonNull)
.distinct()
.toList();
int targetTotal = targetMngList.size();
int compareTotal = compareMngList.size();
int intersection = intersectionList.size();
// ===== MapSheet Year Comparison =====
// target Total : 기준연도 실행가능 전체 도엽 수
// compare Total : 비교연도 실행가능 전체 도엽 수
// Intersection : 양 연도에 모두 존재하는 도엽 수 (최종 추론 대상)
// target Only (Excluded) : 기준연도에만 존재하고 비교연도에는 없는 도엽 수
// compare Only : 비교연도에만 존재하고 기준연도에는 없는 도엽 수
// ====================================
log.info(
"""
===== MapSheet Year Comparison =====
target Total: {}
compare Total: {}
Intersection: {}
target Only (Excluded): {}
compare Only: {}
====================================
""",
targetTotal,
compareTotal,
intersection,
targetTotal - intersection,
compareTotal - intersection);
if (mapSheetNums.isEmpty()) {
// 추론 가능한 도엽이 없습니다.
throw new CustomApiException("NOT_FOUND_MAP_SHEET_NUM", HttpStatus.NOT_FOUND);
}
// compare geojson 파일 생성
Scene compareScene =
getSceneInference(
req.getCompareYyyy().toString(), // 기준년도
mapSheetNums, // 최종 추론 대상
req.getMapSheetScope(), // ALL / 부분
req.getDetectOption()); // EXCL / PREV
// target geojson 파일 생성
Scene targetScene =
getSceneInference(
req.getTargetYyyy().toString(), // 대상년도
mapSheetNums, // 최종 추론 대상
req.getMapSheetScope(),
req.getDetectOption());
log.info("비교년도 geojson 파일 validation ===== {}", compareScene.getFilePath());
GeoJsonValidator.validateWithRequested(compareScene.getFilePath(), mapSheetNums);
log.info("기준년도 geojson 파일 validation ===== {}", targetScene.getFilePath());
GeoJsonValidator.validateWithRequested(targetScene.getFilePath(), mapSheetNums);
// 추론 실행
return executeInference(
req,
intersectionList, // 전체 target 목록
mapSheetNums, // 최종 추론 대상
compareScene, // compare geojson
targetScene // target geojson
);
}
/**
* 변화탐지 옵션 이전 년도 도엽 사용 실행
*
* @param req
* @return
*/
@Transactional
public UUID runPrev(InferenceResultDto.RegReq req) {
Integer targetYyyy = req.getTargetYyyy();
Integer compareYyyy = req.getCompareYyyy();
String mapSheetScope = req.getMapSheetScope();
log.info("[{}|{}}] ,{}", compareYyyy, targetYyyy, mapSheetScope);
// 기준연도 실행가능 도엽 조회[AFTER]
List<MngListDto> targetMngList =
mapSheetMngCoreService.getMapSheetMngHst(targetYyyy, req.getMapSheetNum());
log.info("[runPrev] targetMngList size = {}", targetMngList.size());
if (targetMngList == null || targetMngList.isEmpty()) {
throw new CustomApiException("NOT_FOUND_TARGET_YEAR", HttpStatus.NOT_FOUND);
}
// 비교연도 실행가능 도엽 조회
List<MngListDto> compareMngList =
mapSheetMngCoreService.getMapSheetMngHst(compareYyyy, req.getMapSheetNum());
log.info("[runPrev] compareMngList size = {}", compareMngList.size());
if (compareMngList == null || compareMngList.isEmpty()) {
throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND);
}
log.info("[runPrev] Difference in count = {}", targetMngList.size() - compareMngList.size());
// 로그용 원본 카운트 (이전도엽 추가 전)
int targetTotal = targetMngList.size();
int compareTotalBeforeFallback = compareMngList.size();
// 기준연도 기준 비교연도 구해서 이전년도로 compare 보완 하기위해서 도엽번호만 정리
Set<String> compareSet0 =
compareMngList.stream()
.map(MngListDto::getMapSheetNum)
.filter(Objects::nonNull)
.collect(Collectors.toSet());
// 기준연도 기준 비교연도에 도협번호가 없으면 이전연도 조회해서 compare 보완, 없는거 담기
List<String> targetOnlyMapSheetNums =
targetMngList.stream()
.map(MngListDto::getMapSheetNum)
.filter(Objects::nonNull)
.filter(num -> !compareSet0.contains(num))
.toList();
log.info("[runPrev] targetOnlyMapSheetNums in count = {}", targetOnlyMapSheetNums.size());
// 이전연도 초회 추가
compareMngList.addAll(
mapSheetMngCoreService.findFallbackCompareYearByMapSheets(
compareYyyy, targetOnlyMapSheetNums));
log.info("[runPrev] fallback compare size= {}", compareMngList.size());
// 이전연도 추가 후 compare 총 개수
int compareTotalAfterFallback = compareMngList.size();
// 이전연도 추가한 기준연도 값 도협번호만 담기
Set<String> compareSet1 =
compareMngList.stream()
.map(MngListDto::getMapSheetNum)
.filter(Objects::nonNull)
.collect(Collectors.toSet());
// 기준연도 기준으로 비교연도에 있는것만 담기 (도협번호) 결국 비교년도와 개수가 같아짐
List<String> mapSheetNums =
targetMngList.stream()
.map(MngListDto::getMapSheetNum)
.filter(Objects::nonNull)
.filter(compareSet1::contains)
.toList();
int intersection = mapSheetNums.size();
Set<String> intersectionSet = new HashSet<>(mapSheetNums);
// 비교연도 같은거 담기(dto list)
compareMngList =
compareMngList.stream()
.filter(c -> c.getMapSheetNum() != null)
.filter(c -> intersectionSet.contains(c.getMapSheetNum()))
.toList();
// 기준연도 같은거 담기(dto list)
List<MngListDto> filteredTargetMngList =
targetMngList.stream()
.filter(t -> t.getMapSheetNum() != null)
.filter(t -> intersectionSet.contains(t.getMapSheetNum()))
.toList();
// 로그
int targetOnlyExcluded = targetTotal - intersection;
int compareOnly = compareTotalAfterFallback - intersection;
log.info(
"""
===== MapSheet Year Comparison =====
target Total: {}
compare Total(before fallback): {}
compare Total(after fallback): {}
Intersection: {}
target Only (Excluded): {}
compare Only: {}
====================================
""",
targetTotal,
compareTotalBeforeFallback,
compareTotalAfterFallback,
intersection,
targetOnlyExcluded,
compareOnly);
if (mapSheetNums.isEmpty()) {
throw new CustomApiException("NOT_FOUND_MAP_SHEET_NUM", HttpStatus.NOT_FOUND);
}
// compare 기준 geojson 생성
Scene compareScene =
getSceneInference(
compareMngList, compareYyyy.toString(), mapSheetScope, req.getDetectOption());
// target 기준 geojson 생성
Scene targetScene =
getSceneInference(
targetYyyy.toString(), mapSheetNums, mapSheetScope, req.getDetectOption());
log.info("비교년도 geojson 파일 validation ===== {}", compareScene.getFilePath());
GeoJsonValidator.validateWithRequested(compareScene.getFilePath(), mapSheetNums);
log.info("기준년도 geojson 파일 validation ===== {}", targetScene.getFilePath());
GeoJsonValidator.validateWithRequested(targetScene.getFilePath(), mapSheetNums);
// 추론 실행
return executeInference(req, filteredTargetMngList, mapSheetNums, compareScene, targetScene);
}
/**
 * Persists the inference run (learn tables) and calls the AI inference API.
 *
 * @param req inference registration request (years, model uuid, detect option)
 * @param targetDtoList candidate target-year map sheet rows
 * @param filteredTargetList map sheet numbers that survived the target/compare intersection
 * @param modelComparePath generated compare-year scene (carries the geojson file path)
 * @param modelTargetPath generated target-year scene (carries the geojson file path)
 * @return uuid of the saved inference run
 */
private UUID executeInference(
    InferenceResultDto.RegReq req,
    List<MngListDto> targetDtoList,
    List<String> filteredTargetList,
    Scene modelComparePath,
    Scene modelTargetPath) {
  // Keep only target rows whose sheet number is in the filtered intersection.
  Set<String> allowedSheetNums = new HashSet<>(filteredTargetList);
  List<MngListDto> persistTargets =
      targetDtoList.stream()
          .filter(dto -> dto.getMapSheetNum() != null)
          .filter(dto -> allowedSheetNums.contains(dto.getMapSheetNum()))
          .toList();

  // Save the inference run and the per-sheet status rows; returns the run uuid.
  UUID uuid = inferenceResultCoreService.saveInferenceInfo(req, persistTargets);

  // Build the area payload handed to the inference AI.
  pred_requests_areas areas = new pred_requests_areas();
  areas.setInput1_year(req.getCompareYyyy());
  areas.setInput2_year(req.getTargetYyyy());
  areas.setInput1_scene_path(modelComparePath.getFilePath());
  areas.setInput2_scene_path(modelTargetPath.getFilePath());

  // Look up model info and attach the area payload.
  InferenceSendDto m1 = this.getModelInfo(req.getModel1Uuid());
  m1.setPred_requests_areas(areas);
  log.info("[INFERENCE] Start m1 = {}", m1);

  // Call the AI; the returned batch id is stored on the run row.
  Long batchId = inferenceCommonService.ensureAccepted(m1);

  SaveInferenceAiDto aiResult = new SaveInferenceAiDto();
  aiResult.setUuid(uuid);
  aiResult.setBatchId(batchId);
  aiResult.setStatus(Status.IN_PROGRESS.getId());
  aiResult.setType(ModelType.G1.getId());
  aiResult.setInferStartDttm(ZonedDateTime.now());
  aiResult.setModelComparePath(modelComparePath.getFilePath());
  aiResult.setModelTargetPath(modelTargetPath.getFilePath());
  aiResult.setModelStartDttm(ZonedDateTime.now());

  // Write the AI response details back onto the inference run row.
  inferenceResultCoreService.update(aiResult);
  return uuid;
}
/**
* 변화탐지 실행 정보 생성 TODO 미사용, 새로운 추론실행 로직 테스트후 삭제 해야합니다.
* *
* @param req * @param req
*/ */
@@ -125,7 +465,7 @@ public class InferenceResultService {
public UUID saveInferenceInfo(InferenceResultDto.RegReq req) { public UUID saveInferenceInfo(InferenceResultDto.RegReq req) {
// 변화탐지 실행 가능 기준 년도 조회 // 변화탐지 실행 가능 기준 년도 조회
List<MngListDto> targetList = mapSheetMngCoreService.getHstMapSheetList(req); List<MngListDto> targetList = null; // mapSheetMngCoreService.getHstMapSheetList(req);
if (targetList.isEmpty()) { if (targetList.isEmpty()) {
throw new CustomApiException("NOT_FOUND_TARGET_YEAR", HttpStatus.NOT_FOUND); throw new CustomApiException("NOT_FOUND_TARGET_YEAR", HttpStatus.NOT_FOUND);
@@ -238,17 +578,19 @@ public class InferenceResultService {
predRequestsAreas.setInput2_scene_path(modelTargetPath.getFilePath()); predRequestsAreas.setInput2_scene_path(modelTargetPath.getFilePath());
InferenceSendDto m1 = this.getModelInfo(req.getModel1Uuid()); InferenceSendDto m1 = this.getModelInfo(req.getModel1Uuid());
log.info("[INFERENCE] Start m1 = {}", m1);
m1.setPred_requests_areas(predRequestsAreas); m1.setPred_requests_areas(predRequestsAreas);
// ai 추론 실행 api 호출 // ai 추론 실행 api 호출
Long batchId = ensureAccepted(m1); Long batchId = inferenceCommonService.ensureAccepted(m1);
// ai 추론 실행후 응답값 update // ai 추론 실행후 응답값 update
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto(); SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
saveInferenceAiDto.setUuid(uuid); saveInferenceAiDto.setUuid(uuid);
saveInferenceAiDto.setBatchId(batchId); saveInferenceAiDto.setBatchId(batchId);
saveInferenceAiDto.setStatus(Status.IN_PROGRESS.getId()); saveInferenceAiDto.setStatus(Status.IN_PROGRESS.getId());
saveInferenceAiDto.setType("M1"); saveInferenceAiDto.setType(ModelType.G1.getId());
saveInferenceAiDto.setInferStartDttm(ZonedDateTime.now()); saveInferenceAiDto.setInferStartDttm(ZonedDateTime.now());
saveInferenceAiDto.setModelComparePath(modelComparePath.getFilePath()); saveInferenceAiDto.setModelComparePath(modelComparePath.getFilePath());
saveInferenceAiDto.setModelTargetPath(modelTargetPath.getFilePath()); saveInferenceAiDto.setModelTargetPath(modelTargetPath.getFilePath());
@@ -258,131 +600,6 @@ public class InferenceResultService {
return uuid; return uuid;
} }
/**
 * Builds the save parameters for the compare-year detect-data options: target-year sheets that
 * also exist in the compare year, honoring the EXCL detect-option filtering on the compare side.
 *
 * <p>Fix: the EXCL check previously compared the DetectOption enum against its String id
 * ({@code req.getDetectOption().equals(DetectOption.EXCL.getId())}), which is always false, so
 * the "skip sheets without an immediately-prior-year file" branch never ran. It now compares
 * enum-to-enum (also null-safe).
 *
 * @param req inference registration request (compare/target years, detect option)
 * @param targetList target-year map sheet rows
 * @return sheet number/name DTOs for sheets present in both years
 */
private List<MapSheetNumDto> createdMngDto(
    InferenceResultDto.RegReq req, List<MngListDto> targetList) {
  // Collect target-year sheet numbers; the compare year is queried based on these.
  List<String> mapTargetIds =
      targetList.stream().map(MngListDto::getMapSheetNum).filter(Objects::nonNull).toList();

  // Query the compare year for those sheets.
  List<MngListCompareDto> compareList =
      mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds);

  boolean excludeOption = req.getDetectOption() == DetectOption.EXCL;
  int requiredBeforeYear = req.getTargetYyyy() - 1;
  List<String> mapCompareIds = new ArrayList<>();
  for (MngListCompareDto dto : compareList) {
    // EXCL option: drop sheets whose previous-year file is not the immediately prior year.
    if (excludeOption && dto.getBeforeYear() != requiredBeforeYear) {
      continue;
    }
    mapCompareIds.add(dto.getMapSheetNum());
  }

  // Normalize compare ids (trim guards against stray whitespace/newlines).
  Set<String> compareSet =
      mapCompareIds.stream()
          .filter(Objects::nonNull)
          .map(String::trim)
          .collect(Collectors.toSet());

  // Keep only sheet numbers present in BOTH years.
  Set<String> commonIdSet =
      mapTargetIds.stream().map(String::trim).filter(compareSet::contains).collect(Collectors.toSet());

  // Rebuild the save parameters from the surviving target rows.
  return targetList.stream()
      .filter(dto -> dto.getMapSheetNum() != null)
      .filter(dto -> commonIdSet.contains(dto.getMapSheetNum().trim()))
      .map(
          dto -> {
            MapSheetNumDto mapSheetNumDto = new MapSheetNumDto();
            mapSheetNumDto.setMapSheetNum(dto.getMapSheetNum());
            mapSheetNumDto.setMapSheetName(dto.getMapSheetName());
            return mapSheetNumDto;
          })
      .toList();
}
/**
 * Calls the inference AI API and returns the accepted batch id.
 *
 * <p>NOTE(review): the final {@code catch (Exception e)} intentionally also catches the
 * IllegalStateExceptions thrown while parsing and rewraps them as INVALID_INFERENCE_RESPONSE;
 * narrowing that catch would change the error contract. The IllegalStateException thrown in the
 * "local" branch is outside the try and propagates as-is.
 *
 * @param dto request payload (model info plus pred_requests_areas)
 * @return batch_id taken from the first element of the AI's JSON array response
 * @throws CustomApiException BAD_REQUEST when dto is null; BAD_GATEWAY on a non-2xx status or an
 *     unparseable/empty response
 */
private Long ensureAccepted(InferenceSendDto dto) {
  if (dto == null) {
    log.warn("not InferenceSendDto dto");
    throw new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST);
  }
  // 1) Log the outgoing request (best effort; serialization failure is non-fatal).
  try {
    log.debug("Inference request dto={}", objectMapper.writeValueAsString(dto));
  } catch (JsonProcessingException e) {
    log.warn("Failed to serialize inference dto", e);
  }
  // 2) Temporary handling for the "local" profile: force fixed local geojson scene paths.
  if ("local".equals(profile)) {
    if (dto.getPred_requests_areas() == null) {
      throw new IllegalStateException("pred_requests_areas is null");
    }
    dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson");
    dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson");
  }
  // 3) HTTP call to the inference endpoint.
  HttpHeaders headers = new HttpHeaders();
  headers.setContentType(MediaType.APPLICATION_JSON);
  headers.setAccept(List.of(MediaType.APPLICATION_JSON));
  ExternalCallResult<String> result =
      externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
  if (result.statusCode() < 200 || result.statusCode() >= 300) {
    log.error("Inference API failed. status={}, body={}", result.statusCode(), result.body());
    throw new CustomApiException("BAD_GATEWAY", HttpStatus.BAD_GATEWAY);
  }
  // 4) Parse the response: expects a JSON array whose first element carries "batch_id".
  try {
    List<Map<String, Object>> list =
        objectMapper.readValue(result.body(), new TypeReference<>() {});
    if (list.isEmpty()) {
      throw new IllegalStateException("Inference response is empty");
    }
    Object batchIdObj = list.get(0).get("batch_id");
    if (batchIdObj == null) {
      throw new IllegalStateException("batch_id not found in response");
    }
    return Long.valueOf(batchIdObj.toString());
  } catch (Exception e) {
    log.error("Failed to parse inference response. body={}", result.body(), e);
    throw new CustomApiException("INVALID_INFERENCE_RESPONSE", HttpStatus.BAD_GATEWAY);
  }
}
/** /**
* 모델정보 조회 dto 생성 후 반환 * 모델정보 조회 dto 생성 후 반환
* *
@@ -391,6 +608,7 @@ public class InferenceResultService {
*/ */
private InferenceSendDto getModelInfo(UUID uuid) { private InferenceSendDto getModelInfo(UUID uuid) {
// 모델정보 조회
Basic modelInfo = modelMngCoreService.findByModelUuid(uuid); Basic modelInfo = modelMngCoreService.findByModelUuid(uuid);
String cdModelPath = ""; String cdModelPath = "";
@@ -414,12 +632,12 @@ public class InferenceResultService {
String modelType = ""; String modelType = "";
if (modelInfo.getModelType().equals(ModelType.M1.getId())) { if (modelInfo.getModelType().equals(ModelType.G1.getId())) {
modelType = "G1"; modelType = ModelType.G1.getId();
} else if (modelInfo.getModelType().equals(ModelType.M2.getId())) { } else if (modelInfo.getModelType().equals(ModelType.G2.getId())) {
modelType = "G2"; modelType = ModelType.G2.getId();
} else { } else {
modelType = "G3"; modelType = ModelType.G3.getId();
} }
InferenceSendDto sendDto = new InferenceSendDto(); InferenceSendDto sendDto = new InferenceSendDto();
@@ -429,7 +647,8 @@ public class InferenceResultService {
sendDto.setCls_model_path(cdClsModelPath); sendDto.setCls_model_path(cdClsModelPath);
sendDto.setCls_model_version(modelInfo.getModelVer()); sendDto.setCls_model_version(modelInfo.getModelVer());
sendDto.setCd_model_type(modelType); sendDto.setCd_model_type(modelType);
sendDto.setPriority(modelInfo.getPriority()); sendDto.setPriority(5d);
log.info("[Inference Send]SendDto={}", sendDto);
return sendDto; return sendDto;
} }
@@ -442,9 +661,37 @@ public class InferenceResultService {
* @return * @return
*/ */
private Scene getSceneInference( private Scene getSceneInference(
String yyyy, List<String> mapSheetNums, String mapSheetScope, String detectOption) { String yyyy, List<String> mapSheetNums, String mapSheetScope, DetectOption detectOption) {
return mapSheetMngCoreService.getSceneInference(
yyyy, mapSheetNums, mapSheetScope, detectOption); // geojson 생성시 필요한 영상파일 정보 조회
List<ImageFeature> features =
mapSheetMngCoreService.loadSceneInferenceBySheets(yyyy, mapSheetNums);
if (features == null || features.isEmpty()) {
log.warn(
"NOT_FOUND_MAP_SHEET_NUM : yyyy={}, scenesSize={}",
yyyy,
mapSheetNums == null ? 0 : mapSheetNums.size());
throw new CustomApiException("NOT_FOUND_MAP_SHEET_NUM", HttpStatus.NOT_FOUND);
}
return writeSceneGeoJson(yyyy, mapSheetScope, detectOption, features);
}
/**
* 년도 별로 조회하여 geojson 파일 생성
*
* @param yearDtos
* @param yyyy
* @param mapSheetScope
* @param detectOption
* @return
*/
private Scene getSceneInference(
List<MngListDto> yearDtos, String yyyy, String mapSheetScope, DetectOption detectOption) {
List<ImageFeature> features =
mapSheetMngCoreService.loadSceneInferenceByFallbackYears(yearDtos);
return writeSceneGeoJson(yyyy, mapSheetScope, detectOption, features);
} }
/** /**
@@ -523,11 +770,17 @@ public class InferenceResultService {
return inferenceResultCoreService.listGetScenes5k(id); return inferenceResultCoreService.listGetScenes5k(id);
} }
/**
* 추론 서버 현황 cpu, gpu 확인
*
* @return 서버 정보
*/
public List<InferenceServerStatusDto> getInferenceServerStatusList() { public List<InferenceServerStatusDto> getInferenceServerStatusList() {
String[] serverNames = inferenceServerName.split(","); String[] serverNames = inferenceServerName.split(",");
int serveCnt = serverNames.length; int serveCnt = serverNames.length;
// 서버정보 조회
List<InferenceServerStatusDto> dtoList = List<InferenceServerStatusDto> dtoList =
inferenceResultCoreService.getInferenceServerStatusList(); inferenceResultCoreService.getInferenceServerStatusList();
int size = dtoList.size(); int size = dtoList.size();
@@ -537,6 +790,7 @@ public class InferenceResultService {
System.out.println("size =" + size); System.out.println("size =" + size);
if (size == 0) { if (size == 0) {
// 서버 정보가 없을때
for (int k = 0; k < serveCnt; k++) { for (int k = 0; k < serveCnt; k++) {
InferenceServerStatusDto dto = new InferenceServerStatusDto(); InferenceServerStatusDto dto = new InferenceServerStatusDto();
dto.setServerName(serverNames[k]); dto.setServerName(serverNames[k]);
@@ -604,17 +858,35 @@ public class InferenceResultService {
return inferenceResultCoreService.getInferenceResultInfo(uuid); return inferenceResultCoreService.getInferenceResultInfo(uuid);
} }
/**
* 분류별 탐지건수 조회
*
* @param uuid 추론 uuid
* @return 분류별 탐지건수 정보
*/
public List<Dashboard> getInferenceClassCountList(UUID uuid) { public List<Dashboard> getInferenceClassCountList(UUID uuid) {
return inferenceResultCoreService.getInferenceClassCountList(uuid); return inferenceResultCoreService.getInferenceClassCountList(uuid);
} }
/**
* 추론결과 geom 목록 조회
*
* @param uuid 추론 uuid
* @param searchGeoReq 추론 결과 상세화면 geom 조회 조건
* @return geom 목록 정보
*/
public Page<Geom> getInferenceGeomList(UUID uuid, SearchGeoReq searchGeoReq) { public Page<Geom> getInferenceGeomList(UUID uuid, SearchGeoReq searchGeoReq) {
return inferenceResultCoreService.getInferenceGeomList(uuid, searchGeoReq); return inferenceResultCoreService.getInferenceGeomList(uuid, searchGeoReq);
} }
/** 추론 종료 */ /**
* 추론 종료
*
* @return 호출한 uuid
*/
@Transactional @Transactional
public UUID deleteInferenceEnd() { public UUID deleteInferenceEnd() {
// 추론 진행중인지 확인
SaveInferenceAiDto dto = inferenceResultCoreService.getProcessing(); SaveInferenceAiDto dto = inferenceResultCoreService.getProcessing();
if (dto == null) { if (dto == null) {
throw new CustomApiException("NOT_FOUND", HttpStatus.NOT_FOUND); throw new CustomApiException("NOT_FOUND", HttpStatus.NOT_FOUND);
@@ -626,13 +898,15 @@ public class InferenceResultService {
headers.setContentType(MediaType.APPLICATION_JSON); headers.setContentType(MediaType.APPLICATION_JSON);
headers.setAccept(List.of(MediaType.APPLICATION_JSON)); headers.setAccept(List.of(MediaType.APPLICATION_JSON));
// 종료 api 호출
ExternalCallResult<String> result = ExternalCallResult<String> result =
externalHttpClient.call(url, HttpMethod.DELETE, dto, headers, String.class); externalHttpClient.callLong(url, HttpMethod.DELETE, dto, headers, String.class);
if (!result.success()) { if (!result.success()) {
throw new CustomApiException("BAD_GATEWAY", HttpStatus.BAD_GATEWAY); throw new CustomApiException("BAD_GATEWAY", HttpStatus.BAD_GATEWAY);
} }
// 추론 정보 테이블 update
SaveInferenceAiDto request = new SaveInferenceAiDto(); SaveInferenceAiDto request = new SaveInferenceAiDto();
request.setStatus(Status.FORCED_END.getId()); request.setStatus(Status.FORCED_END.getId());
request.setUuid(dto.getUuid()); request.setUuid(dto.getUuid());
@@ -647,14 +921,17 @@ public class InferenceResultService {
} }
/** /**
* 추론결과 shp zip 파일 다운로드 경로 생성 * 추론결과 shp zip 파일 확인하여 다운로드 경로 생성
* *
* @param uuid * @param uuid 추론 uuid
* @return * @return 32자 추론 uid, shp 파일 경로
*/ */
public Map<String, Object> shpDownloadPath(UUID uuid) { public Map<String, Object> shpDownloadPath(UUID uuid) {
// 추론정보 조회
InferenceLearnDto dto = inferenceResultCoreService.getInferenceUid(uuid); InferenceLearnDto dto = inferenceResultCoreService.getInferenceUid(uuid);
String uid = dto.getUid(); String uid = dto.getUid();
// 파일 경로 생성
Path path = Path.of(datasetDir).resolve(uid).resolve("merge").resolve(uid + ".zip"); Path path = Path.of(datasetDir).resolve(uid).resolve("merge").resolve(uid + ".zip");
Map<String, Object> downloadMap = new HashMap<>(); Map<String, Object> downloadMap = new HashMap<>();
@@ -676,7 +953,7 @@ public class InferenceResultService {
} }
/** /**
* 실행중인 추론 도엽명 목록 * 분석중인 추론 도엽명 목록
* *
* @param uuid uuid * @param uuid uuid
* @return * @return
@@ -684,4 +961,76 @@ public class InferenceResultService {
public List<String> getInferenceRunMapId(UUID uuid) { public List<String> getInferenceRunMapId(UUID uuid) {
return inferenceResultCoreService.getInferenceRunMapId(uuid); return inferenceResultCoreService.getInferenceRunMapId(uuid);
} }
/**
 * Writes the scene geojson file for a year and returns the resulting Scene (features + path).
 *
 * @param yyyy year string used in the filename and layer name
 * @param mapSheetScope scope id; MapSheetScope.ALL selects the "_ALL" filename variant
 * @param detectOption detect option, mapped to a filename suffix ("_EXCL"/"_PREV"/"")
 * @param sceneInference image features to export
 * @return Scene holding the exported features and the written file path
 * @throws CustomApiException INTERNAL_SERVER_ERROR when the file cannot be written
 */
private Scene writeSceneGeoJson(
    String yyyy,
    String mapSheetScope,
    DetectOption detectOption,
    List<ImageFeature> sceneInference) {
  boolean isAll = MapSheetScope.ALL.getId().equals(mapSheetScope);
  String optionSuffix = buildOptionSuffix(detectOption);
  // Local runs write under the user's home dir; otherwise the configured inference dir.
  String targetDir;
  if ("local".equals(activeEnv)) {
    targetDir = System.getProperty("user.home") + "/geojson";
  } else {
    targetDir = inferenceDir;
  }
  // Pick the filename pattern, then render it.
  String pattern = isAll ? "%s_%s_ALL%s.geojson" : "%s_%s%s.geojson";
  String filename = String.format(pattern, yyyy, activeEnv, optionSuffix);
  Path outputPath = Paths.get(targetDir, filename);
  try {
    log.info("create Directories outputPath: {}", outputPath);
    log.info(
        "activeEnv={}, inferenceDir={}, targetDir={}, filename={}",
        activeEnv,
        inferenceDir,
        targetDir,
        filename);
    log.info("outputPath={}, parent={}", outputPath.toAbsolutePath(), outputPath.getParent());
    Files.createDirectories(outputPath.getParent());
    // EPSG:5186 is passed as the export CRS code.
    new GeoJsonFileWriter()
        .exportToFile(sceneInference, "scene_inference_" + yyyy, 5186, outputPath.toString());
    Scene result = new Scene();
    result.setFeatures(sceneInference);
    result.setFilePath(outputPath.toString());
    return result;
  } catch (IOException e) {
    log.error(
        "FAIL_CREATE_MAP_SHEET_FILE: yyyy={}, isAll={}, path={}", yyyy, isAll, outputPath, e);
    throw new CustomApiException("INTERNAL_SERVER_ERROR", HttpStatus.INTERNAL_SERVER_ERROR, e);
  }
}
/**
 * Resolves the geojson filename suffix for a detect option.
 *
 * @param detectOption detect option (may be null)
 * @return "_EXCL" or "_PREV" for those options, otherwise ""
 */
private String buildOptionSuffix(DetectOption detectOption) {
  // Null yields the empty suffix, matching the reference-equality checks this replaces.
  if (detectOption == null) {
    return "";
  }
  switch (detectOption) {
    case EXCL:
      return "_EXCL";
    case PREV:
      return "_PREV";
    default:
      return "";
  }
}
} }

View File

@@ -1,13 +1,11 @@
package com.kamco.cd.kamcoback.inference.service; package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.Scene;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceLearnDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceLearnDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService; import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService; import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService;
import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService; import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService;
import com.kamco.cd.kamcoback.scheduler.service.ShpPipelineService; import com.kamco.cd.kamcoback.scheduler.service.ShpPipelineService;
import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.UUID; import java.util.UUID;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@@ -39,10 +37,16 @@ public class InferenceResultShpService {
@Value("${file.dataset-dir}") @Value("${file.dataset-dir}")
private String datasetDir; private String datasetDir;
/** inference_results 테이블을 기준으로 분석 결과 테이블과 도형 테이블을 최신 상태로 반영한다. */ /**
* 추론 결과 inference 테이블 upsert
*
* @param uuid learn uuid
* @return
*/
@Transactional @Transactional
public InferenceResultShpDto.InferenceCntDto saveInferenceResultData(Long id) { public InferenceResultShpDto.InferenceCntDto saveInferenceResultData(UUID uuid) {
return coreService.buildInferenceData(id); Long learnId = inferenceResultCoreService.getInferenceLearnIdByUuid(uuid);
return coreService.buildInferenceData(learnId);
} }
/** /**
@@ -57,30 +61,13 @@ public class InferenceResultShpService {
return; return;
} }
String batchId = String batchIds =
Stream.of(dto.getM1ModelBatchId(), dto.getM2ModelBatchId(), dto.getM3ModelBatchId()) Stream.of(dto.getM1ModelBatchId(), dto.getM2ModelBatchId(), dto.getM3ModelBatchId())
.filter(Objects::nonNull) .filter(Objects::nonNull)
.map(String::valueOf) .map(String::valueOf)
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
// shp 파일 비동기 생성 // shp 파일 비동기 생성
shpPipelineService.runPipeline(jarPath, datasetDir, batchId, dto.getUid()); shpPipelineService.runPipeline(jarPath, datasetDir, batchIds, dto.getUid());
}
/**
* 추론 실행전 geojson 파일 생성
*
* @param yyyy
* @param mapSheetScope
* @param detectOption
* @param mapIds
* @return
*/
public Scene createGeojson(
String yyyy, String mapSheetScope, String detectOption, List<String> mapIds) {
Scene getSceneInference =
mapSheetMngCoreService.getSceneInference(yyyy, mapIds, mapSheetScope, detectOption);
log.info("getSceneInference: {}", getSceneInference);
return getSceneInference;
} }
} }

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.label; package com.kamco.cd.kamcoback.label;
import com.kamco.cd.kamcoback.common.download.RangeDownloadResponder;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
@@ -9,20 +10,35 @@ import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.WorkHistoryDto;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.UpdateClosedRequest; import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.UpdateClosedRequest;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerListResponse; import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerListResponse;
import com.kamco.cd.kamcoback.label.service.LabelAllocateService; import com.kamco.cd.kamcoback.label.service.LabelAllocateService;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto.DownloadReq;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto.searchReq;
import io.swagger.v3.oas.annotations.Hidden; import io.swagger.v3.oas.annotations.Hidden;
import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.enums.ParameterIn;
import io.swagger.v3.oas.annotations.media.Content; import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag; import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.validation.Valid; import jakarta.validation.Valid;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDate;
import java.util.List; import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.coyote.BadRequestException;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMapping;
@@ -37,6 +53,10 @@ import org.springframework.web.bind.annotation.RestController;
public class LabelAllocateApiController { public class LabelAllocateApiController {
private final LabelAllocateService labelAllocateService; private final LabelAllocateService labelAllocateService;
private final RangeDownloadResponder rangeDownloadResponder;
@Value("${file.dataset-response}")
private String responsePath;
@Operation(summary = "배정 가능한 사용자 목록 조회", description = "라벨링 작업 배정을 위한 활성 상태의 사용자 목록을 조회합니다.") @Operation(summary = "배정 가능한 사용자 목록 조회", description = "라벨링 작업 배정을 위한 활성 상태의 사용자 목록을 조회합니다.")
@ApiResponses( @ApiResponses(
@@ -333,4 +353,149 @@ public class LabelAllocateApiController {
public ApiResponseDto<Long> labelingIngProcessCnt() { public ApiResponseDto<Long> labelingIngProcessCnt() {
return ApiResponseDto.ok(labelAllocateService.findLabelingIngProcessCnt()); return ApiResponseDto.ok(labelAllocateService.findLabelingIngProcessCnt());
} }
/**
 * Streams the label zip file for the given allocation uuid, honoring HTTP Range requests.
 *
 * <p>NOTE(review): a missing file throws BadRequestException (HTTP 400), while the declared
 * ApiResponse documents 404 for "file not found" — confirm which status is intended.
 *
 * @param uuid allocation uuid used to resolve the learn uid
 * @param request incoming request (needed for Range header handling by the responder)
 * @return zip file response (possibly 206 partial content)
 * @throws IOException on file streaming errors
 */
@Operation(
    summary = "라벨 파일 다운로드",
    description = "라벨 파일 다운로드",
    parameters = {
      @Parameter(
          name = "kamco-download-uuid",
          in = ParameterIn.HEADER,
          required = true,
          description = "다운로드 요청 UUID",
          schema =
              @Schema(
                  type = "string",
                  format = "uuid",
                  example = "6d8d49dc-0c9d-4124-adc7-b9ca610cc394"))
    })
@ApiResponses(
    value = {
      @ApiResponse(
          responseCode = "200",
          description = "라벨 zip파일 다운로드",
          content =
              @Content(
                  mediaType = "application/octet-stream",
                  schema = @Schema(type = "string", format = "binary"))),
      @ApiResponse(responseCode = "404", description = "파일 없음", content = @Content),
      @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
    })
@GetMapping("/download/{uuid}")
public ResponseEntity<?> download(@PathVariable UUID uuid, HttpServletRequest request)
    throws IOException {
  // Resolve the learn uid for this allocation; the zip lives at <responsePath>/<uid>.zip.
  String uid = labelAllocateService.findLearnUid(uuid);
  Path zipPath = Paths.get(responsePath).resolve(uid + ".zip");
  if (!Files.isRegularFile(zipPath)) {
    throw new BadRequestException();
  }
  return rangeDownloadResponder.buildZipResponse(zipPath, uid + ".zip", request);
}
/**
 * Returns the paged download-audit history for a label file.
 *
 * @param uuid allocation uuid whose download URI is audited
 * @param page zero-based page number
 * @param size page size
 * @return page of download audit rows
 */
@Operation(summary = "라벨 파일 다운로드 이력 조회", description = "라벨 파일 다운로드 이력 조회")
@GetMapping(value = "/download-audit/{uuid}")
@ApiResponses(
    value = {
      @ApiResponse(
          responseCode = "200",
          description = "검색 성공",
          content =
              @Content(
                  mediaType = "application/json",
                  schema = @Schema(implementation = Page.class))),
      @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
      @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
    })
public ApiResponseDto<Page<AuditLogDto.DownloadRes>> downloadAudit(
    @Parameter(description = "UUID", example = "6d8d49dc-0c9d-4124-adc7-b9ca610cc394")
        @PathVariable
        UUID uuid,
    @Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0")
        int page,
    @Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20")
        int size) {
  // Paging parameters only; date-range/keyword filters are currently disabled.
  AuditLogDto.searchReq pageReq = new searchReq();
  pageReq.setPage(page);
  pageReq.setSize(size);

  // Filter the audit rows to this uuid's download URI.
  DownloadReq filter = new DownloadReq();
  filter.setUuid(uuid);
  filter.setRequestUri("/api/training-data/stage/download/" + uuid);

  return ApiResponseDto.ok(labelAllocateService.getDownloadAudit(pageReq, filter));
}
/**
 * Checks whether the label file for the given uuid is currently downloadable.
 *
 * @param uuid allocation uuid
 * @return true when download is allowed
 */
@Operation(summary = "다운로드 가능여부 조회", description = "다운로드 가능여부 조회 API")
@GetMapping(value = "/download-check/{uuid}")
@ApiResponses(
    value = {
      @ApiResponse(
          responseCode = "200",
          description = "검색 성공",
          content =
              @Content(
                  mediaType = "application/json",
                  schema = @Schema(implementation = Page.class))),
      @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
      @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
    })
public ApiResponseDto<Boolean> isDownloadable(
    @Parameter(description = "UUID", example = "6d8d49dc-0c9d-4124-adc7-b9ca610cc394")
        @PathVariable
        UUID uuid) {
  // Delegate the check; the service owns the downloadability rules.
  Boolean downloadable = labelAllocateService.isDownloadable(uuid);
  return ApiResponseDto.ok(downloadable);
}
/**
 * Assigns additional labeling work (when field-survey items are added).
 *
 * @param dto remaining count, labeler ids, round uuid, and base date
 * @return standard response object for the allocation result
 */
@Operation(
    summary = "라벨링작업 관리 > 추가 작업 배정(실태조사 추가되면)",
    description = "라벨링작업 관리 > 추가 작업 배정(실태조사 추가되면)")
@ApiResponses(
    value = {
      @ApiResponse(
          responseCode = "201",
          description = "등록 성공",
          content =
              @Content(
                  mediaType = "application/json",
                  schema = @Schema(implementation = Long.class))),
      @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
      @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
      @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
    })
@PostMapping("/allocate-add-stblt")
public ApiResponseDto<ApiResponseDto.ResponseObj> labelAllocateAddStblt(
    @RequestBody @Valid LabelAllocateDto.AllocateAddStbltDto dto) {
  // Unpack the request once; the service carries out the actual allocation.
  Integer remainingCnt = dto.getTotalCnt();
  ApiResponseDto.ResponseObj result =
      labelAllocateService.allocateAddStbltYn(
          remainingCnt, dto.getUuid(), dto.getLabelers(), dto.getBaseDate());
  return ApiResponseDto.okObject(result);
}
/**
 * Returns how many additional labeling items can still be allocated.
 *
 * @param uuid round master key
 * @param baseDate base date for the count
 * @return allocatable count
 */
@Operation(summary = "라벨링 추가 할당 가능한 건수", description = "라벨링 추가 할당 가능한 건수 API")
@ApiResponses(
    value = {
      @ApiResponse(responseCode = "200", description = "조회 성공"),
      @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음"),
      @ApiResponse(responseCode = "500", description = "서버 오류")
    })
@GetMapping("/allocate-add-cnt")
public ApiResponseDto<Long> allocateAddCnt(
    @RequestParam UUID uuid, @RequestParam LocalDate baseDate) {
  // Straight pass-through to the service-layer count.
  Long cnt = labelAllocateService.findAllocateAddCnt(uuid, baseDate);
  return ApiResponseDto.ok(cnt);
}
} }

View File

@@ -3,6 +3,7 @@ package com.kamco.cd.kamcoback.label.dto;
import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose; import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose;
import com.kamco.cd.kamcoback.common.utils.enums.EnumType; import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import java.time.LocalDate;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
@@ -359,4 +360,41 @@ public class LabelAllocateDto {
@Schema(description = "작업기간 종료일") @Schema(description = "작업기간 종료일")
private ZonedDateTime projectCloseDttm; private ZonedDateTime projectCloseDttm;
} }
/**
 * Projection of an inference run's learn info for label allocation.
 *
 * <p>Field meanings inferred from names — confirm against the query that populates this DTO.
 */
@Getter
@Setter
@AllArgsConstructor
@NoArgsConstructor
public static class InferenceLearnDto {
  // Analysis (inference run) uuid.
  private UUID analUuid;
  // Learn uid — presumably the 32-char uid used for file paths; verify against callers.
  private String learnUid;
  // Analysis state code.
  private String analState;
  // Analysis row id.
  private Long analId;
}
/**
 * Request body for the additional labeling allocation endpoint.
 *
 * <p>NOTE(review): only {@code @AllArgsConstructor} is present (no no-args constructor) —
 * confirm JSON deserialization of this {@code @RequestBody} works in this project's Jackson
 * setup.
 */
@Getter
@Setter
@AllArgsConstructor
public static class AllocateAddStbltDto {
  @Schema(description = "총 잔여 건수", example = "179")
  private Integer totalCnt;

  @Schema(
      description = "추가할당할 라벨러",
      example =
          """
          [
          "123454", "654321", "222233", "777222"
          ]
          """)
  private List<String> labelers;

  @Schema(description = "회차 마스터 key", example = "c0e77cc7-8c28-46ba-9ca4-11e90246ab44")
  private UUID uuid;

  @Schema(description = "기준일자", example = "2026-02-20")
  private LocalDate baseDate;
}
} }

View File

@@ -219,6 +219,9 @@ public class WorkerStatsDto {
@Deprecated @Deprecated
@Schema(description = "[Deprecated] inspectionRemainingCount 사용 권장") @Schema(description = "[Deprecated] inspectionRemainingCount 사용 권장")
private Long remainingInspectCount; private Long remainingInspectCount;
@Schema(description = "파일 다운로드 가능한 폴리곤 수")
private Long downloadPolygonCnt;
} }
@Getter @Getter

View File

@@ -16,25 +16,29 @@ import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.searchReq;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo; import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo; import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerListResponse; import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerListResponse;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
import com.kamco.cd.kamcoback.log.dto.AuditLogDto.DownloadReq;
import com.kamco.cd.kamcoback.postgres.core.AuditLogCoreService;
import com.kamco.cd.kamcoback.postgres.core.LabelAllocateCoreService; import com.kamco.cd.kamcoback.postgres.core.LabelAllocateCoreService;
import java.time.LocalDate;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.UUID; import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
@Slf4j @Slf4j
@Service @Service
@Transactional @Transactional(readOnly = true)
@RequiredArgsConstructor
public class LabelAllocateService { public class LabelAllocateService {
private final LabelAllocateCoreService labelAllocateCoreService; private final LabelAllocateCoreService labelAllocateCoreService;
private final AuditLogCoreService auditLogCoreService;
public LabelAllocateService(LabelAllocateCoreService labelAllocateCoreService) {
this.labelAllocateCoreService = labelAllocateCoreService;
}
/** /**
* 도엽 기준 asc sorting 해서 할당 수만큼 배정하는 로직 * 도엽 기준 asc sorting 해서 할당 수만큼 배정하는 로직
@@ -153,6 +157,7 @@ public class LabelAllocateService {
return labelAllocateCoreService.findInferenceDetail(uuid); return labelAllocateCoreService.findInferenceDetail(uuid);
} }
@Transactional
public ApiResponseDto.ResponseObj allocateMove( public ApiResponseDto.ResponseObj allocateMove(
Integer totalCnt, String uuid, List<String> targetUsers, String userId) { Integer totalCnt, String uuid, List<String> targetUsers, String userId) {
@@ -273,4 +278,78 @@ public class LabelAllocateService {
public Long findLabelingIngProcessCnt() { public Long findLabelingIngProcessCnt() {
return labelAllocateCoreService.findLabelingIngProcessCnt(); return labelAllocateCoreService.findLabelingIngProcessCnt();
} }
/**
 * Looks up the learning job uid for the given analysis round.
 *
 * <p>NOTE(review): {@code NOT_SUPPORTED} suspends any surrounding transaction, so this
 * read runs non-transactionally — presumably to avoid holding the class-level read-only
 * transaction open around a long-running lookup; confirm the original intent.
 *
 * @param uuid analysis round master key
 * @return learning uid resolved by the core service
 */
@Transactional(propagation = Propagation.NOT_SUPPORTED)
public String findLearnUid(UUID uuid) {
return labelAllocateCoreService.findLearnUid(uuid);
}
/**
 * 다운로드 이력 조회 — returns a page of download audit log entries.
 *
 * @param searchReq 페이징 (paging parameters)
 * @param downloadReq 조회조건 (filter conditions for the audit lookup)
 * @return page of download audit results from the audit-log core service
 */
public Page<AuditLogDto.DownloadRes> getDownloadAudit(
AuditLogDto.searchReq searchReq, DownloadReq downloadReq) {
return auditLogCoreService.findLogByAccount(searchReq, downloadReq);
}
/**
 * 다운로드 가능 여부 조회 — checks whether files for the given round are downloadable.
 *
 * @param uuid 회차 마스터 key (round master key)
 * @return {@code true} if download is allowed for the round, as determined by the core service
 */
public boolean isDownloadable(UUID uuid) {
return labelAllocateCoreService.isDownloadable(uuid);
}
/**
 * 실태조사가 값 들어온 기간만큼 할당하는 로직 (최초 할당 이후 작업).
 *
 * <p>Splits the unassigned rows for the round evenly across the selected labelers,
 * giving the remainder to the last labeler. Rows are fetched once up front and
 * assigned contiguously via {@code subList}.
 *
 * @param totalCnt requested number of rows to allocate (must be positive)
 * @param uuid 회차 마스터 key (round master key)
 * @param targetUsers labelers to receive the additional allocation
 * @param baseDate 기준일자 used to scope which rows are eligible
 * @return OK response on success, BAD_REQUEST response when input is invalid
 */
@Transactional
public ApiResponseDto.ResponseObj allocateAddStbltYn(
    Integer totalCnt, UUID uuid, List<String> targetUsers, LocalDate baseDate) {
    // Guard: no labelers selected (null-safe — previously a null list would NPE).
    if (targetUsers == null || targetUsers.isEmpty()) {
        return new ApiResponseDto.ResponseObj(ApiResponseCode.BAD_REQUEST, "추가 할당할 라벨러를 선택해주세요.");
    }
    // Guard: a null/non-positive count would make the fetch and split meaningless.
    if (totalCnt == null || totalCnt <= 0) {
        return new ApiResponseDto.ResponseObj(ApiResponseCode.BAD_REQUEST, "추가 할당할 건수가 올바르지 않습니다.");
    }
    int userCount = targetUsers.size();

    Long lastId = null;
    List<AllocateInfoDto> allIds =
        labelAllocateCoreService.fetchNextIdsAddStbltYn(
            uuid, baseDate, lastId, totalCnt.longValue());

    // BUGFIX: the core service may return fewer rows than requested; computing the
    // split from totalCnt then caused subList(index, end) to throw
    // IndexOutOfBoundsException. Distribute only what was actually fetched.
    int available = allIds.size();
    if (available == 0) {
        return new ApiResponseDto.ResponseObj(ApiResponseCode.BAD_REQUEST, "추가 할당 가능한 건수가 없습니다.");
    }
    int base = available / userCount;
    int remainder = available % userCount;

    // MapSheetAnalInferenceEntity analUid 가져오기
    Long analUid = labelAllocateCoreService.findMapSheetAnalInferenceUid(uuid);

    int index = 0;
    for (int i = 0; i < userCount; i++) {
        int assignCount = base;
        // 마지막 사람에게 나머지 몰아주기 (last labeler absorbs the remainder)
        if (i == userCount - 1) {
            assignCount += remainder;
        }
        int end = index + assignCount;
        List<AllocateInfoDto> sub = allIds.subList(index, end);
        labelAllocateCoreService.assignOwner(sub, targetUsers.get(i), analUid);
        index = end;
    }
    return new ApiResponseDto.ResponseObj(ApiResponseCode.OK, "추가 할당이 완료되었습니다.");
}
/**
 * Returns the number of rows eligible for additional allocation for the given
 * round and base date, as counted by the core service.
 *
 * @param uuid 회차 마스터 key (round master key)
 * @param baseDate 기준일자 scoping the count
 * @return eligible row count
 */
public Long findAllocateAddCnt(UUID uuid, LocalDate baseDate) {
return labelAllocateCoreService.findAllocateAddCnt(uuid, baseDate);
}
} }

View File

@@ -31,6 +31,9 @@ public class LayerDto {
@Schema(description = "uuid") @Schema(description = "uuid")
private UUID uuid; private UUID uuid;
@Schema(description = "레이어명")
private String layerName;
@Schema(example = "WMTS", description = "유형 (TILE/GEOJSON/WMTS/WMS)") @Schema(example = "WMTS", description = "유형 (TILE/GEOJSON/WMTS/WMS)")
private String layerType; private String layerType;
@@ -63,6 +66,9 @@ public class LayerDto {
@Schema(description = "uuid") @Schema(description = "uuid")
private UUID uuid; private UUID uuid;
@Schema(description = "레이어명")
private String layerName;
@Schema(description = "유형 (TILE/GEOJSON/WMTS/WMS)") @Schema(description = "유형 (TILE/GEOJSON/WMTS/WMS)")
private String layerType; private String layerType;
@@ -119,6 +125,9 @@ public class LayerDto {
@Schema(name = "LayerAddReq") @Schema(name = "LayerAddReq")
public static class AddReq { public static class AddReq {
@Schema(description = "레이어명")
private String layerName;
@Schema(description = "title WMS, WMTS 선택한 tile") @Schema(description = "title WMS, WMTS 선택한 tile")
private String title; private String title;
@@ -215,6 +224,9 @@ public class LayerDto {
@Schema(name = "LayerMapDto") @Schema(name = "LayerMapDto")
public static class LayerMapDto { public static class LayerMapDto {
@Schema(description = "레이어명")
private String layerName;
@Schema(example = "WMTS", description = "유형 (TILE/GEOJSON/WMTS/WMS)") @Schema(example = "WMTS", description = "유형 (TILE/GEOJSON/WMTS/WMS)")
private String layerType; private String layerType;
@@ -268,6 +280,7 @@ public class LayerDto {
private String crs; private String crs;
public LayerMapDto( public LayerMapDto(
String layerName,
String layerType, String layerType,
String tag, String tag,
Long sortOrder, Long sortOrder,
@@ -282,6 +295,7 @@ public class LayerDto {
UUID uuid, UUID uuid,
String rawJsonString, String rawJsonString,
String crs) { String crs) {
this.layerName = layerName;
this.layerType = layerType; this.layerType = layerType;
this.tag = tag; this.tag = tag;
this.sortOrder = sortOrder; this.sortOrder = sortOrder;

View File

@@ -26,5 +26,6 @@ public class WmsDto {
private String title; private String title;
private String description; private String description;
private String tag; private String tag;
private String layerName;
} }
} }

View File

@@ -26,5 +26,6 @@ public class WmtsDto {
private String title; private String title;
private String description; private String description;
private String tag; private String tag;
private String layerName;
} }
} }

View File

@@ -9,7 +9,6 @@ import com.kamco.cd.kamcoback.layer.dto.LayerDto.LayerMapDto;
import com.kamco.cd.kamcoback.layer.dto.LayerDto.OrderReq; import com.kamco.cd.kamcoback.layer.dto.LayerDto.OrderReq;
import com.kamco.cd.kamcoback.layer.dto.LayerDto.TileUrlDto; import com.kamco.cd.kamcoback.layer.dto.LayerDto.TileUrlDto;
import com.kamco.cd.kamcoback.layer.dto.WmsDto.WmsAddDto; import com.kamco.cd.kamcoback.layer.dto.WmsDto.WmsAddDto;
import com.kamco.cd.kamcoback.layer.dto.WmsDto.WmsAddReqDto;
import com.kamco.cd.kamcoback.layer.dto.WmsLayerInfo; import com.kamco.cd.kamcoback.layer.dto.WmsLayerInfo;
import com.kamco.cd.kamcoback.layer.dto.WmtsDto.WmtsAddDto; import com.kamco.cd.kamcoback.layer.dto.WmtsDto.WmtsAddDto;
import com.kamco.cd.kamcoback.layer.dto.WmtsLayerInfo; import com.kamco.cd.kamcoback.layer.dto.WmtsLayerInfo;
@@ -63,7 +62,7 @@ public class LayerService {
.orElseThrow(() -> new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST)); .orElseThrow(() -> new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST));
switch (layerType) { switch (layerType) {
case TILE -> { case TILE, KAMCO_WMS, KAMCO_WMTS -> {
return mapLayerCoreService.saveTile(dto); return mapLayerCoreService.saveTile(dto);
} }
@@ -79,6 +78,7 @@ public class LayerService {
addDto.setDescription(dto.getDescription()); addDto.setDescription(dto.getDescription());
addDto.setTitle(dto.getTitle()); addDto.setTitle(dto.getTitle());
addDto.setTag(dto.getTag()); addDto.setTag(dto.getTag());
addDto.setLayerName(dto.getLayerName());
return mapLayerCoreService.saveWmts(addDto); return mapLayerCoreService.saveWmts(addDto);
} }
@@ -89,6 +89,7 @@ public class LayerService {
addDto.setDescription(dto.getDescription()); addDto.setDescription(dto.getDescription());
addDto.setTitle(dto.getTitle()); addDto.setTitle(dto.getTitle());
addDto.setTag(dto.getTag()); addDto.setTag(dto.getTag());
addDto.setLayerName(dto.getLayerName());
return mapLayerCoreService.saveWms(addDto); return mapLayerCoreService.saveWms(addDto);
} }
@@ -165,24 +166,6 @@ public class LayerService {
return wmsService.getTile(); return wmsService.getTile();
} }
/**
* wms 저장
*
* @param dto
* @return
*/
@Transactional
public UUID saveWms(WmsAddReqDto dto) {
// 선택한 tile 상세정보 조회
WmsLayerInfo info = wmsService.getDetail(dto.getTitle());
WmsAddDto addDto = new WmsAddDto();
addDto.setWmsLayerInfo(info);
addDto.setDescription(dto.getDescription());
addDto.setTitle(dto.getTitle());
addDto.setTag(dto.getTag());
return mapLayerCoreService.saveWms(addDto);
}
public List<LayerMapDto> findLayerMapList(String type) { public List<LayerMapDto> findLayerMapList(String type) {
List<LayerMapDto> layerMapDtoList = mapLayerCoreService.findLayerMapList(type); List<LayerMapDto> layerMapDtoList = mapLayerCoreService.findLayerMapList(type);
layerMapDtoList.forEach( layerMapDtoList.forEach(

View File

@@ -1,14 +1,10 @@
package com.kamco.cd.kamcoback.mapsheet; package com.kamco.cd.kamcoback.mapsheet;
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto; import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.code.service.CommonCodeService;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.FilesDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.FoldersDto; import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.FoldersDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.SrchFilesDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.SrchFoldersDto; import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.SrchFoldersDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto; import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngYyyyDto;
import com.kamco.cd.kamcoback.mapsheet.service.MapSheetMngService; import com.kamco.cd.kamcoback.mapsheet.service.MapSheetMngService;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto.ModelUploadResDto; import com.kamco.cd.kamcoback.model.dto.ModelMngDto.ModelUploadResDto;
import com.kamco.cd.kamcoback.upload.dto.UploadDto; import com.kamco.cd.kamcoback.upload.dto.UploadDto;
@@ -42,7 +38,6 @@ import org.springframework.web.multipart.MultipartFile;
@RequestMapping({"/api/imagery/dataset"}) @RequestMapping({"/api/imagery/dataset"})
public class MapSheetMngApiController { public class MapSheetMngApiController {
private final CommonCodeService commonCodeService;
private final MapSheetMngService mapSheetMngService; private final MapSheetMngService mapSheetMngService;
@Value("${file.sync-root-dir}") @Value("${file.sync-root-dir}")
@@ -51,7 +46,7 @@ public class MapSheetMngApiController {
@Value("${file.sync-tmp-dir}") @Value("${file.sync-tmp-dir}")
private String syncRootTmpDir; private String syncRootTmpDir;
@Operation(summary = "영상 데이터 관리 목록 조회", description = "영상 데이터 관리 목록 조회") @Operation(summary = "영상데이터관리 > 목록 조회", description = "영상데이터관리 > 목록 조회")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -70,7 +65,7 @@ public class MapSheetMngApiController {
return ApiResponseDto.ok(mapSheetMngService.findMapSheetMngList()); return ApiResponseDto.ok(mapSheetMngService.findMapSheetMngList());
} }
@Operation(summary = "영상 데이터 관리 상세", description = "영상 데이터 관리 상세") @Operation(summary = "영상데이터관리 > 상세 조회", description = "영상데이터관리 > 상세 조회")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -89,7 +84,7 @@ public class MapSheetMngApiController {
return ApiResponseDto.ok(mapSheetMngService.findMapSheetMng(mngYyyy)); return ApiResponseDto.ok(mapSheetMngService.findMapSheetMng(mngYyyy));
} }
@Operation(summary = "영상관리 > 데이터 등록", description = "영상관리 > 데이터 등록") @Operation(summary = "영상데이터관리 > 데이터 등록", description = "영상데이터관리 > 데이터 등록")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -109,26 +104,7 @@ public class MapSheetMngApiController {
return ApiResponseDto.ok(mapSheetMngService.mngDataSave(AddReq)); return ApiResponseDto.ok(mapSheetMngService.mngDataSave(AddReq));
} }
@Operation(summary = "영상관리 > 작업완료", description = "영상관리 > 작업완료") @Operation(summary = "영상데이터관리 > 데이터 등록 > 연도 선택 목록", description = "영상데이터관리 > 데이터 등록 > 연도 선택 목록")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "작업완료 처리 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = Long.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PutMapping("/mng-complete")
public ApiResponseDto<MapSheetMngDto.DmlReturn> mngComplete(@RequestParam @Valid int mngYyyy) {
return ApiResponseDto.ok(mapSheetMngService.mngComplete(mngYyyy));
}
@Operation(summary = "영상 데이터 관리 년도 목록", description = "영상 데이터 관리 년도 목록")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -147,7 +123,7 @@ public class MapSheetMngApiController {
return ApiResponseDto.ok(mapSheetMngService.findMapSheetMngYyyyList()); return ApiResponseDto.ok(mapSheetMngService.findMapSheetMngYyyyList());
} }
@Operation(summary = "영상 데이터 관리 오류 목록", description = "영상 데이터 관리 오류 목록") @Operation(summary = "영상데이터관리 > 상세 > 오류 처리 내역", description = "영상데이터관리 > 상세 > 오류 처리 내역")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -166,42 +142,9 @@ public class MapSheetMngApiController {
return ApiResponseDto.ok(mapSheetMngService.findMapSheetErrorList(searchReq)); return ApiResponseDto.ok(mapSheetMngService.findMapSheetErrorList(searchReq));
} }
/* @Operation(
@Operation(summary = "오류데이터 팝업 > 업로드 처리", description = "오류데이터 팝업 > 업로드 처리") summary = "영상데이터관리 > 상세 > 오류 처리 내역 > 업로드 (페어 파일 저장)",
@ApiResponses( description = "영상데이터관리 > 상세 > 오류 처리 내역 > 업로드 (페어 파일 저장)")
value = {
@ApiResponse(
responseCode = "201",
description = "업로드 처리 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = Long.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PutMapping("/upload-process")
public ApiResponseDto<MapSheetMngDto.DmlReturn> uploadProcess(
@RequestBody @Valid List<Long> hstUidList) {
return ApiResponseDto.ok(mapSheetMngService.uploadProcess(hstUidList));
}
*/
/*
@Operation(summary = "오류데이터 팝업 > 추론 제외", description = "오류데이터 팝업 > 추론 제외")
@PutMapping("/except-inference")
public ApiResponseDto<MapSheetMngDto.DmlReturn> updateExceptUseInference(
@RequestBody @Valid List<Long> hstUidList) {
return ApiResponseDto.ok(mapSheetMngService.updateExceptUseInference(hstUidList));
}
*/
@Operation(summary = "페어 파일 업로드", description = "TFW/TIF 두 파일을 쌍으로 업로드 및 검증")
@PostMapping(value = "/upload-pair", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) @PostMapping(value = "/upload-pair", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public ApiResponseDto<MapSheetMngDto.DmlReturn> uploadPair( public ApiResponseDto<MapSheetMngDto.DmlReturn> uploadPair(
@RequestPart("tfw") MultipartFile tfwFile, @RequestPart("tfw") MultipartFile tfwFile,
@@ -213,7 +156,9 @@ public class MapSheetMngApiController {
mapSheetMngService.uploadPair(tfwFile, tifFile, hstUid, tifFileSize)); mapSheetMngService.uploadPair(tfwFile, tifFile, hstUid, tifFileSize));
} }
@Operation(summary = "영상관리 > 파일조회", description = "영상관리 > 파일조회") @Operation(
summary = "영상데이터관리 > 상세 > 오류 처리 내역 > 중복제거 > 팝업 내 해당 파일조회",
description = "영상데이터관리 > 상세 > 오류 처리 내역 > 중복제거 > 팝업 내 해당 파일조회")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -234,13 +179,13 @@ public class MapSheetMngApiController {
} }
@Operation( @Operation(
summary = "영상관리 > 파일사용설정 및 중복제거", summary = "영상데이터관리 > 상세 > 오류 처리 내역 > 중복제거 업데이트",
description = "영상관리 >파일사용설정 및 중복제거(중복파일제거 및 선택파일사용설정)") description = "영상데이터관리 > 상세 > 오류 처리 내역 > 중복제거 업데이트")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
responseCode = "201", responseCode = "201",
description = "파일사용설정 처리 성공", description = "중복제거 업데이트 처리 성공",
content = content =
@Content( @Content(
mediaType = "application/json", mediaType = "application/json",
@@ -255,7 +200,7 @@ public class MapSheetMngApiController {
return ApiResponseDto.ok(mapSheetMngService.setUseByFileUidMngFile(fileUids)); return ApiResponseDto.ok(mapSheetMngService.setUseByFileUidMngFile(fileUids));
} }
@Operation(summary = "폴더 조회", description = "폴더 조회 (ROOT:/app/original-images 이하로 경로입력)") @Operation(summary = "영상데이터관리 > 데이터 등록 > NAS 폴더 선택", description = "영상데이터관리 > 데이터 등록 > NAS 폴더 선택")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse( @ApiResponse(
@@ -274,45 +219,9 @@ public class MapSheetMngApiController {
return ApiResponseDto.createOK(mapSheetMngService.getFolderAll(srchDto)); return ApiResponseDto.createOK(mapSheetMngService.getFolderAll(srchDto));
} }
@Operation(summary = "지정폴더내 파일목록 조회", description = "지정폴더내 파일목록 조회") @Operation(
@ApiResponses( summary = "영상데이터관리 > 상세 > 오류 처리 내역 > 업로드 (영상 tif 대용량 파일 분할 전송)",
value = { description = "영상데이터관리 > 상세 > 오류 처리 내역 > 업로드 (영상 tif 대용량 파일 분할 전송)")
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/file-list")
public ApiResponseDto<FilesDto> getFiles(@RequestBody SrchFilesDto srchDto) {
return ApiResponseDto.createOK(mapSheetMngService.getFilesAll(srchDto));
}
@Operation(summary = "영상 데이터 관리 완료 년도 목록 조회", description = "영상 데이터 관리 완료 년도 목록 조회")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/mng-done-yyyy-list")
public ApiResponseDto<List<MngYyyyDto>> findMapSheetMngDoneYyyyList() {
return ApiResponseDto.ok(mapSheetMngService.findMapSheetMngDoneYyyyList());
}
@Operation(summary = "영상 tif 대용량 파일 분할 전송", description = "영상 tif 파일 대용량 파일을 청크 단위로 전송합니다.")
@ApiResponses( @ApiResponses(
value = { value = {
@ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content), @ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content),

View File

@@ -47,6 +47,9 @@ public class MapSheetMngService {
private final UploadService uploadService; private final UploadService uploadService;
private final UserUtil userUtil = new UserUtil(); private final UserUtil userUtil = new UserUtil();
@Value("${file.root}")
private String nfsRootDir;
@Value("${file.sync-root-dir}") @Value("${file.sync-root-dir}")
private String syncRootDir; private String syncRootDir;
@@ -56,10 +59,20 @@ public class MapSheetMngService {
@Value("${file.sync-file-extention}") @Value("${file.sync-file-extention}")
private String syncFileExtention; private String syncFileExtention;
/**
* 영상데이터관리 > 목록 조회
*
* @return
*/
public List<MngDto> findMapSheetMngList() { public List<MngDto> findMapSheetMngList() {
return mapSheetMngCoreService.findMapSheetMngList(); return mapSheetMngCoreService.findMapSheetMngList();
} }
/**
* 영상데이터관리 > 데이터 등록 > 연도 선택 목록
*
* @return
*/
public List<Integer> findMapSheetMngYyyyList() { public List<Integer> findMapSheetMngYyyyList() {
return mapSheetMngCoreService.findMapSheetMngYyyyList(); return mapSheetMngCoreService.findMapSheetMngYyyyList();
} }
@@ -74,6 +87,12 @@ public class MapSheetMngService {
.toList(); .toList();
} }
/**
* 영상데이터관리 > 상세 조회
*
* @param mngYyyy
* @return
*/
public MngDto findMapSheetMng(int mngYyyy) { public MngDto findMapSheetMng(int mngYyyy) {
return mapSheetMngCoreService.findMapSheetMng(mngYyyy); return mapSheetMngCoreService.findMapSheetMng(mngYyyy);
} }
@@ -86,6 +105,12 @@ public class MapSheetMngService {
return new DmlReturn("success", "작업완료 처리되었습니다."); return new DmlReturn("success", "작업완료 처리되었습니다.");
} }
/**
* 영상데이터관리 > 상세 > 오류 처리 내역
*
* @param searchReq
* @return
*/
public Page<ErrorDataDto> findMapSheetErrorList(@Valid ErrorSearchReq searchReq) { public Page<ErrorDataDto> findMapSheetErrorList(@Valid ErrorSearchReq searchReq) {
return mapSheetMngCoreService.findMapSheetErrorList(searchReq); return mapSheetMngCoreService.findMapSheetErrorList(searchReq);
} }
@@ -94,12 +119,20 @@ public class MapSheetMngService {
return mapSheetMngCoreService.findMapSheetError(hstUid); return mapSheetMngCoreService.findMapSheetError(hstUid);
} }
/**
* 영상데이터관리 > 데이터 등록
*
* @param addReq
* @return
*/
@Transactional @Transactional
public DmlReturn mngDataSave(AddReq addReq) { public DmlReturn mngDataSave(AddReq addReq) {
addReq.setCreatedUid(userUtil.getId()); addReq.setCreatedUid(userUtil.getId());
int execCnt = mapSheetMngCoreService.mngDataSave(addReq); int execCnt =
mapSheetMngCoreService.mngDataSave(addReq); // tb_map_sheet_mng_hst 테이블에 insert된 개수 리턴됨
return new MapSheetMngDto.DmlReturn("success", addReq.getMngYyyy() + "년, " + execCnt + "건 생성"); return new MapSheetMngDto.DmlReturn("success", addReq.getMngYyyy() + "년, " + execCnt + "건 생성");
} }
@@ -107,15 +140,24 @@ public class MapSheetMngService {
return mapSheetMngCoreService.updateExceptUseInference(hstUidList); return mapSheetMngCoreService.updateExceptUseInference(hstUidList);
} }
/**
* 영상데이터관리 > 상세 > 오류 처리 내역 > 업로드 (페어 파일 저장)
*
* @param tfwFile
* @param tifFile
* @param hstUid
* @param tifFileSize
* @return
*/
@Transactional @Transactional
public DmlReturn uploadPair( public DmlReturn uploadPair(
MultipartFile tfwFile, String tifFile, Long hstUid, Long tifFileSize) { MultipartFile tfwFile, String tifFile, Long hstUid, Long tifFileSize) {
String rootPath = syncRootDir;
String tmpPath = syncTmpDir; String tmpPath = syncTmpDir;
DmlReturn dmlReturn = new DmlReturn("success", "UPLOAD COMPLETE"); DmlReturn dmlReturn = new DmlReturn("success", "UPLOAD COMPLETE");
// hst 테이블에 선택한 hstUid row 정보 조회
ErrorDataDto errDto = mapSheetMngCoreService.findMapSheetError(hstUid); ErrorDataDto errDto = mapSheetMngCoreService.findMapSheetError(hstUid);
if (errDto == null) { if (errDto == null) {
@@ -133,33 +175,14 @@ public class MapSheetMngService {
return dmlReturn; return dmlReturn;
} }
MngDto mngDto = mapSheetMngCoreService.findMapSheetMng(errDto.getMngYyyy()); // ★ tif 파일은 대용량 업로드에서 아래 정보들을 체크하기 때문에 체크 로직에서 제외됨
String targetYearDir = mngDto.getMngPath();
// 중복체크 -> 도엽50k/uuid 경로에 업로드 할 거라 overwrite 되지 않음
// if (!overwrite) {
// dmlReturn =
// this.duplicateFile(
// errDto.getMngYyyy(), tfwFile.getOriginalFilename(), tifFile.getOriginalFilename());
// if (dmlReturn.getFlag().equals("duplicate")) {
// return dmlReturn;
// }
// }
// 멀티파트 파일 tmp폴더 저장(파일형식 체크를 위해) // 멀티파트 파일 tmp폴더 저장(파일형식 체크를 위해)
String tfwTmpPath = tmpPath + tfwFile.getOriginalFilename(); String tfwTmpPath = tmpPath + tfwFile.getOriginalFilename();
// String tifTmpPath = tmpPath + tifFile;
if (!FIleChecker.multipartSaveTo(tfwFile, tfwTmpPath)) { if (!FIleChecker.multipartSaveTo(tfwFile, tfwTmpPath)) {
return new DmlReturn("fail", "UPLOAD ERROR"); return new DmlReturn("fail", "UPLOAD ERROR");
} }
// if (!FIleChecker.multipartSaveTo(tifFile, tifTmpPath)) {
// return new DmlReturn("fail", "UPLOAD ERROR");
// }
// if (!FIleChecker.cmmndGdalInfo(tifTmpPath)) {
// return new DmlReturn("fail", "TIF TYPE ERROR");
// }
if (!FIleChecker.checkTfw(tfwTmpPath)) { if (!FIleChecker.checkTfw(tfwTmpPath)) {
return new DmlReturn("fail", "TFW TYPE ERROR"); return new DmlReturn("fail", "TFW TYPE ERROR");
} }
@@ -173,7 +196,6 @@ public class MapSheetMngService {
} }
Path tfwTmpSavePath = Paths.get(tfwTmpPath); Path tfwTmpSavePath = Paths.get(tfwTmpPath);
// Path tifTmpSavePath = Paths.get(tifTmpPath);
Path tfwTargetPath = null; Path tfwTargetPath = null;
Path tifTargetPath = null; Path tifTargetPath = null;
@@ -220,7 +242,7 @@ public class MapSheetMngService {
try { try {
Files.move(tfwTmpSavePath, tfwTargetPath, StandardCopyOption.REPLACE_EXISTING); Files.move(tfwTmpSavePath, tfwTargetPath, StandardCopyOption.REPLACE_EXISTING);
// Files.move(tifTmpSavePath, tifTargetPath, StandardCopyOption.REPLACE_EXISTING); // tif파일은 대용량 업로드에서 이미 업로드 했기 때문에 완료된 상태
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@@ -232,7 +254,7 @@ public class MapSheetMngService {
updReqSyncCheckState.setFilePath(uploadPath); updReqSyncCheckState.setFilePath(uploadPath);
updReqSyncCheckState.setSyncCheckTfwFileName(tfwFile.getOriginalFilename()); updReqSyncCheckState.setSyncCheckTfwFileName(tfwFile.getOriginalFilename());
updReqSyncCheckState.setSyncCheckTifFileName(tifFile); updReqSyncCheckState.setSyncCheckTifFileName(tifFile);
updReqSyncCheckState.setSyncCheckState("DONE"); updReqSyncCheckState.setSyncCheckState("DONE"); // 오류처리 완료
mapSheetMngCoreService.updateMapSheetMngHstSyncCheckState(updReqSyncCheckState); mapSheetMngCoreService.updateMapSheetMngHstSyncCheckState(updReqSyncCheckState);
// 파일정보 업데이트 // 파일정보 업데이트
mapSheetMngCoreService.deleteByHstUidMngFile(hstUid); mapSheetMngCoreService.deleteByHstUidMngFile(hstUid);
@@ -337,12 +359,16 @@ public class MapSheetMngService {
public FoldersDto getFolderAll(SrchFoldersDto srchDto) { public FoldersDto getFolderAll(SrchFoldersDto srchDto) {
Path startPath = Paths.get(syncRootDir + srchDto.getDirPath()); // "경로중복"
String dirPath = syncRootDir + srchDto.getDirPath(); String dirPath =
String sortType = "name desc"; (srchDto.getDirPath() == null || srchDto.getDirPath().isEmpty())
? syncRootDir
: srchDto.getDirPath();
// String dirPath = syncRootDir + srchDto.getDirPath();
log.info("[FIND_FOLDER] DIR : {}", dirPath);
List<FIleChecker.Folder> folderList = List<FIleChecker.Folder> folderList =
FIleChecker.getFolderAll(dirPath).stream() FIleChecker.getFolderAll(dirPath, nfsRootDir).stream()
.filter(dir -> dir.getIsValid().equals(true)) .filter(dir -> dir.getIsValid().equals(true))
.toList(); .toList();
@@ -380,6 +406,11 @@ public class MapSheetMngService {
mapSheetMngCoreService.getSceneInference(yyyy); mapSheetMngCoreService.getSceneInference(yyyy);
} }
/**
* 연도 목록 조회
*
* @return
*/
public List<MngYyyyDto> findMapSheetMngDoneYyyyList() { public List<MngYyyyDto> findMapSheetMngDoneYyyyList() {
List<MngDto> mngList = mapSheetMngCoreService.findMapSheetMngList(); List<MngDto> mngList = mapSheetMngCoreService.findMapSheetMngList();

View File

@@ -169,6 +169,7 @@ public class AuthController {
if (refreshToken == null || !jwtTokenProvider.isValidToken(refreshToken)) { if (refreshToken == null || !jwtTokenProvider.isValidToken(refreshToken)) {
throw new AccessDeniedException("만료되었거나 유효하지 않은 리프레시 토큰 입니다."); throw new AccessDeniedException("만료되었거나 유효하지 않은 리프레시 토큰 입니다.");
} }
String username = jwtTokenProvider.getSubject(refreshToken); String username = jwtTokenProvider.getSubject(refreshToken);
// Redis에 저장된 RefreshToken과 일치하는지 확인 // Redis에 저장된 RefreshToken과 일치하는지 확인

View File

@@ -5,7 +5,6 @@ import com.kamco.cd.kamcoback.members.dto.MembersDto;
import com.kamco.cd.kamcoback.members.dto.MembersDto.Basic; import com.kamco.cd.kamcoback.members.dto.MembersDto.Basic;
import com.kamco.cd.kamcoback.members.service.AdminService; import com.kamco.cd.kamcoback.members.service.AdminService;
import com.kamco.cd.kamcoback.members.service.MembersService; import com.kamco.cd.kamcoback.members.service.MembersService;
import com.kamco.cd.kamcoback.scheduler.service.MemberInactiveJobService;
import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content; import io.swagger.v3.oas.annotations.media.Content;
@@ -35,7 +34,6 @@ public class MembersApiController {
private final MembersService membersService; private final MembersService membersService;
private final AdminService adminService; private final AdminService adminService;
private final MemberInactiveJobService memberInactiveJobService;
@Operation(summary = "회원정보 목록", description = "회원정보 조회") @Operation(summary = "회원정보 목록", description = "회원정보 조회")
@ApiResponses( @ApiResponses(
@@ -159,13 +157,4 @@ public class MembersApiController {
String employeeNo) { String employeeNo) {
return ApiResponseDto.ok(adminService.existsByEmployeeNo(employeeNo)); return ApiResponseDto.ok(adminService.existsByEmployeeNo(employeeNo));
} }
@Operation(
summary = "라벨러/검수자 최종로그인 28일 경과 이후 사용중지(스케줄링 실행)",
description = "라벨러/검수자 최종로그인 28일 경과 이후 사용중지 처리")
@GetMapping("/member-inactive-job")
public ApiResponseDto<Void> memberInactiveJob() {
memberInactiveJobService.memberActive28daysToInactive();
return ApiResponseDto.ok(null);
}
} }

View File

@@ -9,8 +9,10 @@ import lombok.RequiredArgsConstructor;
import org.springframework.cache.annotation.Cacheable; import org.springframework.cache.annotation.Cacheable;
import org.springframework.data.redis.core.StringRedisTemplate; import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service @Service
@Transactional(readOnly = true)
@RequiredArgsConstructor @RequiredArgsConstructor
public class MenuService { public class MenuService {

View File

@@ -41,21 +41,6 @@ public class ModelMngApiController {
private final ModelMngService modelMngService; private final ModelMngService modelMngService;
@Value("${file.sync-root-dir}")
private String syncRootDir;
@Value("${file.sync-tmp-dir}")
private String syncTmpDir;
@Value("${file.sync-file-extention}")
private String syncFileExtention;
@Value("${file.dataset-dir}")
private String datasetDir;
@Value("${file.dataset-tmp-dir}")
private String datasetTmpDir;
@Value("${file.model-dir}") @Value("${file.model-dir}")
private String modelDir; private String modelDir;

View File

@@ -21,9 +21,9 @@ public class ModelMngDto {
@Getter @Getter
@AllArgsConstructor @AllArgsConstructor
public enum ModelType implements EnumType { public enum ModelType implements EnumType {
M1("모델 M1"), G1("G1"),
M2("모델 M2"), G2("G2"),
M3("모델 M3"); G3("G3");
private final String desc; private final String desc;
@@ -67,6 +67,7 @@ public class ModelMngDto {
private String clsModelVersion; private String clsModelVersion;
private Double priority; private Double priority;
private String memo; private String memo;
private UUID uuid;
public Basic( public Basic(
Long modelUid, Long modelUid,
@@ -89,7 +90,8 @@ public class ModelMngDto {
String clsModelFileName, String clsModelFileName,
String clsModelVersion, String clsModelVersion,
double priority, double priority,
String memo) { String memo,
UUID uuid) {
this.modelUid = modelUid; this.modelUid = modelUid;
this.modelVer = modelVer; this.modelVer = modelVer;
this.createCompleteDttm = createCompleteDttm; this.createCompleteDttm = createCompleteDttm;
@@ -111,6 +113,7 @@ public class ModelMngDto {
this.clsModelVersion = clsModelVersion; this.clsModelVersion = clsModelVersion;
this.priority = priority; this.priority = priority;
this.memo = memo; this.memo = memo;
this.uuid = uuid;
} }
} }

View File

@@ -35,33 +35,22 @@ public class ModelMngService {
private final UploadService uploadService; private final UploadService uploadService;
@Value("${file.sync-root-dir}")
private String syncRootDir;
@Value("${file.sync-tmp-dir}")
private String syncTmpDir;
@Value("${file.sync-file-extention}")
private String syncFileExtention;
@Value("${file.dataset-dir}")
private String datasetDir;
@Value("${file.dataset-tmp-dir}")
private String datasetTmpDir;
@Value("${file.model-dir}")
private String modelDir;
@Value("${file.model-tmp-dir}")
private String modelTmpDir;
@Value("${file.pt-path}") @Value("${file.pt-path}")
private String ptPath; private String ptPath;
@Value("${file.pt-FileName}") @Value("${file.pt-FileName}")
private String ptFileName; private String ptFileName;
/**
* 모델조회
*
* @param searchReq 페이징
* @param startDate 시작날짜
* @param endDate 종료날짜
* @param modelType 모델 타입 G1, G2, G3
* @param searchVal 모델 ver
* @return 모델 목록
*/
public Page<ModelMngDto.ModelList> findModelMgmtList( public Page<ModelMngDto.ModelList> findModelMgmtList(
ModelMngDto.searchReq searchReq, ModelMngDto.searchReq searchReq,
LocalDate startDate, LocalDate startDate,

View File

@@ -46,6 +46,13 @@ public class AuditLogCoreService
return auditLogRepository.findLogByAccount(searchRange, searchValue); return auditLogRepository.findLogByAccount(searchRange, searchValue);
} }
/**
* 다운로드 이력 조회
*
* @param searchReq 페이징 파라미터
* @param downloadReq 다운로드 이력 팝업 검색 조건
* @return 다운로드 이력 정보 목록
*/
public Page<AuditLogDto.DownloadRes> findLogByAccount( public Page<AuditLogDto.DownloadRes> findLogByAccount(
AuditLogDto.searchReq searchReq, DownloadReq downloadReq) { AuditLogDto.searchReq searchReq, DownloadReq downloadReq) {
return auditLogRepository.findDownloadLog(searchReq, downloadReq); return auditLogRepository.findDownloadLog(searchReq, downloadReq);

View File

@@ -6,7 +6,10 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto; import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapScaleType; import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapScaleType;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapSheetList; import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapSheetList;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.PointFeatureList;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.PolygonFeatureList;
import com.kamco.cd.kamcoback.common.enums.DetectionClassification; import com.kamco.cd.kamcoback.common.enums.DetectionClassification;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.changedetection.ChangeDetectionRepository; import com.kamco.cd.kamcoback.postgres.repository.changedetection.ChangeDetectionRepository;
import java.util.List; import java.util.List;
@@ -15,6 +18,7 @@ import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Point;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
@Service @Service
@@ -96,4 +100,50 @@ public class ChangeDetectionCoreService {
public List<MapSheetList> getChangeDetectionMapSheet50kList(UUID uuid) { public List<MapSheetList> getChangeDetectionMapSheet50kList(UUID uuid) {
return changeDetectionRepository.getChangeDetectionMapSheet50kList(uuid); return changeDetectionRepository.getChangeDetectionMapSheet50kList(uuid);
} }
/**
* 선택 폴리곤 조회 by object id
*
* @param chnDtctId 회차 uid 32자
* @param cdObjectId geo object uid 32자
* @param cdObjectIds geo object uids 32자
* @return
*/
public ChangeDetectionDto.PolygonFeatureList getPolygonListByCd(
String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu) {
return changeDetectionRepository.getPolygonListByCd(chnDtctId, cdObjectId, cdObjectIds, pnu);
}
/**
* 선택 Point 조회 by object id
*
* @param chnDtctId 회차 uid 32자
* @param cdObjectId geo object uid 32자
* @param cdObjectIds geo object uids 32자
* @return
*/
public ChangeDetectionDto.PointFeatureList getPointListByCd(
String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu) {
return changeDetectionRepository.getPointListByCd(chnDtctId, cdObjectId, cdObjectIds, pnu);
}
/**
* learn uuid 조회
*
* @param chnDtctId
* @return
*/
public UUID getLearnUuid(String chnDtctId) {
return changeDetectionRepository
.getLearnUuid(chnDtctId)
.orElseThrow(() -> new CustomApiException("NOT_FOUND_DATA", HttpStatus.NOT_FOUND));
}
public PolygonFeatureList getChangeDetectionPnuPolygonList(UUID uuid, String pnu) {
return changeDetectionRepository.getChangeDetectionPnuPolygonList(uuid, pnu);
}
public PointFeatureList getChangeDetectionPnuPointList(UUID uuid, String pnu) {
return changeDetectionRepository.getChangeDetectionPnuPointList(uuid, pnu);
}
} }

View File

@@ -77,4 +77,12 @@ public class GukYuinCoreService {
public void updateMapSheetInferenceLabelEndStatus(Long learnId) { public void updateMapSheetInferenceLabelEndStatus(Long learnId) {
gukYuinRepository.updateMapSheetInferenceLabelEndStatus(learnId); gukYuinRepository.updateMapSheetInferenceLabelEndStatus(learnId);
} }
public List<String> findStbltObjectIds(String uid, String mapSheetNum) {
return gukYuinRepository.findStbltObjectIds(uid, mapSheetNum);
}
public Integer updateStbltRandomData(String uid, int updateCnt) {
return gukYuinRepository.updateStbltRandomData(uid, updateCnt);
}
} }

View File

@@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.gukyuin.dto.GukYuinDto.GeomUidDto; import com.kamco.cd.kamcoback.gukyuin.dto.GukYuinDto.GeomUidDto;
import com.kamco.cd.kamcoback.postgres.repository.gukyuin.GukYuinLabelJobRepository; import com.kamco.cd.kamcoback.postgres.repository.gukyuin.GukYuinLabelJobRepository;
import java.time.LocalDate;
import java.util.List; import java.util.List;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
@@ -15,8 +16,8 @@ public class GukYuinLabelJobCoreService {
this.gukYuinLabelRepository = gukYuinLabelRepository; this.gukYuinLabelRepository = gukYuinLabelRepository;
} }
public List<GeomUidDto> findYesterdayLabelingCompleteList() { public List<GeomUidDto> findYesterdayLabelingCompleteList(LocalDate baseDate) {
return gukYuinLabelRepository.findYesterdayLabelingCompleteList(); return gukYuinLabelRepository.findYesterdayLabelingCompleteList(baseDate);
} }
@Transactional @Transactional

View File

@@ -0,0 +1,26 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.repository.gukyuin.GukYuinPnuCntUpdateJobRepository;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
public class GukYuinPnuCntUpdateJobCoreService {
private final GukYuinPnuCntUpdateJobRepository gukYuinPnuCntUpdateRepository;
public GukYuinPnuCntUpdateJobCoreService(
GukYuinPnuCntUpdateJobRepository gukYuinPnuCntUpdateRepository) {
this.gukYuinPnuCntUpdateRepository = gukYuinPnuCntUpdateRepository;
}
@Transactional
public void updateGukYuinContListPnuUpdateCnt() {
gukYuinPnuCntUpdateRepository.updateGukYuinContListPnuUpdateCnt();
}
@Transactional
public void updateGukYuinApplyStatus(String uid, String status) {
gukYuinPnuCntUpdateRepository.updateGukYuinApplyStatus(uid, status);
}
}

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.postgres.core; package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectContDto.StbltResult;
import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto.LearnKeyDto; import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto.LearnKeyDto;
import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto.RlbDtctMastDto; import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto.RlbDtctMastDto;
import com.kamco.cd.kamcoback.postgres.entity.PnuEntity; import com.kamco.cd.kamcoback.postgres.entity.PnuEntity;
@@ -24,13 +25,10 @@ public class GukYuinStbltJobCoreService {
@Transactional @Transactional
public void updateGukYuinEligibleForSurvey(String resultUid, RlbDtctMastDto stbltDto) { public void updateGukYuinEligibleForSurvey(String resultUid, RlbDtctMastDto stbltDto) {
String chnDtctObjtId = "";
PnuEntity entity = PnuEntity entity =
gukYuinStbltRepository.findPnuEntityByResultUid(resultUid, stbltDto.getPnu()); gukYuinStbltRepository.findPnuEntityByResultUid(resultUid, stbltDto.getPnu());
if (entity != null) { if (entity != null) {
chnDtctObjtId = resultUid;
entity.setPnuDtctId(stbltDto.getPnuDtctId()); entity.setPnuDtctId(stbltDto.getPnuDtctId());
entity.setPnu(stbltDto.getPnu()); entity.setPnu(stbltDto.getPnu());
entity.setLrmSyncYmd(stbltDto.getLrmSyncYmd()); entity.setLrmSyncYmd(stbltDto.getLrmSyncYmd());
@@ -68,8 +66,11 @@ public class GukYuinStbltJobCoreService {
entity.setCreatedDttm(ZonedDateTime.now()); entity.setCreatedDttm(ZonedDateTime.now());
gukYuinStbltRepository.save(entity); gukYuinStbltRepository.save(entity);
//
} }
} }
@Transactional
public void updateGukYuinObjectStbltYn(String resultUid, StbltResult stbResult) {
gukYuinStbltRepository.updateGukYuinObjectStbltYn(resultUid, stbResult);
}
} }

View File

@@ -19,6 +19,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto; import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto.ModelType;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity; import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity; import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
@@ -35,6 +36,7 @@ import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto;
import jakarta.persistence.EntityManager; import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityNotFoundException; import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.NotNull;
import java.time.LocalDateTime;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
@@ -45,7 +47,7 @@ import java.util.function.Consumer;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2; import lombok.extern.slf4j.Slf4j;
import org.springframework.dao.DataAccessException; import org.springframework.dao.DataAccessException;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.http.HttpStatus; import org.springframework.http.HttpStatus;
@@ -53,7 +55,7 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
@Service @Service
@Log4j2 @Slf4j
@RequiredArgsConstructor @RequiredArgsConstructor
public class InferenceResultCoreService { public class InferenceResultCoreService {
@@ -78,12 +80,13 @@ public class InferenceResultCoreService {
} }
/** /**
* 변화탐지 실행 정보 생성 * 변화탐지 실행 정보 생성 - 추론 실행 목록 테이블 저장, 도엽목록별 상태 체크 테이블 저장
* *
* @param req * @param req 추론 실행 목록 uuid
*/ */
public UUID saveInferenceInfo(InferenceResultDto.RegReq req, List<MngListDto> targetList) { public UUID saveInferenceInfo(InferenceResultDto.RegReq req, List<MngListDto> targetList) {
// 대표 도엽명 외 N 건 실행 문구 만들기 위해 Null, 중복 제거
List<MngListDto> distinctList = List<MngListDto> distinctList =
targetList.stream() targetList.stream()
.filter(dto -> dto.getMapSheetName() != null && !dto.getMapSheetName().isBlank()) .filter(dto -> dto.getMapSheetName() != null && !dto.getMapSheetName().isBlank())
@@ -110,30 +113,26 @@ public class InferenceResultCoreService {
MapSheetLearnEntity mapSheetLearnEntity = new MapSheetLearnEntity(); MapSheetLearnEntity mapSheetLearnEntity = new MapSheetLearnEntity();
mapSheetLearnEntity.setTitle(req.getTitle()); mapSheetLearnEntity.setTitle(req.getTitle());
mapSheetLearnEntity.setRunningModelType("M1"); mapSheetLearnEntity.setRunningModelType(ModelType.G1.getId());
mapSheetLearnEntity.setM1ModelUuid(req.getModel1Uuid()); mapSheetLearnEntity.setM1ModelUuid(req.getModel1Uuid());
mapSheetLearnEntity.setM2ModelUuid(req.getModel2Uuid()); mapSheetLearnEntity.setM2ModelUuid(req.getModel2Uuid());
mapSheetLearnEntity.setM3ModelUuid(req.getModel3Uuid()); mapSheetLearnEntity.setM3ModelUuid(req.getModel3Uuid());
mapSheetLearnEntity.setCompareYyyy(req.getCompareYyyy()); mapSheetLearnEntity.setCompareYyyy(req.getCompareYyyy());
mapSheetLearnEntity.setTargetYyyy(req.getTargetYyyy()); mapSheetLearnEntity.setTargetYyyy(req.getTargetYyyy());
mapSheetLearnEntity.setMapSheetScope(req.getMapSheetScope()); mapSheetLearnEntity.setMapSheetScope(req.getMapSheetScope());
mapSheetLearnEntity.setDetectOption(req.getDetectOption()); mapSheetLearnEntity.setDetectOption(req.getDetectOption().getId());
mapSheetLearnEntity.setCreatedUid(userUtil.getId()); mapSheetLearnEntity.setCreatedUid(userUtil.getId());
mapSheetLearnEntity.setMapSheetCnt(mapSheetName); mapSheetLearnEntity.setMapSheetCnt(mapSheetName);
mapSheetLearnEntity.setDetectingCnt(0L); mapSheetLearnEntity.setDetectingCnt(0L);
mapSheetLearnEntity.setTotalJobs((long) targetList.size()); mapSheetLearnEntity.setTotalJobs((long) targetList.size());
// 회차는 국유인 반영할때 update로 변경됨
// mapSheetLearnEntity.setStage(
// mapSheetLearnRepository.getLearnStage(req.getCompareYyyy(), req.getTargetYyyy()));
// learn 테이블 저장 // learn 테이블 저장
MapSheetLearnEntity savedLearn = mapSheetLearnRepository.save(mapSheetLearnEntity); MapSheetLearnEntity savedLearn = mapSheetLearnRepository.save(mapSheetLearnEntity);
final int CHUNK = 1000; final int CHUNK = 1000;
List<MapSheetLearn5kEntity> buffer = new ArrayList<>(CHUNK); List<MapSheetLearn5kEntity> buffer = new ArrayList<>(CHUNK);
// learn 도엽별 저장 // learn 도엽별 저장, 도엽수가 많으므로 1000개 씩 저장함
for (MngListDto mngDto : targetList) { for (MngListDto mngDto : targetList) {
MapSheetLearn5kEntity entity = new MapSheetLearn5kEntity(); MapSheetLearn5kEntity entity = new MapSheetLearn5kEntity();
entity.setLearn(savedLearn); entity.setLearn(savedLearn);
@@ -144,12 +143,15 @@ public class InferenceResultCoreService {
buffer.add(entity); buffer.add(entity);
if (buffer.size() == CHUNK) { if (buffer.size() == CHUNK) {
// 도엽별 저장 learn 5k 테이블
flushChunk(buffer); flushChunk(buffer);
buffer.clear(); buffer.clear();
} }
} }
// chunk 남은거 처리
if (!buffer.isEmpty()) { if (!buffer.isEmpty()) {
// 도엽별 저장 learn 5k 테이블
flushChunk(buffer); flushChunk(buffer);
buffer.clear(); buffer.clear();
} }
@@ -158,9 +160,9 @@ public class InferenceResultCoreService {
} }
/** /**
* 도엽별 저장 * 도엽별 저장 learn 5k 테이블
* *
* @param buffer * @param buffer 저장 정보
*/ */
private void flushChunk(List<MapSheetLearn5kEntity> buffer) { private void flushChunk(List<MapSheetLearn5kEntity> buffer) {
@@ -270,7 +272,7 @@ public class InferenceResultCoreService {
.getInferenceResultByUuid(request.getUuid()) .getInferenceResultByUuid(request.getUuid())
.orElseThrow(EntityNotFoundException::new); .orElseThrow(EntityNotFoundException::new);
// M1/M2/M3 영역 업데이트 // G1/G2/G3 영역 업데이트
if (request.getType() != null) { if (request.getType() != null) {
applyModelUpdate(entity, request); applyModelUpdate(entity, request);
} }
@@ -282,7 +284,7 @@ public class InferenceResultCoreService {
.distinct() // 중복 방지 (선택) .distinct() // 중복 방지 (선택)
.toList(); .toList();
// testing 추론결과 테이블 조회하여 탐지 개수 업데이트 // testing 테이블 추론결과 테이블 조회하여 탐지 개수 업데이트
Long testing = getInferenceResultCnt(batchIds); Long testing = getInferenceResultCnt(batchIds);
// 공통 영역 업데이트 // 공통 영역 업데이트
@@ -301,7 +303,7 @@ public class InferenceResultCoreService {
private void applyModelUpdate(MapSheetLearnEntity entity, SaveInferenceAiDto request) { private void applyModelUpdate(MapSheetLearnEntity entity, SaveInferenceAiDto request) {
switch (request.getType()) { switch (request.getType()) {
case "M1" -> case "G1" ->
applyModelFields( applyModelFields(
request, request,
entity::setM1ModelBatchId, entity::setM1ModelBatchId,
@@ -311,7 +313,7 @@ public class InferenceResultCoreService {
entity::setM1RunningJobs, entity::setM1RunningJobs,
entity::setM1CompletedJobs, entity::setM1CompletedJobs,
entity::setM1FailedJobs); entity::setM1FailedJobs);
case "M2" -> case "G2" ->
applyModelFields( applyModelFields(
request, request,
entity::setM2ModelBatchId, entity::setM2ModelBatchId,
@@ -321,7 +323,7 @@ public class InferenceResultCoreService {
entity::setM2RunningJobs, entity::setM2RunningJobs,
entity::setM2CompletedJobs, entity::setM2CompletedJobs,
entity::setM2FailedJobs); entity::setM2FailedJobs);
case "M3" -> case "G3" ->
applyModelFields( applyModelFields(
request, request,
entity::setM3ModelBatchId, entity::setM3ModelBatchId,
@@ -368,6 +370,12 @@ public class InferenceResultCoreService {
return mapSheetLearnRepository.getInferenceServerStatusList(); return mapSheetLearnRepository.getInferenceServerStatusList();
} }
/**
* 진행중 배치 조회
*
* @param status
* @return
*/
public InferenceBatchSheet getInferenceResultByStatus(String status) { public InferenceBatchSheet getInferenceResultByStatus(String status) {
MapSheetLearnEntity entity = MapSheetLearnEntity entity =
mapSheetLearnRepository.getInferenceResultByStatus(status).orElse(null); mapSheetLearnRepository.getInferenceResultByStatus(status).orElse(null);
@@ -402,6 +410,12 @@ public class InferenceResultCoreService {
return mapSheetLearnRepository.getInferenceAiResultById(id, modelUuid); return mapSheetLearnRepository.getInferenceAiResultById(id, modelUuid);
} }
/**
* 추론 진행 현황 상세
*
* @param uuid
* @return
*/
public InferenceStatusDetailDto getInferenceStatus(UUID uuid) { public InferenceStatusDetailDto getInferenceStatus(UUID uuid) {
return mapSheetLearnRepository.getInferenceStatus(uuid); return mapSheetLearnRepository.getInferenceStatus(uuid);
} }
@@ -409,7 +423,7 @@ public class InferenceResultCoreService {
/** /**
* 추론 진행중인지 확인 * 추론 진행중인지 확인
* *
* @return * @return 추론 실행중인 추론 uuid, batch id
*/ */
public SaveInferenceAiDto getProcessing() { public SaveInferenceAiDto getProcessing() {
MapSheetLearnEntity entity = mapSheetLearnRepository.getProcessing(); MapSheetLearnEntity entity = mapSheetLearnRepository.getProcessing();
@@ -440,19 +454,45 @@ public class InferenceResultCoreService {
* @return * @return
*/ */
public AnalResultInfo getInferenceResultInfo(UUID uuid) { public AnalResultInfo getInferenceResultInfo(UUID uuid) {
// 추론 결과 정보조회
log.info("get inference result info start time = {}", LocalDateTime.now());
AnalResultInfo resultInfo = mapSheetLearnRepository.getInferenceResultInfo(uuid); AnalResultInfo resultInfo = mapSheetLearnRepository.getInferenceResultInfo(uuid);
log.info("get inference result info end time = {}", LocalDateTime.now());
// bbox, point 조회
log.info("get inference result info bbox start time = {}", LocalDateTime.now());
BboxPointDto bboxPointDto = mapSheetLearnRepository.getBboxPoint(uuid); BboxPointDto bboxPointDto = mapSheetLearnRepository.getBboxPoint(uuid);
log.info("get inference result info bbox end time = {}", LocalDateTime.now());
resultInfo.setBboxGeom(bboxPointDto.getBboxGeom()); resultInfo.setBboxGeom(bboxPointDto.getBboxGeom());
resultInfo.setBboxCenterPoint(bboxPointDto.getBboxCenterPoint()); resultInfo.setBboxCenterPoint(bboxPointDto.getBboxCenterPoint());
return resultInfo; return resultInfo;
} }
/**
* 분류별 탐지건수 조회
*
* @param uuid 추론 uuid
* @return 분류별 탐지건수 정보
*/
public List<Dashboard> getInferenceClassCountList(UUID uuid) { public List<Dashboard> getInferenceClassCountList(UUID uuid) {
return mapSheetLearnRepository.getInferenceClassCountList(uuid); log.info("get inference class count list start time = {}", LocalDateTime.now());
List<Dashboard> classCountList = mapSheetLearnRepository.getInferenceClassCountList(uuid);
log.info("get inference class count list end time = {}", LocalDateTime.now());
return classCountList;
} }
/**
* @param uuid 추론 uuid
* @param searchGeoReq 추론 결과 상세화면 geom 조회 조건
* @return geom 목록 정보
*/
public Page<Geom> getInferenceGeomList(UUID uuid, SearchGeoReq searchGeoReq) { public Page<Geom> getInferenceGeomList(UUID uuid, SearchGeoReq searchGeoReq) {
return mapSheetLearnRepository.getInferenceGeomList(uuid, searchGeoReq); log.info("get Inference Geom List start time = {}", LocalDateTime.now());
Page<Geom> geom = mapSheetLearnRepository.getInferenceGeomList(uuid, searchGeoReq);
log.info("get Inference Geom List end time = {}", LocalDateTime.now());
return geom;
} }
/** /**
@@ -462,9 +502,13 @@ public class InferenceResultCoreService {
*/ */
@Transactional @Transactional
public void upsertGeomData(Long id) { public void upsertGeomData(Long id) {
// 추론 결과 목록 저장
Long analId = inferenceResultRepository.upsertGroupsFromMapSheetAnal(id); Long analId = inferenceResultRepository.upsertGroupsFromMapSheetAnal(id);
// 추론 결과 상세 저장
inferenceResultRepository.upsertGroupsFromInferenceResults(analId); inferenceResultRepository.upsertGroupsFromInferenceResults(analId);
// geom 목록 추론 결과 저장
inferenceResultRepository.upsertGeomsFromInferenceResults(analId); inferenceResultRepository.upsertGeomsFromInferenceResults(analId);
// 집계 추론 결과 저장
inferenceResultRepository.upsertSttcFromInferenceResults(analId); inferenceResultRepository.upsertSttcFromInferenceResults(analId);
} }
@@ -497,26 +541,33 @@ public class InferenceResultCoreService {
} }
/** /**
* 추론 결과 shp파일 생성위해서 조회 * 추론 결과 조회
* *
* @param batchIds * @param batchIds
* @return * @return
*/ */
public List<InferenceResultsTestingDto.ShpDto> getInferenceResults(List<Long> batchIds) { public List<InferenceResultsTestingDto.Basic> getInferenceResults(List<Long> batchIds) {
List<InferenceResultsTestingEntity> list = List<InferenceResultsTestingEntity> list =
inferenceResultsTestingRepository.getInferenceResultList(batchIds); inferenceResultsTestingRepository.getInferenceResultList(batchIds);
return list.stream().map(InferenceResultsTestingDto.ShpDto::fromEntity).toList();
return list.stream().map(InferenceResultsTestingEntity::toDto).toList();
} }
/**
* 테스팅 테이블 조회하여 탐지건수 조회
*
* @param batchIds batchIds
* @return batchIds 조회 count 수
*/
public Long getInferenceResultCnt(List<Long> batchIds) { public Long getInferenceResultCnt(List<Long> batchIds) {
return inferenceResultsTestingRepository.getInferenceResultCnt(batchIds); return inferenceResultsTestingRepository.getInferenceResultCnt(batchIds);
} }
/** /**
* uid 조회 * 추론 정보 조회 하여 batch id, 32자 uid 리턴
* *
* @param uuid * @param uuid 추론 uuid
* @return * @return 추론정보
*/ */
public InferenceLearnDto getInferenceUid(UUID uuid) { public InferenceLearnDto getInferenceUid(UUID uuid) {
MapSheetLearnEntity entity = inferenceResultRepository.getInferenceUid(uuid).orElse(null); MapSheetLearnEntity entity = inferenceResultRepository.getInferenceUid(uuid).orElse(null);
@@ -533,7 +584,7 @@ public class InferenceResultCoreService {
} }
/** /**
* 실행중인 추론 도엽명 목록 * 분석중인 추론 도엽명 목록
* *
* @param uuid 추론 실행중인 uuid * @param uuid 추론 실행중인 uuid
* @return * @return
@@ -542,11 +593,37 @@ public class InferenceResultCoreService {
return mapSheetLearn5kRepository.getInferenceRunMapId(uuid); return mapSheetLearn5kRepository.getInferenceRunMapId(uuid);
} }
/**
* 실패 처리되어야 할 목록 중에 아직 실패로 표시되지 않은 ID 조회
*
* @param uuid 추론 uuid
* @param failMapIds AI API 연결하여 조회한 실패 job id
* @param type 모델 타입
* @return job id
*/
public List<Long> findFail5kList(UUID uuid, List<Long> failMapIds, String type) { public List<Long> findFail5kList(UUID uuid, List<Long> failMapIds, String type) {
return mapSheetLearn5kRepository.findFail5kList(uuid, failMapIds, type); return mapSheetLearn5kRepository.findFail5kList(uuid, failMapIds, type);
} }
/**
* 완료된 것으로 들어온 목록 중 실제로 존재하는 5k jobId 조회
*
* @param uuid 추론 uuid
* @param completedIds AI API 연결하여 조회한 성공 job id
* @param type 모델 타입
* @return job id
*/
public List<Long> findCompleted5kList(UUID uuid, List<Long> completedIds, String type) { public List<Long> findCompleted5kList(UUID uuid, List<Long> completedIds, String type) {
return mapSheetLearn5kRepository.findCompleted5kList(uuid, completedIds, type); return mapSheetLearn5kRepository.findCompleted5kList(uuid, completedIds, type);
} }
/**
* testing 테이블 결과로 기본정보 조회
*
* @param batchIds batch id
* @return batch id, model ver, year 정보
*/
public List<InferenceResultsTestingDto.Basic> getInferenceResultGroupList(List<Long> batchIds) {
return inferenceResultsTestingRepository.getInferenceResultGroupList(batchIds);
}
} }

View File

@@ -1,12 +1,17 @@
package com.kamco.cd.kamcoback.postgres.core; package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository; import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
@Service @Service
@Log4j2
@RequiredArgsConstructor @RequiredArgsConstructor
public class InferenceResultShpCoreService { public class InferenceResultShpCoreService {
@@ -15,15 +20,33 @@ public class InferenceResultShpCoreService {
/** /**
* inference_results 기준으로 - tb_map_sheet_anal_data_inference - * inference_results 기준으로 - tb_map_sheet_anal_data_inference -
* tb_map_sheet_anal_data_inference_geom 테이블을 최신 상태로 구성한다. * tb_map_sheet_anal_data_inference_geom 테이블을 최신 상태로 구성한다.
*
* @param id learn id
* @return
*/ */
@Transactional @Transactional
public InferenceResultShpDto.InferenceCntDto buildInferenceData(Long id) { public InferenceResultShpDto.InferenceCntDto buildInferenceData(Long id) {
Long analId = repo.upsertGroupsFromMapSheetAnal(id);
repo.upsertGroupsFromInferenceResults(analId);
repo.upsertGeomsFromInferenceResults(analId);
repo.upsertSttcFromInferenceResults(analId);
InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto();
MapSheetAnalInferenceEntity analInferenceEntity =
repo.getAnalInferenceDataByLearnId(id).orElse(null);
if (analInferenceEntity != null) {
throw new CustomApiException("CONFLICT", HttpStatus.CONFLICT);
}
Long analId = repo.upsertGroupsFromMapSheetAnal(id);
int analDataCnt = repo.upsertGroupsFromInferenceResults(analId);
int geomCnt = repo.upsertGeomsFromInferenceResults(analId);
int sttcCnt = repo.upsertSttcFromInferenceResults(analId);
InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto();
cntDto.setSheetAnalDataCnt(analDataCnt);
cntDto.setInferenceGeomCnt(geomCnt);
cntDto.setInferenceSttcnt(sttcCnt);
log.info(
"[ANAL SAVE] analId={}, tb_map_sheet_anal_data_inference={}, tb_map_sheet_anal_data_inference_geom={}, tb_map_sheet_anal_sttc={}",
analId,
analDataCnt,
geomCnt,
sttcCnt);
return cntDto; return cntDto;
} }
} }

View File

@@ -1,8 +1,11 @@
package com.kamco.cd.kamcoback.postgres.core; package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.AllocateInfoDto; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.AllocateInfoDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceLearnDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelMngState;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelerDetail;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelingStatDto;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveInfo; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.MoveInfo;
@@ -13,12 +16,18 @@ import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.ProjectInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo; import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkProgressInfo;
import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerStatistics; import com.kamco.cd.kamcoback.label.dto.WorkerStatsDto.WorkerStatistics;
import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity; import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
import com.kamco.cd.kamcoback.postgres.repository.batch.BatchStepHistoryRepository;
import com.kamco.cd.kamcoback.postgres.repository.label.LabelAllocateRepository; import com.kamco.cd.kamcoback.postgres.repository.label.LabelAllocateRepository;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDate; import java.time.LocalDate;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
@Service @Service
@@ -26,6 +35,10 @@ import org.springframework.stereotype.Service;
public class LabelAllocateCoreService { public class LabelAllocateCoreService {
private final LabelAllocateRepository labelAllocateRepository; private final LabelAllocateRepository labelAllocateRepository;
private final BatchStepHistoryRepository batchStepHistoryRepository;
@Value("${file.dataset-response}")
private String responsePath;
public List<AllocateInfoDto> fetchNextIds(Long lastId, Long batchSize, UUID uuid) { public List<AllocateInfoDto> fetchNextIds(Long lastId, Long batchSize, UUID uuid) {
return labelAllocateRepository.fetchNextIds(lastId, batchSize, uuid); return labelAllocateRepository.fetchNextIds(lastId, batchSize, uuid);
@@ -234,4 +247,47 @@ public class LabelAllocateCoreService {
public Long findLabelingIngProcessCnt() { public Long findLabelingIngProcessCnt() {
return labelAllocateRepository.findLabelingIngProcessCnt(); return labelAllocateRepository.findLabelingIngProcessCnt();
} }
/**
 * Checks whether the inference result zip for the given labeling process can be downloaded.
 *
 * <p>Returns {@code false} when no matching in-progress labeling row exists, when the
 * result zip file is not present on disk, or when the labeling state is still
 * ASSIGNED/ING and the batch step history reports the download as not yet ready.
 *
 * @param uuid labeling process identifier
 * @return true if the zip exists and the current state permits downloading
 */
public boolean isDownloadable(UUID uuid) {
    InferenceLearnDto process = labelAllocateRepository.findLabelingIngProcessId(uuid);
    if (process == null) {
        return false;
    }

    // The zip must physically exist (isRegularFile implies existence) before any state check.
    Path zipFile = Paths.get(responsePath).resolve(process.getLearnUid() + ".zip");
    if (!Files.isRegularFile(zipFile)) {
        return false;
    }

    String analState = process.getAnalState();
    boolean labelingInProgress =
        LabelMngState.ASSIGNED.getId().equals(analState)
            || LabelMngState.ING.getId().equals(analState);
    if (!labelingInProgress) {
        return true;
    }

    // While labeling is in progress, downloadability is decided by the batch step history.
    Long analId = process.getAnalId();
    return analId != null && batchStepHistoryRepository.isDownloadable(analId);
}
/**
 * Looks up the learn UID for the given labeling process.
 *
 * @param uuid labeling process identifier
 * @return the learn UID
 * @throws CustomApiException with NOT_FOUND_DATA / 404 when no row matches
 */
public String findLearnUid(UUID uuid) {
    var learnUid = labelAllocateRepository.findLearnUid(uuid);
    return learnUid.orElseThrow(() -> new CustomApiException("NOT_FOUND_DATA", HttpStatus.NOT_FOUND));
}
/**
 * Fetches the next batch of allocation rows including the stability flag (stblt_yn).
 *
 * <p>Thin delegate to the repository; {@code lastId} is presumably a keyset cursor from the
 * previous page and {@code totalCnt} the batch size — TODO confirm against the repository query.
 *
 * @param uuid allocation batch identifier
 * @param baseDate base date for the lookup
 * @param lastId last id returned by the previous call
 * @param totalCnt maximum number of rows to fetch
 * @return next batch of allocation info rows
 */
public List<AllocateInfoDto> fetchNextIdsAddStbltYn(
UUID uuid, LocalDate baseDate, Long lastId, Long totalCnt) {
return labelAllocateRepository.fetchNextIdsAddStbltYn(uuid, baseDate, lastId, totalCnt);
}
/**
 * Returns the additional-allocation count for the given batch and base date.
 *
 * <p>Thin delegate; the exact counting semantics are defined by the repository query.
 *
 * @param uuid allocation batch identifier
 * @param baseDate base date for the count
 * @return count of additional allocation candidates
 */
public Long findAllocateAddCnt(UUID uuid, LocalDate baseDate) {
return labelAllocateRepository.findAllocateAddCnt(uuid, baseDate);
}
} }

View File

@@ -87,6 +87,10 @@ public class MapLayerCoreService {
entity.setDescription(dto.getDescription()); entity.setDescription(dto.getDescription());
} }
if (dto.getLayerName() != null) {
entity.setLayerName(dto.getLayerName());
}
if (dto.getUrl() != null) { if (dto.getUrl() != null) {
entity.setUrl(dto.getUrl()); entity.setUrl(dto.getUrl());
} }
@@ -213,6 +217,7 @@ public class MapLayerCoreService {
Long order = mapLayerRepository.findSortOrderDesc(); Long order = mapLayerRepository.findSortOrderDesc();
MapLayerEntity mapLayerEntity = new MapLayerEntity(); MapLayerEntity mapLayerEntity = new MapLayerEntity();
mapLayerEntity.setLayerName(dto.getLayerName());
mapLayerEntity.setDescription(dto.getDescription()); mapLayerEntity.setDescription(dto.getDescription());
mapLayerEntity.setUrl(dto.getUrl()); mapLayerEntity.setUrl(dto.getUrl());
mapLayerEntity.setTag(dto.getTag()); mapLayerEntity.setTag(dto.getTag());
@@ -243,6 +248,7 @@ public class MapLayerCoreService {
Long order = mapLayerRepository.findSortOrderDesc(); Long order = mapLayerRepository.findSortOrderDesc();
MapLayerEntity mapLayerEntity = new MapLayerEntity(); MapLayerEntity mapLayerEntity = new MapLayerEntity();
mapLayerEntity.setLayerName(addDto.getLayerName());
mapLayerEntity.setDescription(addDto.getDescription()); mapLayerEntity.setDescription(addDto.getDescription());
mapLayerEntity.setUrl(addDto.getUrl()); mapLayerEntity.setUrl(addDto.getUrl());
mapLayerEntity.setTag(addDto.getTag()); mapLayerEntity.setTag(addDto.getTag());
@@ -273,6 +279,7 @@ public class MapLayerCoreService {
} }
MapLayerEntity mapLayerEntity = new MapLayerEntity(); MapLayerEntity mapLayerEntity = new MapLayerEntity();
mapLayerEntity.setLayerName(addDto.getLayerName());
mapLayerEntity.setTitle(addDto.getTitle()); mapLayerEntity.setTitle(addDto.getTitle());
mapLayerEntity.setDescription(addDto.getDescription()); mapLayerEntity.setDescription(addDto.getDescription());
mapLayerEntity.setCreatedUid(userUtil.getId()); mapLayerEntity.setCreatedUid(userUtil.getId());
@@ -305,6 +312,7 @@ public class MapLayerCoreService {
} }
MapLayerEntity mapLayerEntity = new MapLayerEntity(); MapLayerEntity mapLayerEntity = new MapLayerEntity();
mapLayerEntity.setLayerName(addDto.getLayerName());
mapLayerEntity.setTitle(addDto.getTitle()); mapLayerEntity.setTitle(addDto.getTitle());
mapLayerEntity.setDescription(addDto.getDescription()); mapLayerEntity.setDescription(addDto.getDescription());
mapLayerEntity.setCreatedUid(userUtil.getId()); mapLayerEntity.setCreatedUid(userUtil.getId());

View File

@@ -1,13 +1,8 @@
package com.kamco.cd.kamcoback.postgres.core; package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.common.enums.MngStateType; import com.kamco.cd.kamcoback.common.enums.MngStateType;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter; import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter;
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature; import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.Scene;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.DetectOption;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto; import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListCompareDto; import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListCompareDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto; import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto;
@@ -21,21 +16,21 @@ import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.Valid; import jakarta.validation.Valid;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Optional; import java.util.Optional;
import java.util.UUID; import java.util.UUID;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
@@ -53,13 +48,20 @@ public class MapSheetMngCoreService {
@Value("${file.sync-root-dir}") @Value("${file.sync-root-dir}")
private String syncRootDir; private String syncRootDir;
@Value("${inference.geojson-dir}") /**
private String inferenceDir; * 영상데이터관리 > 목록 조회
*
* @return
*/
public List<MapSheetMngDto.MngDto> findMapSheetMngList() { public List<MapSheetMngDto.MngDto> findMapSheetMngList() {
return mapSheetMngRepository.findMapSheetMngList(); return mapSheetMngRepository.findMapSheetMngList();
} }
/**
* 영상데이터관리 > 데이터 등록 > 연도 선택 목록
*
* @return
*/
public List<Integer> findMapSheetMngYyyyList() { public List<Integer> findMapSheetMngYyyyList() {
return mapSheetMngRepository.findMapSheetMngYyyyList(); return mapSheetMngRepository.findMapSheetMngYyyyList();
} }
@@ -76,6 +78,12 @@ public class MapSheetMngCoreService {
return mapSheetMngRepository.getYears(req); return mapSheetMngRepository.getYears(req);
} }
/**
* 영상데이터관리 > 상세 조회
*
* @param mngYyyy
* @return
*/
public MapSheetMngDto.MngDto findMapSheetMng(int mngYyyy) { public MapSheetMngDto.MngDto findMapSheetMng(int mngYyyy) {
return mapSheetMngRepository.findMapSheetMng(mngYyyy); return mapSheetMngRepository.findMapSheetMng(mngYyyy);
} }
@@ -103,15 +111,33 @@ public class MapSheetMngCoreService {
mapSheetMngRepository.updateMapSheetMngHstSyncCheckState(reqDto); mapSheetMngRepository.updateMapSheetMngHstSyncCheckState(reqDto);
} }
/**
* 영상데이터관리 > 상세 > 오류 처리 내역
*
* @param searchReq 오류 검색 조건
* @return
*/
public Page<MapSheetMngDto.ErrorDataDto> findMapSheetErrorList( public Page<MapSheetMngDto.ErrorDataDto> findMapSheetErrorList(
MapSheetMngDto.@Valid ErrorSearchReq searchReq) { MapSheetMngDto.@Valid ErrorSearchReq searchReq) {
return mapSheetMngRepository.findMapSheetErrorList(searchReq); return mapSheetMngRepository.findMapSheetErrorList(searchReq);
} }
/**
* hst 테이블에 선택한 hstUid row 정보 조회
*
* @param hstUid
* @return
*/
public MapSheetMngDto.ErrorDataDto findMapSheetError(Long hstUid) { public MapSheetMngDto.ErrorDataDto findMapSheetError(Long hstUid) {
return mapSheetMngRepository.findMapSheetError(hstUid); return mapSheetMngRepository.findMapSheetError(hstUid);
} }
/**
* 파일 목록으로 업로드 경로 확인
*
* @param hstUid
* @return
*/
public List<MapSheetMngDto.MngFilesDto> findByHstUidMapSheetFileList(Long hstUid) { public List<MapSheetMngDto.MngFilesDto> findByHstUidMapSheetFileList(Long hstUid) {
return mapSheetMngRepository.findByHstUidMapSheetFileList(hstUid); return mapSheetMngRepository.findByHstUidMapSheetFileList(hstUid);
} }
@@ -154,12 +180,19 @@ public class MapSheetMngCoreService {
entity.setCreatedUid(addReq.getCreatedUid()); entity.setCreatedUid(addReq.getCreatedUid());
entity.setUpdatedUid(addReq.getCreatedUid()); entity.setUpdatedUid(addReq.getCreatedUid());
// 같은 년도로 저장된 데이터가 있다면 삭제
// tb_map_sheet_mng, tb_map_sheet_hst, tb_map_sheet_mng_files, tb_map_sheet_mng_tile
mapSheetMngRepository.deleteByMngYyyyMngAll(addReq.getMngYyyy()); mapSheetMngRepository.deleteByMngYyyyMngAll(addReq.getMngYyyy());
// tb_map_sheet_mng 엔티티 저장
MapSheetMngEntity saved = mapSheetMngRepository.save(entity); MapSheetMngEntity saved = mapSheetMngRepository.save(entity);
// 5k 도엽 기준으로 tb_map_sheet_hst 테이블에 먼저 insert 하기
int hstCnt = int hstCnt =
mapSheetMngRepository.insertMapSheetOrgDataToMapSheetMngHst( mapSheetMngRepository.insertMapSheetOrgDataToMapSheetMngHst(
saved.getMngYyyy(), saved.getMngPath()); saved.getMngYyyy(), saved.getMngPath());
// tb_year 에 해당 년도 완료로 업데이트
mapSheetMngRepository.updateYearState(saved.getMngYyyy(), "DONE"); mapSheetMngRepository.updateYearState(saved.getMngYyyy(), "DONE");
// 년도별 Tile 정보 등록 // 년도별 Tile 정보 등록
@@ -233,86 +266,70 @@ public class MapSheetMngCoreService {
} }
/** /**
* 추론 실행에 필요한 geojson 파일 생성 * geojson 생성시 필요한 영상파일 정보 조회
* *
* @param yyyy 영상관리 파일별 년도 * @param yyyy
* @param scenes 5k 도엽 번호 리스트 * @param mapSheetNums
* @param mapSheetScope EXCL : 추론제외, PREV 이전 년도 도엽 사용 * @return ImageFeature
* @return
*/ */
public Scene getSceneInference( public List<ImageFeature> loadSceneInferenceBySheets(String yyyy, List<String> mapSheetNums) {
String yyyy, List<String> scenes, String mapSheetScope, String detectOption) {
Map<String, Object> result = new HashMap<>(); if (mapSheetNums == null || mapSheetNums.isEmpty()) {
boolean isAll = MapSheetScope.ALL.getId().equals(mapSheetScope); return List.of();
String optionSuffix = "";
if (DetectOption.EXCL.getId().equals(detectOption)) {
optionSuffix = "_EXCL";
} else if (DetectOption.PREV.getId().equals(detectOption)) {
optionSuffix = "_PREV";
} }
// 1) 경로/파일명 결정 // CHUNK_SIZE 단위로 나누어 여러 번 조회한다.
String targetDir = final int CHUNK_SIZE = 1000;
"local".equals(activeEnv) ? System.getProperty("user.home") + "/geojson" : inferenceDir; List<ImageFeature> features = new ArrayList<>();
String filename = // i부터 CHUNK_SIZE만큼 잘라서 조회
isAll // 마지막 구간은 남은 개수만큼만 처리하기 위해 Math.min 사용
? String.format("%s_%s_ALL%s.geojson", yyyy, activeEnv, optionSuffix) for (int i = 0; i < mapSheetNums.size(); i += CHUNK_SIZE) {
: String.format("%s_%s%s.geojson", yyyy, activeEnv, optionSuffix); List<String> chunk = mapSheetNums.subList(i, Math.min(i + CHUNK_SIZE, mapSheetNums.size()));
features.addAll(mapSheetMngRepository.getSceneInference(yyyy, chunk));
Path outputPath = Paths.get(targetDir, filename);
// 2) ALL일 때만 재사용
// if (isAll && Files.exists(outputPath)) {
// return outputPath.toString();
// }
// 3) 데이터 조회
List<ImageFeature> sceneInference = mapSheetMngRepository.getSceneInference(yyyy, scenes);
if (sceneInference == null || sceneInference.isEmpty()) {
log.warn(
"NOT_FOUND_TARGET_YEAR: yyyy={}, isAll={}, scenesSize={}",
yyyy,
isAll,
scenes == null ? 0 : scenes.size());
throw new CustomApiException("NOT_FOUND_TARGET_YEAR", HttpStatus.NOT_FOUND);
}
// 4) 파일 생성
try {
Files.createDirectories(outputPath.getParent());
new GeoJsonFileWriter()
.exportToFile(sceneInference, "scene_inference_" + yyyy, 5186, outputPath.toString());
Scene scene = new Scene();
scene.setFeatures(sceneInference);
scene.setFilePath(outputPath.toString());
return scene;
} catch (IOException e) {
log.error(
"FAIL_CREATE_MAP_SHEET_FILE: yyyy={}, isAll={}, path={}", yyyy, isAll, outputPath, e);
throw new CustomApiException("FAIL_CREATE_MAP_SHEET_FILE", HttpStatus.INTERNAL_SERVER_ERROR);
} }
return features;
} }
/** /**
* 변화탐지 실행 가능 기준 년도 조회 * 년도별로 나눠 조회
* *
* @param req * @param yearDtos
* @return * @return ImageFeature
*/ */
public List<MngListDto> getHstMapSheetList(InferenceResultDto.RegReq req) { public List<ImageFeature> loadSceneInferenceByFallbackYears(List<MngListDto> yearDtos) {
return mapSheetMngRepository.findByHstMapSheetTargetList(req); if (yearDtos == null || yearDtos.isEmpty()) {
} return List.of();
}
public List<MngListDto> getHstMapSheetList(int mngYyyy, List<String> mapIds) { // 년도 별로 루프를 돌리기위해 년도별 정리
return mapSheetMngRepository.findByHstMapSheetTargetList(mngYyyy, mapIds); Map<Integer, List<MngListDto>> groupedByYear =
yearDtos.stream()
.filter(d -> d.getMngYyyy() != 0 && d.getMapSheetNum() != null)
.collect(Collectors.groupingBy(MngListDto::getMngYyyy));
List<ImageFeature> sceneInference = new ArrayList<>();
for (Map.Entry<Integer, List<MngListDto>> entry : groupedByYear.entrySet()) {
Integer year = entry.getKey();
// 년도별 mapSheetNum 만들기
List<String> sheetNums =
entry.getValue().stream()
.map(MngListDto::getMapSheetNum)
.filter(Objects::nonNull)
.distinct()
.toList();
// tif파일 정보 조회
List<ImageFeature> temp = loadSceneInferenceBySheets(year.toString(), sheetNums);
if (temp != null && !temp.isEmpty()) {
sceneInference.addAll(temp);
}
}
return sceneInference;
} }
public void updateMapSheetMngHstUploadId(Long hstUid, UUID uuid, String uploadId) { public void updateMapSheetMngHstUploadId(Long hstUid, UUID uuid, String uploadId) {
@@ -328,10 +345,38 @@ public class MapSheetMngCoreService {
* 변화탐지 실행 가능 비교년도 조회 * 변화탐지 실행 가능 비교년도 조회
* *
* @param mngYyyy 비교년도 * @param mngYyyy 비교년도
* @param mapId 5k 도엽번호 * @param mapIds 5k 도엽번호
* @return List<MngListCompareDto> * @return List<MngListCompareDto>
*/ */
public List<MngListCompareDto> getByHstMapSheetCompareList(int mngYyyy, List<String> mapId) { public List<MngListCompareDto> getByHstMapSheetCompareList(int mngYyyy, List<String> mapIds) {
return mapSheetMngYearRepository.findByHstMapSheetCompareList(mngYyyy, mapId); return mapSheetMngYearRepository.findByHstMapSheetCompareList(mngYyyy, mapIds);
}
public List<MngListDto> getMapSheetMngHst(Integer year, List<String> mapSheetNums50k) {
return mapSheetMngRepository.getMapSheetMngHst(year, mapSheetNums50k);
}
/**
* 이전 년도 도엽 조회 조건이 많을 수 있으므로 chunk 줘서 끊어서 조회
*
* @param year
* @param mapIds
* @return
*/
public List<MngListDto> findFallbackCompareYearByMapSheets(Integer year, List<String> mapIds) {
if (mapIds == null || mapIds.isEmpty()) {
return Collections.emptyList();
}
int chunkSize = 1000;
List<MngListDto> result = new ArrayList<>();
for (int i = 0; i < mapIds.size(); i += chunkSize) {
List<String> chunk = mapIds.subList(i, Math.min(i + chunkSize, mapIds.size()));
result.addAll(mapSheetMngRepository.findFallbackCompareYearByMapSheets(year, chunk));
}
return result;
} }
} }

View File

@@ -6,6 +6,7 @@ import com.kamco.cd.kamcoback.postgres.repository.mapsheet.MapSheetMngYearReposi
import com.kamco.cd.kamcoback.postgres.repository.scheduler.MapSheetMngFileJobRepository; import com.kamco.cd.kamcoback.postgres.repository.scheduler.MapSheetMngFileJobRepository;
import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto; import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto;
import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto.MngHstDto; import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto.MngHstDto;
import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto.YearMinMax;
import jakarta.validation.Valid; import jakarta.validation.Valid;
import java.util.List; import java.util.List;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
@@ -67,9 +68,10 @@ public class MapSheetMngFileJobCoreService {
return mapSheetMngFileJobRepository.findNotYetMapSheetMng(); return mapSheetMngFileJobRepository.findNotYetMapSheetMng();
} }
public Long findByHstMapSheetBeforeYyyyListCount(int strtYyyy, int endYyyy, String mapSheetNum) { public Long findByHstMapSheetBeforeYyyyListCount(
int mngYyyy, int strtYyyy, int endYyyy, String mapSheetNum) {
return mapSheetMngFileJobRepository.findByHstMapSheetBeforeYyyyListCount( return mapSheetMngFileJobRepository.findByHstMapSheetBeforeYyyyListCount(
strtYyyy, endYyyy, mapSheetNum); mngYyyy, strtYyyy, endYyyy, mapSheetNum);
} }
public void updateException5kMapSheet(String mapSheetNum, CommonUseStatus commonUseStatus) { public void updateException5kMapSheet(String mapSheetNum, CommonUseStatus commonUseStatus) {
@@ -79,4 +81,16 @@ public class MapSheetMngFileJobCoreService {
public void saveSheetMngYear() { public void saveSheetMngYear() {
mapSheetMngYearRepository.saveFileInfo(); mapSheetMngYearRepository.saveFileInfo();
} }
/**
 * Returns the min/max year information held by the year repository.
 *
 * @return min/max year pair
 */
public YearMinMax findYearMinMaxInfo() {
return mapSheetMngYearRepository.findYearMinMaxInfo();
}
/**
 * Counts management rows for the given year.
 *
 * @param mngYyyy management year
 * @return row count for that year
 */
public Long findMngYyyyCnt(Integer mngYyyy) {
return mapSheetMngFileJobRepository.findMngYyyyCnt(mngYyyy);
}
/**
 * Counts use-excepted entries for the given 5k map sheet number.
 *
 * @param mapSheetNum 5k map sheet number
 * @return count of excepted entries
 */
public Long findMapSheetUseExceptCnt(String mapSheetNum) {
return mapSheetMngFileJobRepository.findMapSheetUseExceptCnt(mapSheetNum);
}
} }

View File

@@ -17,6 +17,16 @@ public class ModelMngCoreService {
private final ModelMngRepository modelMngRepository; private final ModelMngRepository modelMngRepository;
/**
* 모델조회
*
* @param searchReq 페이징
* @param startDate 시작날짜
* @param endDate 종료날짜
* @param modelType 모델 타입 G1, G2, G3
* @param searchVal 모델 ver
* @return 모델 목록
*/
public Page<ModelMngDto.ModelList> findModelMgmtList( public Page<ModelMngDto.ModelList> findModelMgmtList(
ModelMngDto.searchReq searchReq, ModelMngDto.searchReq searchReq,
LocalDate startDate, LocalDate startDate,
@@ -99,4 +109,18 @@ public class ModelMngCoreService {
.orElseThrow(() -> new EntityNotFoundException("모델 정보가 없습니다.")); .orElseThrow(() -> new EntityNotFoundException("모델 정보가 없습니다."));
return entity.toDto(); return entity.toDto();
} }
/**
 * Finds a model by its version string.
 *
 * @param ver model version
 * @return the model's basic DTO
 * @throws EntityNotFoundException when no model with that version exists
 */
public ModelMngDto.Basic findByModelVer(String ver) {
    return modelMngRepository
        .findByModelVer(ver)
        .map(ModelMngEntity::toDto)
        .orElseThrow(() -> new EntityNotFoundException("모델 정보가 없습니다."));
}
} }

View File

@@ -3,6 +3,7 @@ package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.repository.scheduler.TrainingDataLabelJobRepository; import com.kamco.cd.kamcoback.postgres.repository.scheduler.TrainingDataLabelJobRepository;
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.InspectorPendingDto; import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.InspectorPendingDto;
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.Tasks; import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.Tasks;
import java.time.LocalDate;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
@@ -14,8 +15,8 @@ public class TrainingDataLabelJobCoreService {
private final TrainingDataLabelJobRepository trainingDataLabelJobRepository; private final TrainingDataLabelJobRepository trainingDataLabelJobRepository;
public List<Tasks> findCompletedYesterdayUnassigned() { public List<Tasks> findCompletedYesterdayUnassigned(LocalDate baseDate) {
return trainingDataLabelJobRepository.findCompletedYesterdayUnassigned(); return trainingDataLabelJobRepository.findCompletedYesterdayUnassigned(baseDate);
} }
public void assignReviewerBatch(List<UUID> assignmentUids, String reviewerId) { public void assignReviewerBatch(List<UUID> assignmentUids, String reviewerId) {

View File

@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.postgres.repository.scheduler.TrainingDataReviewJo
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.AnalCntInfo; import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.AnalCntInfo;
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.AnalMapSheetList; import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.AnalMapSheetList;
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.CompleteLabelData; import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.CompleteLabelData;
import java.time.LocalDate;
import java.util.List; import java.util.List;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
@@ -15,12 +16,13 @@ public class TrainingDataReviewJobCoreService {
private final TrainingDataReviewJobRepository trainingDataReviewJobRepository; private final TrainingDataReviewJobRepository trainingDataReviewJobRepository;
public List<CompleteLabelData> findCompletedYesterdayLabelingList( public List<CompleteLabelData> findCompletedYesterdayLabelingList(
Long analUid, String mapSheetNum) { Long analUid, String mapSheetNum, LocalDate baseDate) {
return trainingDataReviewJobRepository.findCompletedYesterdayLabelingList(analUid, mapSheetNum); return trainingDataReviewJobRepository.findCompletedYesterdayLabelingList(
analUid, mapSheetNum, baseDate);
} }
public List<AnalMapSheetList> findCompletedAnalMapSheetList(Long analUid) { public List<AnalMapSheetList> findCompletedAnalMapSheetList(Long analUid, LocalDate baseDate) {
return trainingDataReviewJobRepository.findCompletedAnalMapSheetList(analUid); return trainingDataReviewJobRepository.findCompletedAnalMapSheetList(analUid, baseDate);
} }
public List<AnalCntInfo> findAnalCntInfoList() { public List<AnalCntInfo> findAnalCntInfoList() {

View File

@@ -0,0 +1,62 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.LocalDateTime;
import lombok.Getter;
import lombok.Setter;
/**
 * JPA entity mapped to the {@code batch_step_history} table.
 *
 * <p>Each row records one execution step of an analysis batch: the step name, its status,
 * an optional error message, and start/completion timestamps.
 */
@Getter
@Setter
@Entity
@Table(name = "batch_step_history")
public class BatchStepHistoryEntity {
// Surrogate primary key (database identity column).
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "id", nullable = false)
private Long id;
// Owning analysis id — plain column, no mapped JPA association here.
@NotNull
@Column(name = "anal_uid", nullable = false)
private Long analUid;
// External result identifier (string, max 255).
@Size(max = 255)
@NotNull
@Column(name = "result_uid", nullable = false)
private String resultUid;
// Name of the executed batch step (max 100).
@Size(max = 100)
@NotNull
@Column(name = "step_name", nullable = false, length = 100)
private String stepName;
// Step status code (max 50). NOTE(review): allowed values are not visible here — confirm.
@Size(max = 50)
@NotNull
@Column(name = "status", nullable = false, length = 50)
private String status;
// Optional error detail; unbounded text column.
@Column(name = "error_message", length = Integer.MAX_VALUE)
private String errorMessage;
// When the step started (required).
@NotNull
@Column(name = "started_dttm", nullable = false)
private LocalDateTime startedDttm;
// When the step completed; null while still running or if it never finished.
@Column(name = "completed_dttm")
private LocalDateTime completedDttm;
// Row audit timestamps (required; presumably set by the writer — no @PrePersist here).
@NotNull
@Column(name = "created_dttm", nullable = false)
private LocalDateTime createdDttm;
@NotNull
@Column(name = "updated_dttm", nullable = false)
private LocalDateTime updatedDttm;
}

View File

@@ -11,37 +11,94 @@ import lombok.Getter;
import lombok.Setter; import lombok.Setter;
import org.hibernate.annotations.ColumnDefault; import org.hibernate.annotations.ColumnDefault;
/**
* GPU 메트릭 엔티티
*
* <p>서버의 GPU 성능 및 자원 사용량 메트릭 데이터를 저장하는 JPA 엔티티입니다. GPU 연산 사용률 및 메모리 사용량 등 GPU 리소스 모니터링 데이터를 관리합니다.
*
* <p>데이터 소스: nvidia-smi 명령어 또는 NVML (NVIDIA Management Library)
*
* <p>활용 사례:
*
* <ul>
* <li>AI/ML 학습 모니터링: 딥러닝 작업 중 GPU 활용도 추적
* <li>리소스 최적화: GPU 메모리 부족 또는 유휴 상태 감지
* <li>용량 계획: GPU 추가 필요 시점 예측
* <li>알림 설정: gpuUtil > 95% 또는 gpuMemUsed/gpuMemTotal > 90% 시 경고
* </ul>
*/
@Getter @Getter
@Setter @Setter
@Entity @Entity
@Table(name = "gpu_metrics") @Table(name = "gpu_metrics")
public class GpuMetricEntity { public class GpuMetricEntity {
/** 기본 키 (UUID, 자동 생성) */
@Id @Id
@ColumnDefault("gen_random_uuid()") @ColumnDefault("gen_random_uuid()")
@Column(name = "uuid", nullable = false) @Column(name = "uuid", nullable = false)
private UUID id; private UUID id;
/** 시퀀스 기반 보조 ID */
@NotNull @NotNull
@ColumnDefault("nextval('gpu_metrics_id_seq')") @ColumnDefault("nextval('gpu_metrics_id_seq')")
@Column(name = "id", nullable = false) @Column(name = "id", nullable = false)
private Integer id1; private Integer id1;
/** 메트릭 수집 시각 (시간대 포함, 기본값: 현재 시각) */
@NotNull @NotNull
@ColumnDefault("now()") @ColumnDefault("now()")
@Column(name = "\"timestamp\"", nullable = false) @Column(name = "\"timestamp\"", nullable = false)
private OffsetDateTime timestamp; private OffsetDateTime timestamp;
/** 모니터링 대상 서버 이름 */
@NotNull @NotNull
@Column(name = "server_name", nullable = false, length = Integer.MAX_VALUE) @Column(name = "server_name", nullable = false, length = Integer.MAX_VALUE)
private String serverName; private String serverName;
/**
* GPU 연산 사용률 (백분율)
*
* <p>GPU 코어의 연산 처리 활용도를 나타냅니다.
*
* <p>범위: 0.0 ~ 100.0
*
* <p>예시: 85.5 = GPU가 85.5% 활용되어 연산 중
*
* <p>데이터 소스: nvidia-smi의 'utilization.gpu' 또는 NVML의 nvmlDeviceGetUtilizationRates
*
* <p>참고: 높은 사용률(>90%)은 GPU가 충분히 활용되고 있음을 의미하며, 낮은 사용률은 병목 지점이 다른 곳(CPU, I/O)에 있을 수 있음
*/
@Column(name = "gpu_util") @Column(name = "gpu_util")
private Float gpuUtil; private Float gpuUtil;
/**
* GPU 메모리 사용량 (MB 단위)
*
* <p>현재 GPU에 할당되어 사용 중인 메모리 양
*
* <p>예시: 10240.0 = 약 10GB의 GPU 메모리 사용 중
*
* <p>데이터 소스: nvidia-smi의 'memory.used' 또는 NVML의 nvmlDeviceGetMemoryInfo
*
* <p>용도: 딥러닝 모델 크기, 배치 사이즈 최적화, OOM(Out Of Memory) 에러 예측
*/
@Column(name = "gpu_mem_used") @Column(name = "gpu_mem_used")
private Float gpuMemUsed; private Float gpuMemUsed;
/**
* GPU 총 메모리 용량 (MB 단위)
*
* <p>GPU에 장착된 전체 메모리 용량
*
* <p>예시: 16384.0 = 16GB VRAM 장착
*
* <p>데이터 소스: nvidia-smi의 'memory.total' 또는 NVML의 nvmlDeviceGetMemoryInfo
*
* <p>계산식: 메모리 사용률(%) = (gpuMemUsed / gpuMemTotal) × 100
*
* <p>활용: 여유 메모리 = gpuMemTotal - gpuMemUsed
*/
@Column(name = "gpu_mem_total") @Column(name = "gpu_mem_total")
private Float gpuMemTotal; private Float gpuMemTotal;
} }

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.postgres.entity; package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
import jakarta.persistence.Column; import jakarta.persistence.Column;
import jakarta.persistence.Entity; import jakarta.persistence.Entity;
import jakarta.persistence.Id; import jakarta.persistence.Id;
@@ -84,4 +85,28 @@ public class InferenceResultsTestingEntity {
@Column(name = "geometry", columnDefinition = "geometry") @Column(name = "geometry", columnDefinition = "geometry")
private Geometry geometry; private Geometry geometry;
/**
 * Converts this entity to an {@link InferenceResultsTestingDto.Basic} DTO.
 *
 * <p>NOTE(review): the DTO is constructed positionally — the argument order below must match
 * the Basic constructor's parameter order exactly; re-verify whenever either side changes.
 */
public InferenceResultsTestingDto.Basic toDto() {
return new InferenceResultsTestingDto.Basic(
this.probability,
this.beforeYear,
this.afterYear,
this.mapId,
this.modelVersion,
this.clsModelPath,
this.clsModelVersion,
this.cdModelType,
this.id,
this.modelName,
this.batchId,
this.area,
this.beforeC,
this.beforeP,
this.afterC,
this.afterP,
this.seq,
this.createdDate,
this.uid,
this.geometry);
}
} }

View File

@@ -43,6 +43,10 @@ public class MapLayerEntity {
@Column(name = "title", length = 200) @Column(name = "title", length = 200)
private String title; private String title;
@Size(max = 255)
@Column(name = "layer_name")
private String layerName;
@Column(name = "description", length = Integer.MAX_VALUE) @Column(name = "description", length = Integer.MAX_VALUE)
private String description; private String description;
@@ -109,6 +113,7 @@ public class MapLayerEntity {
public LayerDto.Detail toDto() { public LayerDto.Detail toDto() {
return new LayerDto.Detail( return new LayerDto.Detail(
this.uuid, this.uuid,
this.layerName,
this.layerType, this.layerType,
this.title, this.title,
this.description, this.description,

View File

@@ -202,6 +202,33 @@ public class MapSheetLearnEntity {
@Column(name = "chn_dtct_mst_id") @Column(name = "chn_dtct_mst_id")
private String chnDtctMstId; private String chnDtctMstId;
// --- SHP (shapefile) generation tracking columns ---
// NOTE(review): these fields use snake_case, which violates Java camelCase convention and
// makes Lombok generate accessors like getShp_create_status(). Renaming to shpCreateStatus
// etc. (keeping the @Column names) would be cleaner, but it changes the accessor names and
// so is a breaking change for existing callers — coordinate before renaming.
// Status code of the SHP creation request; allowed values not visible here — confirm.
@Column(name = "shp_create_status")
private String shp_create_status;
// Human-readable message accompanying the creation status.
@Column(name = "shp_create_message")
private String shp_create_message;
// When shp_create_status was last updated.
@Column(name = "shp_create_status_dttm")
private ZonedDateTime shp_create_status_dttm;
// Overall SHP pipeline status; semantics defined by the writer — confirm.
@Column(name = "shp_status")
private String shp_status;
// Current SHP pipeline stage label.
@Column(name = "shp_stage")
private String shp_stage;
// SHP processing start timestamp.
@Column(name = "shp_started_dttm")
private ZonedDateTime shp_started_dttm;
// SHP processing end timestamp; presumably null while running — confirm.
@Column(name = "shp_ended_dttm")
private ZonedDateTime shp_ended_dttm;
// Last progress message reported by the SHP job.
@Column(name = "shp_last_message")
private String shp_last_message;
// Error detail when the SHP job failed.
@Column(name = "shp_error_message")
private String shp_error_message;
public InferenceResultDto.ResultList toDto() { public InferenceResultDto.ResultList toDto() {
return new InferenceResultDto.ResultList( return new InferenceResultDto.ResultList(
this.uuid, this.uuid,

View File

@@ -53,8 +53,6 @@ import lombok.NoArgsConstructor;
* system leveraging 1:5k map data. * system leveraging 1:5k map data.
*/ */
@Getter @Getter
// entity의 접근제어를 위해 @setter를 사용 x
// @Setter
@NoArgsConstructor(access = AccessLevel.PROTECTED) @NoArgsConstructor(access = AccessLevel.PROTECTED)
@Entity @Entity
// 영상관리이력 // 영상관리이력
@@ -92,7 +90,7 @@ public class MapSheetMngHstEntity extends CommonDateEntity {
private Integer scaleRatio; private Integer scaleRatio;
@Column(name = "data_state", length = 20) @Column(name = "data_state", length = 20)
private String dataState; private String dataState; // DONE,NOTYET 둘중하나임 같은연도는 같은값
@Column(name = "data_state_dttm") @Column(name = "data_state_dttm")
private ZonedDateTime dataStateDttm; private ZonedDateTime dataStateDttm;
@@ -165,13 +163,4 @@ public class MapSheetMngHstEntity extends CommonDateEntity {
@Column(name = "upload_id") @Column(name = "upload_id")
private String uploadId; private String uploadId;
// 파일정보 업데이트
public void updateFileInfos(Long tifSizeBytes, Long tfwSizeBytes) {
tifSizeBytes = tifSizeBytes == null ? 0L : tifSizeBytes;
tfwSizeBytes = tfwSizeBytes == null ? 0L : tfwSizeBytes;
this.tifSizeBytes = tifSizeBytes;
this.tfwSizeBytes = tfwSizeBytes;
this.totalSizeBytes = tifSizeBytes + tfwSizeBytes;
}
} }

View File

@@ -117,6 +117,7 @@ public class ModelMngEntity extends CommonDateEntity {
this.clsModelFileName, this.clsModelFileName,
this.clsModelVersion, this.clsModelVersion,
this.priority, this.priority,
this.memo); this.memo,
this.uuid);
} }
} }

View File

@@ -11,48 +11,101 @@ import lombok.Getter;
import lombok.Setter; import lombok.Setter;
import org.hibernate.annotations.ColumnDefault; import org.hibernate.annotations.ColumnDefault;
/**
* 시스템 메트릭 엔티티
*
* <p>서버 시스템의 성능 메트릭 데이터를 저장하는 JPA 엔티티입니다. CPU 및 메모리 사용량 등 시스템 리소스 모니터링 데이터를 관리합니다.
*
* <p>데이터 소스: Linux sar 명령어 또는 /proc/meminfo 파일
*
* <p>활용 사례:
*
* <ul>
* <li>용량 계획: 메모리 추가 필요 시점 예측
* <li>성능 모니터링: 메모리 부족 상황 감지
* <li>트렌드 분석: 시간대별 메모리 사용 패턴 파악
* <li>알림 설정: memused > 90% 시 경고
* </ul>
*/
@Getter @Getter
@Setter @Setter
@Entity @Entity
@Table(name = "system_metrics") @Table(name = "system_metrics")
public class SystemMetricEntity { public class SystemMetricEntity {
/** 기본 키 (UUID, 자동 생성) */
@Id @Id
@ColumnDefault("gen_random_uuid()") @ColumnDefault("gen_random_uuid()")
@Column(name = "uuid", nullable = false) @Column(name = "uuid", nullable = false)
private UUID id; private UUID id;
/** 시퀀스 기반 보조 ID */
@NotNull @NotNull
@ColumnDefault("nextval('system_metrics_id_seq')") @ColumnDefault("nextval('system_metrics_id_seq')")
@Column(name = "id", nullable = false) @Column(name = "id", nullable = false)
private Integer id1; private Integer id1;
/** 메트릭 수집 시각 (시간대 포함) */
@NotNull @NotNull
@Column(name = "\"timestamp\"", nullable = false) @Column(name = "\"timestamp\"", nullable = false)
private OffsetDateTime timestamp; private OffsetDateTime timestamp;
/** 모니터링 대상 서버 이름 */
@NotNull @NotNull
@Column(name = "server_name", nullable = false, length = Integer.MAX_VALUE) @Column(name = "server_name", nullable = false, length = Integer.MAX_VALUE)
private String serverName; private String serverName;
/** 사용자 프로세스가 사용한 CPU 사용률 (%) - 응용 프로그램 실행 */
@Column(name = "cpu_user") @Column(name = "cpu_user")
private Float cpuUser; private Float cpuUser;
/** 시스템 프로세스가 사용한 CPU 사용률 (%) - 커널 작업 */
@Column(name = "cpu_system") @Column(name = "cpu_system")
private Float cpuSystem; private Float cpuSystem;
/** I/O 대기로 소모된 CPU 사용률 (%) - 디스크/네트워크 대기 */
@Column(name = "cpu_iowait") @Column(name = "cpu_iowait")
private Float cpuIowait; private Float cpuIowait;
/** 유휴 상태 CPU 사용률 (%) - 사용 가능한 여유 CPU */
@Column(name = "cpu_idle") @Column(name = "cpu_idle")
private Float cpuIdle; private Float cpuIdle;
/**
* 사용 가능한 여유 메모리 (KB 단위)
*
* <p>시스템에서 즉시 사용 가능한 물리 메모리 양
*
* <p>예시: 4194304 = 약 4GB의 여유 메모리
*
* <p>데이터 소스: /proc/meminfo의 MemFree
*/
@Column(name = "kbmemfree") @Column(name = "kbmemfree")
private Long kbmemfree; private Long kbmemfree;
/**
* 현재 사용 중인 메모리 (KB 단위)
*
* <p>시스템이 현재 할당하여 사용 중인 물리 메모리 양
*
* <p>예시: 8388608 = 약 8GB의 사용 중인 메모리
*
* <p>계산: MemTotal - MemFree
*/
@Column(name = "kbmemused") @Column(name = "kbmemused")
private Long kbmemused; private Long kbmemused;
/**
* 메모리 사용률 (백분율)
*
* <p>전체 메모리 대비 사용 중인 메모리 비율
*
* <p>계산식: (kbmemused / (kbmemused + kbmemfree)) × 100
*
* <p>예시: 66.7 = 전체 메모리의 66.7% 사용 중
*
* <p>관계식: 총 메모리 = kbmemused + kbmemfree
*/
@Column(name = "memused") @Column(name = "memused")
private Float memused; private Float memused;
} }

View File

@@ -1,20 +1,65 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference; package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import java.util.Optional; import java.util.Optional;
import java.util.UUID; import java.util.UUID;
public interface InferenceResultRepositoryCustom { public interface InferenceResultRepositoryCustom {
/**
* tb_map_sheet_anal_inference 추론 결과 목록 저장
*
* @param id learn 테이블 id
* @return
*/
Long upsertGroupsFromMapSheetAnal(Long id); Long upsertGroupsFromMapSheetAnal(Long id);
void upsertGroupsFromInferenceResults(Long analId); /**
* tb_map_sheet_anal_data_inference 추론 결과 상세 저장
*
* @param analId
* @return
*/
int upsertGroupsFromInferenceResults(Long analId);
void upsertGeomsFromInferenceResults(Long analId); /**
* tb_map_sheet_anal_data_inference_geom geom 목록 추론 결과 저장
*
* @param analId
* @return
*/
int upsertGeomsFromInferenceResults(Long analId);
void upsertSttcFromInferenceResults(Long analId); /**
* tb_map_sheet_anal_sttc 집계 추론 결과 저장
*
* @param analId
* @return
*/
int upsertSttcFromInferenceResults(Long analId);
/**
* 추론실행 목록 uuid 조회
*
* @param uuid 추론 uuid
* @return 추론 실행 정보
*/
Long getInferenceLearnIdByUuid(UUID uuid); Long getInferenceLearnIdByUuid(UUID uuid);
/**
* 추론 정보 조회
*
* @param uuid 추론 uuid
* @return 추론 정보
*/
Optional<MapSheetLearnEntity> getInferenceUid(UUID uuid); Optional<MapSheetLearnEntity> getInferenceUid(UUID uuid);
/**
* learn id 로 analInference 값 조회
*
* @param id 추론 id
* @return
*/
Optional<MapSheetAnalInferenceEntity> getAnalInferenceDataByLearnId(Long id);
} }

View File

@@ -1,8 +1,10 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference; package com.kamco.cd.kamcoback.postgres.repository.Inference;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity; import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelMngState; import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelMngState;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import com.querydsl.jpa.impl.JPAQueryFactory; import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager; import jakarta.persistence.EntityManager;
@@ -80,7 +82,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
* @return 반영된 행 수 * @return 반영된 행 수
*/ */
@Override @Override
public void upsertGroupsFromInferenceResults(Long analId) { public int upsertGroupsFromInferenceResults(Long analId) {
String sql = String sql =
""" """
@@ -124,7 +126,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
updated_dttm = now() updated_dttm = now()
"""; """;
em.createNativeQuery(sql).setParameter("analId", analId).executeUpdate(); return em.createNativeQuery(sql).setParameter("analId", analId).executeUpdate();
} }
/** /**
@@ -136,7 +138,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
* @return 반영된 행 수 * @return 반영된 행 수
*/ */
@Override @Override
public void upsertGeomsFromInferenceResults(Long analUid) { public int upsertGeomsFromInferenceResults(Long analUid) {
String sql = String sql =
""" """
@@ -214,6 +216,9 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
WHERE msl.anal_uid = :analUid WHERE msl.anal_uid = :analUid
AND r.after_c is not null AND r.after_c is not null
AND r.after_p is not null AND r.after_p is not null
AND r.probability is not null
AND r.before_c is not null
AND r.before_p is not null
ORDER BY r.uid, r.created_date DESC NULLS LAST ORDER BY r.uid, r.created_date DESC NULLS LAST
) x ) x
ON CONFLICT (result_uid) ON CONFLICT (result_uid)
@@ -229,11 +234,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
ref_map_sheet_num = EXCLUDED.ref_map_sheet_num ref_map_sheet_num = EXCLUDED.ref_map_sheet_num
"""; """;
em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate(); return em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
} }
@Override @Override
public void upsertSttcFromInferenceResults(Long analUid) { public int upsertSttcFromInferenceResults(Long analUid) {
String sql = String sql =
""" """
@@ -306,7 +311,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
,updated_uid = EXCLUDED.updated_uid ,updated_uid = EXCLUDED.updated_uid
"""; """;
em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate(); return em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
} }
// =============================== // ===============================
@@ -331,4 +336,14 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
.where(mapSheetLearnEntity.uuid.eq(uuid)) .where(mapSheetLearnEntity.uuid.eq(uuid))
.fetchOne()); .fetchOne());
} }
@Override
public Optional<MapSheetAnalInferenceEntity> getAnalInferenceDataByLearnId(Long id) {
return Optional.ofNullable(
queryFactory
.select(mapSheetAnalInferenceEntity)
.from(mapSheetAnalInferenceEntity)
.where(mapSheetAnalInferenceEntity.learnId.eq(id))
.fetchOne());
}
} }

View File

@@ -1,11 +1,32 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference; package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity; import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
import java.util.List; import java.util.List;
public interface InferenceResultsTestingRepositoryCustom { public interface InferenceResultsTestingRepositoryCustom {
/**
* 추론 결과 조회
*
* @param batchIds batch id
* @return 추론 결과 목록
*/
List<InferenceResultsTestingEntity> getInferenceResultList(List<Long> batchIds); List<InferenceResultsTestingEntity> getInferenceResultList(List<Long> batchIds);
/**
* 테스팅 테이블 조회하여 탐지건수 조회
*
* @param batchIds batchIds
* @return batchIds 조회 count 수
*/
Long getInferenceResultCnt(List<Long> batchIds); Long getInferenceResultCnt(List<Long> batchIds);
/**
* testing 테이블 결과로 기본정보 조회
*
* @param batchIds batch id
* @return batch id, model ver, year 정보
*/
List<InferenceResultsTestingDto.Basic> getInferenceResultGroupList(List<Long> batchIds);
} }

View File

@@ -2,7 +2,9 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference;
import static com.kamco.cd.kamcoback.postgres.entity.QInferenceResultsTestingEntity.inferenceResultsTestingEntity; import static com.kamco.cd.kamcoback.postgres.entity.QInferenceResultsTestingEntity.inferenceResultsTestingEntity;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity; import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
import com.querydsl.core.types.Projections;
import com.querydsl.jpa.impl.JPAQueryFactory; import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.List; import java.util.List;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
@@ -21,11 +23,12 @@ public class InferenceResultsTestingRepositoryImpl
.select(inferenceResultsTestingEntity) .select(inferenceResultsTestingEntity)
.from(inferenceResultsTestingEntity) .from(inferenceResultsTestingEntity)
.where( .where(
inferenceResultsTestingEntity inferenceResultsTestingEntity.batchId.in(batchIds),
.batchId inferenceResultsTestingEntity.afterC.isNotNull(),
.in(batchIds) inferenceResultsTestingEntity.afterP.isNotNull(),
.and(inferenceResultsTestingEntity.afterC.isNotNull()) inferenceResultsTestingEntity.beforeC.isNotNull(),
.and(inferenceResultsTestingEntity.afterP.isNotNull())) inferenceResultsTestingEntity.beforeP.isNotNull(),
inferenceResultsTestingEntity.probability.isNotNull())
.fetch(); .fetch();
} }
@@ -42,9 +45,38 @@ public class InferenceResultsTestingRepositoryImpl
.where( .where(
inferenceResultsTestingEntity.batchId.in(batchIds), inferenceResultsTestingEntity.batchId.in(batchIds),
inferenceResultsTestingEntity.afterC.isNotNull(), inferenceResultsTestingEntity.afterC.isNotNull(),
inferenceResultsTestingEntity.afterP.isNotNull()) inferenceResultsTestingEntity.afterP.isNotNull(),
inferenceResultsTestingEntity.beforeC.isNotNull(),
inferenceResultsTestingEntity.beforeP.isNotNull(),
inferenceResultsTestingEntity.probability.isNotNull())
.fetchOne(); .fetchOne();
return cnt == null ? 0L : cnt; return cnt == null ? 0L : cnt;
} }
@Override
public List<InferenceResultsTestingDto.Basic> getInferenceResultGroupList(List<Long> batchIds) {
return queryFactory
.select(
Projections.constructor(
InferenceResultsTestingDto.Basic.class,
inferenceResultsTestingEntity.batchId,
inferenceResultsTestingEntity.modelVersion.max(),
inferenceResultsTestingEntity.beforeYear.max(),
inferenceResultsTestingEntity.afterYear.max()))
.from(inferenceResultsTestingEntity)
.where(
inferenceResultsTestingEntity.batchId.in(batchIds),
inferenceResultsTestingEntity.afterC.isNotNull(),
inferenceResultsTestingEntity.afterP.isNotNull(),
inferenceResultsTestingEntity.beforeC.isNotNull(),
inferenceResultsTestingEntity.beforeP.isNotNull(),
inferenceResultsTestingEntity.probability.isNotNull())
.groupBy(
inferenceResultsTestingEntity.batchId,
inferenceResultsTestingEntity.modelVersion,
inferenceResultsTestingEntity.beforeYear,
inferenceResultsTestingEntity.afterYear)
.fetch();
}
} }

Some files were not shown because too many files have changed in this diff Show More