Compare commits
130 Commits
feat/infer...feat/dean/
| SHA1 | Author | Date | |
|---|---|---|---|
| 32d56cf8fe | |||
| c3b7daebb7 | |||
| 2188d426d4 | |||
| 5c2ee0974b | |||
| 7980fe1d42 | |||
| c10141e915 | |||
| 97565c5369 | |||
| 30f0e1a885 | |||
| ba562261c3 | |||
| a084c80715 | |||
| a44e93c234 | |||
| a63b81008a | |||
| 2309357c0d | |||
| ee76389d6c | |||
| 7b15e5bb8c | |||
| 001ad73de7 | |||
| 2508f59a72 | |||
| f2307ff0f4 | |||
| 6f44319d33 | |||
| cefacb291b | |||
| 744cbb55a9 | |||
| 4a120ae5fd | |||
| 7c200b057a | |||
| 8ac0a00311 | |||
| 4863091406 | |||
| 70c28e0b54 | |||
| 9197819340 | |||
| f2500c33e6 | |||
| 18dc831b05 | |||
| 48b46035fd | |||
| 1b9c7faf22 | |||
| fcdba49430 | |||
| 7599c99025 | |||
| 8fd1948d7c | |||
| 2c1047a014 | |||
| 8c54e5c176 | |||
| d3faa87d4f | |||
| 8d8d9d7a9f | |||
| 9c3d6c01f7 | |||
| 02b9a97ee8 | |||
| 438fb3ec9b | |||
| 3105b60759 | |||
| 5dddafbe0c | |||
| c2872c7748 | |||
| 7128eb007e | |||
| 815ee57e06 | |||
| ab52256c05 | |||
| 3ee3cf8425 | |||
| ba11e4c801 | |||
| 14248b29e7 | |||
| e95bea7d29 | |||
| a4c3fc5185 | |||
| d391a73197 | |||
| fdbda7d945 | |||
| d36703fd84 | |||
| 9ffab423c8 | |||
| 496f9c562d | |||
| 2720cc3766 | |||
| 72778d6996 | |||
| 514b07356e | |||
| 85834f2221 | |||
| c93d40f3f3 | |||
| 74e6485930 | |||
| 8cb8632a51 | |||
| 190ba525d5 | |||
| 70e01a2044 | |||
| fad797eea4 | |||
| 9ee1ec94c0 | |||
| 670cedda59 | |||
| 3683c193d4 | |||
| a2293ad1ab | |||
| 78fe7f013b | |||
| 22c3b28237 | |||
| 48fa13615e | |||
| 8d7ddc4c33 | |||
| 1f9d6861a0 | |||
| b859a56ab0 | |||
| 84b2149f78 | |||
| 4b04fb64ec | |||
| df0c689243 | |||
| 827f701186 | |||
| db897268de | |||
| 4dc5c196ca | |||
| ea74203667 | |||
| 9421df2b9b | |||
| 2a3bf9852d | |||
| 3f1bb8f082 | |||
| 21ac562fd5 | |||
| 778e87383c | |||
| aac8c91cd0 | |||
| 38c4fbf4e5 | |||
| b8fc314bff | |||
| 4e2e5c0b1d | |||
| fd1ba1ef3b | |||
| 6b65dbdc75 | |||
| 2d2b55efcd | |||
| ac13f36663 | |||
| 82f08c4240 | |||
| e15b35943b | |||
| 8bdccfdce6 | |||
| e209eeb826 | |||
| 3aca011104 | |||
| 2c320194b4 | |||
| 3f6737706a | |||
| 0df7d7c5cf | |||
| 3724528ea9 | |||
| 9885c19b50 | |||
| 079a899822 | |||
| 5b09b2e29a | |||
| 58a73de9ab | |||
| 4cbd2b8d76 | |||
| f4a890bec8 | |||
| 89504e4156 | |||
| 783609b015 | |||
| 5d33190c31 | |||
| 92232e13f1 | |||
| 81b0b55d57 | |||
| 83ef7e36ed | |||
| 0d13e6989f | |||
| 4342df9bf5 | |||
| 8f9585b516 | |||
| 43b5a79031 | |||
| 3ba3b05f2f | |||
| fffc2efd96 | |||
| 82e3250fd4 | |||
| 470f2191b7 | |||
| c127531412 | |||
| 61cfd8240a | |||
| 54b6712273 | |||
| b2141e98c0 |
@@ -1,8 +1,12 @@
# Use the local base image built in stage 1
FROM 192.168.2.73:18082/kamco-cd/base-java21-gdal:1.0
FROM 192.168.2.73:18082/kamco-cd/base-java21-gdal:1.0
# Stage 1: Build stage (the gradle build already runs in Jenkins)
FROM eclipse-temurin:21-jre-jammy

# Install GDAL
RUN apt-get update && apt-get install -y \
    gdal-bin \
    libgdal-dev \
    && rm -rf /var/lib/apt/lists/*

# User setup (may differ per app, so kept here)
ARG UID=1000
ARG GID=1000

@@ -19,6 +23,7 @@ COPY build/libs/ROOT.jar app.jar

# Expose the port
EXPOSE 8080

# Run the application
# Run with the dev profile
ENTRYPOINT ["java", "-jar", "-Dspring.profiles.active=prod", "app.jar"]
Dockerfile-prod_bak (Normal file, 23 lines)
@@ -0,0 +1,23 @@
# Stage 1: Build stage (the gradle build already runs in Jenkins)
FROM kamco-java-gdal:21

ARG UID=1000
ARG GID=1000

RUN groupadd -g ${GID} kcomu \
    && useradd -u ${UID} -g ${GID} -m kcomu

USER kcomu

# Set the working directory
WORKDIR /app

# Copy the JAR file (ROOT.jar built by Jenkins)
COPY build/libs/ROOT.jar app.jar

# Expose the port
EXPOSE 8080

# Run the application
# Run with the dev profile
ENTRYPOINT ["java", "-jar", "-Dspring.profiles.active=prod", "app.jar"]
@@ -15,11 +15,7 @@ services:
      - SPRING_PROFILES_ACTIVE=dev
      - TZ=Asia/Seoul
    volumes:
      - /mnt/nfs_share/images:/app/original-images
      - /mnt/nfs_share/model_output:/app/model-outputs
      - /mnt/nfs_share/train_dataset:/app/train-dataset
      - /mnt/nfs_share/tmp:/app/tmp
      - /kamco-nfs:/kamco-nfs
      - /data:/kamco-nfs
    networks:
      - kamco-cds
    restart: unless-stopped
@@ -1,25 +1,34 @@
services:
  kamco-changedetection-api:
    build:
      context: .
      dockerfile: Dockerfile-prod
      args:
        UID: 1000 # manager01 UID
        GID: 1000 # manager01 GID
    image: kamco-changedetection-api:${IMAGE_TAG:-latest}
    container_name: kamco-changedetection-api
    user: "1000:1000"
  nginx:
    image: nginx:alpine
    container_name: kamco-cd-api-nginx
    ports:
      - "7100:8080"
      - "12013:443"
    volumes:
      - ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
      - ./nginx/conf.d:/etc/nginx/conf.d:ro
      - /etc/ssl/certs/globalsign:/etc/ssl/certs/globalsign:ro
    networks:
      - kamco-cds
    restart: unless-stopped
    depends_on:
      - kamco-cd-api
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost/health"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 10s

  kamco-cd-api:
    image: kamco-api-app:260219
    container_name: kamco-cd-api
    user: "1000:1000"
    environment:
      - SPRING_PROFILES_ACTIVE=dev
      - SPRING_PROFILES_ACTIVE=prod
      - TZ=Asia/Seoul
    volumes:
      - /mnt/nfs_share/images:/app/original-images
      - /mnt/nfs_share/model_output:/app/model-outputs
      - /mnt/nfs_share/train_dataset:/app/train-dataset
      - /mnt/nfs_share/tmp:/app/tmp
      - /kamco-nfs:/kamco-nfs
      - /data:/kamco-nfs
    networks:
      - kamco-cds
    restart: unless-stopped
nginx/README.md (Normal file, 122 lines)
@@ -0,0 +1,122 @@
# Nginx HTTPS Configuration for KAMCO Change Detection API

## SSL Certificate Setup

### Required Files
Place the GlobalSign SSL certificate files in the server's `/etc/ssl/certs/globalsign/` directory:

```
/etc/ssl/certs/globalsign/
├── certificate.crt   # SSL certificate
├── private.key       # private key
└── ca-bundle.crt     # CA bundle (intermediate certificates)
```

### Certificate Installation Steps

1. **Create the directory**
   ```bash
   sudo mkdir -p /etc/ssl/certs/globalsign
   sudo chmod 755 /etc/ssl/certs/globalsign
   ```

2. **Copy the certificate files**
   ```bash
   sudo cp your-certificate.crt /etc/ssl/certs/globalsign/certificate.crt
   sudo cp your-private.key /etc/ssl/certs/globalsign/private.key
   sudo cp ca-bundle.crt /etc/ssl/certs/globalsign/ca-bundle.crt
   ```

3. **Set file permissions**
   ```bash
   sudo chmod 644 /etc/ssl/certs/globalsign/certificate.crt
   sudo chmod 600 /etc/ssl/certs/globalsign/private.key
   sudo chmod 644 /etc/ssl/certs/globalsign/ca-bundle.crt
   ```

## Configuration Overview

### Service Architecture
```
Internet (HTTPS:12013)
        ↓
nginx (443 in container)
        ↓
kamco-changedetection-api (8080 in container)
```

### Key Features
- **HTTPS/TLS**: TLSv1.2 and TLSv1.3 supported
- **Port**: external 12013 → internal 443 (nginx)
- **Domain**: aicd-api.e-kamco.com:12013
- **Reverse Proxy**: proxies to kamco-changedetection-api:8080
- **Security Headers**: HSTS, X-Frame-Options, X-Content-Type-Options, etc.
- **Health Check**: /health endpoint

## Deployment

### Start Services
```bash
docker-compose -f docker-compose-prod.yml up -d
```

### Check Logs
```bash
# Nginx logs
docker logs kamco-cd-nginx

# API logs
docker logs kamco-changedetection-api
```

### Verify Configuration
```bash
# Test nginx configuration
docker exec kamco-cd-nginx nginx -t

# Check SSL certificate
docker exec kamco-cd-nginx openssl s_client -connect localhost:443 -servername aicd-api.e-kamco.com
```

### Access Service
```bash
# HTTPS access
curl -k https://aicd-api.e-kamco.com:12013/monitor/health

# Health check
curl -k https://aicd-api.e-kamco.com:12013/health
```

## Troubleshooting

### Certificate Issues
Check that the certificate files are mounted correctly:
```bash
docker exec kamco-cd-nginx ls -la /etc/ssl/certs/globalsign/
```

### Nginx Configuration Test
```bash
docker exec kamco-cd-nginx nginx -t
```

### Connection Test
```bash
# Check if nginx is listening
docker exec kamco-cd-nginx netstat -tlnp | grep 443

# Check backend connection
docker exec kamco-cd-nginx wget --spider http://kamco-changedetection-api:8080/monitor/health
```

## Configuration Files

- `nginx/nginx.conf`: Main nginx configuration
- `nginx/conf.d/default.conf`: Server block with SSL and proxy settings
- `docker-compose-prod.yml`: Docker compose with nginx service

## Notes

- If your certificate file names differ, adjust the paths in `nginx/conf.d/default.conf`
- Restart the nginx container after renewing certificates: `docker restart kamco-cd-nginx`
- Make sure port 12013 is allowed through the firewall
nginx/conf.d/default.conf (Normal file, 60 lines)
@@ -0,0 +1,60 @@
upstream kamco_api {
    server kamco-cd-api:8080;
}

server {
    listen 443 ssl http2;
    server_name aicd-api.e-kamco.com;

    # GlobalSign SSL Certificate
    ssl_certificate /etc/ssl/certs/globalsign/certificate.crt;
    ssl_certificate_key /etc/ssl/certs/globalsign/private.key;

    # SSL Configuration
    ssl_protocols TLSv1.2 TLSv1.3;
    ssl_ciphers HIGH:!aNULL:!MD5;
    ssl_prefer_server_ciphers on;
    ssl_session_cache shared:SSL:10m;
    ssl_session_timeout 10m;

    # Security Headers
    add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
    add_header X-Frame-Options "SAMEORIGIN" always;
    add_header X-Content-Type-Options "nosniff" always;
    add_header X-XSS-Protection "1; mode=block" always;

    # Client Body Size
    client_max_body_size 100M;

    # Proxy Settings
    location / {
        proxy_pass http://kamco_api;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-Host $host;
        proxy_set_header X-Forwarded-Port $server_port;

        # Timeouts
        proxy_connect_timeout 60s;
        proxy_send_timeout 60s;
        proxy_read_timeout 60s;

        # WebSocket Support (if needed)
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
    }

    # Health Check Endpoint
    location /health {
        access_log off;
        return 200 "OK";
        add_header Content-Type text/plain;
    }

    # Access and Error Logs
    access_log /var/log/nginx/kamco-api-access.log;
    error_log /var/log/nginx/kamco-api-error.log;
}
nginx/nginx.conf (Normal file, 33 lines)
@@ -0,0 +1,33 @@
user nginx;
worker_processes auto;

error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;

events {
    worker_connections 1024;
}

http {
    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
                    '$status $body_bytes_sent "$http_referer" '
                    '"$http_user_agent" "$http_x_forwarded_for"';

    access_log /var/log/nginx/access.log main;

    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;

    gzip on;
    gzip_vary on;
    gzip_min_length 1024;
    gzip_types text/plain text/css text/xml text/javascript application/x-javascript application/xml+rss application/json;

    include /etc/nginx/conf.d/*.conf;
}
@@ -18,12 +18,13 @@ import org.springframework.web.filter.OncePerRequestFilter;
@RequiredArgsConstructor
public class JwtAuthenticationFilter extends OncePerRequestFilter {

  private final JwtTokenProvider jwtTokenProvider;
  private final UserDetailsService userDetailsService;
  private static final AntPathMatcher PATH_MATCHER = new AntPathMatcher();
  private static final String[] EXCLUDE_PATHS = {
    "/api/auth/signin", "/api/auth/refresh", "/api/auth/logout", "/api/members/*/password"
    // "/api/auth/signin", "/api/auth/refresh", "/api/auth/logout", "/api/members/*/password"
    "/api/auth/signin", "/api/auth/refresh", "/api/auth/logout"
  };
  private final JwtTokenProvider jwtTokenProvider;
  private final UserDetailsService userDetailsService;

  @Override
  protected void doFilterInternal(
@@ -146,4 +146,53 @@ public class ChangeDetectionApiController {
    return ApiResponseDto.ok(
        changeDetectionService.getChangeDetectionPointList(type, scale, uuid, mapSheetNum));
  }

  @Operation(summary = "선택 변화탐지 결과 uuid 조회", description = "선택 변화탐지 결과 uuid 조회")
  @GetMapping("/selected/uuid")
  public ApiResponseDto<UUID> getChnDtctIdUuid(
      @Parameter(description = "회차 32자 uid", example = "98ABAA1FC4394F11885C302C19AE5E81")
          @RequestParam
          String chnDtctId) {
    return ApiResponseDto.ok(changeDetectionService.getLearnUuid(chnDtctId));
  }

  @Operation(summary = "선택 변화탐지 결과 Polygon", description = "선택 변화탐지 결과 Polygon")
  @GetMapping("/selected/polygon")
  public ApiResponseDto<ChangeDetectionDto.PolygonFeatureList> getCdPolygonList(
      @Parameter(description = "회차 32자 uid", example = "98ABAA1FC4394F11885C302C19AE5E81")
          @RequestParam
          String chnDtctId,
      @Parameter(description = "polygon 32자 uid", example = "3B1A7E5F895A4D9698489540EE1BBE1E")
          @RequestParam
          String cdObjectId,
      @Parameter(
              description = "polygon 32자 uids",
              example =
                  "3B1A7E5F895A4D9698489540EE1BBE1E,3B221A2AF9614647A0903A972D56C574,3B22686A7ACE44FC9CB20F1B4FA6DEFD,3B376D94A183479BB5FBE3D7166E6E1A")
          @RequestParam
          List<String> cdObjectIds,
      @Parameter(description = "pnu") @RequestParam(required = false) String pnu) {
    return ApiResponseDto.ok(
        changeDetectionService.getPolygonListByCd(chnDtctId, cdObjectId, cdObjectIds, pnu));
  }

  @Operation(summary = "선택 변화탐지 결과 Point", description = "선택 변화탐지 결과 Point")
  @GetMapping("/selected/point")
  public ApiResponseDto<ChangeDetectionDto.PointFeatureList> getCdPointList(
      @Parameter(description = "회차 32자 uid", example = "98ABAA1FC4394F11885C302C19AE5E81")
          @RequestParam
          String chnDtctId,
      @Parameter(description = "polygon 32자 uid", example = "3B1A7E5F895A4D9698489540EE1BBE1E")
          @RequestParam
          String cdObjectId,
      @Parameter(
              description = "polygon 32자 uids",
              example =
                  "3B1A7E5F895A4D9698489540EE1BBE1E,3B221A2AF9614647A0903A972D56C574,3B22686A7ACE44FC9CB20F1B4FA6DEFD,3B376D94A183479BB5FBE3D7166E6E1A")
          @RequestParam
          List<String> cdObjectIds,
      @Parameter(description = "pnu") @RequestParam(required = false) String pnu) {
    return ApiResponseDto.ok(
        changeDetectionService.getPointListByCd(chnDtctId, cdObjectId, cdObjectIds, pnu));
  }
}
@@ -197,6 +197,8 @@ public class ChangeDetectionDto {
    private Double afterConfidence; // comparison confidence (probability)
    private String afterClass;
    private Double cdProb; // detection accuracy
    private UUID uuid;
    private String resultUid;
  }

  @Schema(name = "PointFeature", description = "Geometry 리턴 객체")
@@ -250,5 +252,21 @@ public class ChangeDetectionDto {
    private Double afterConfidence; // comparison confidence (probability)
    private String afterClass; // comparison class
    private Double cdProb; // detection accuracy
    private UUID uuid;
    private String uid;
  }

  @Schema(name = "ChangeDetectionMapDto", description = "변화지도 팝업 검색조건")
  @Getter
  @Setter
  @NoArgsConstructor
  @AllArgsConstructor
  public static class ChangeDetectionMapDto {
    private Integer compareYyyy;
    private Integer targetYyyy;
    private String cdObjectId;
    private List<String> cdObjectIds;
    private String chnDtctId;
    private String pnu;
  }
}
@@ -89,4 +89,42 @@ public class ChangeDetectionService {
      default -> throw new IllegalArgumentException("Unsupported type: " + type);
    }
  }

  /**
   * Look up the selected polygon info.
   *
   * @param chnDtctId
   * @param cdObjectId
   * @param cdObjectIds
   * @param pnu
   * @return
   */
  public ChangeDetectionDto.PolygonFeatureList getPolygonListByCd(
      String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu) {
    return changeDetectionCoreService.getPolygonListByCd(chnDtctId, cdObjectId, cdObjectIds);
  }

  /**
   * Look up the selected points.
   *
   * @param chnDtctId
   * @param cdObjectId
   * @param cdObjectIds
   * @param pnu
   * @return
   */
  public ChangeDetectionDto.PointFeatureList getPointListByCd(
      String chnDtctId, String cdObjectId, List<String> cdObjectIds, String pnu) {
    return changeDetectionCoreService.getPointListByCd(chnDtctId, cdObjectId, cdObjectIds);
  }

  /**
   * Look up the learn uuid.
   *
   * @param chnDtctId
   * @return uuid
   */
  public UUID getLearnUuid(String chnDtctId) {
    return changeDetectionCoreService.getLearnUuid(chnDtctId);
  }
}
@@ -0,0 +1,92 @@
package com.kamco.cd.kamcoback.common.download;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.common.download.dto.DownloadAuditEvent;
import com.kamco.cd.kamcoback.menu.dto.MenuDto;
import com.kamco.cd.kamcoback.menu.service.MenuService;
import com.kamco.cd.kamcoback.postgres.entity.AuditLogEntity;
import com.kamco.cd.kamcoback.postgres.repository.log.AuditLogRepository;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Objects;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.event.EventListener;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

@Slf4j
@Component
@RequiredArgsConstructor
public class DownloadAuditEventListener {

  private final AuditLogRepository auditLogRepository;
  private final MenuService menuService;
  private final ObjectMapper objectMapper;

  @Async("auditLogExecutor")
  @Transactional(propagation = Propagation.REQUIRES_NEW)
  @EventListener
  public void onDownloadAudit(DownloadAuditEvent ev) {
    try {
      String menuUid = resolveMenuUid(ev.normalizedUri());
      if (menuUid == null) {
        // menuUid must not be null -> skip
        log.warn(
            "MenuUid not resolved. skip audit. uri={}, normalized={}",
            ev.requestUri(),
            ev.normalizedUri());
        return;
      }

      AuditLogEntity logEntity =
          AuditLogEntity.forFileDownload(
              ev.userId(), ev.requestUri(), menuUid, ev.ip(), ev.status(), ev.downloadUuid());

      auditLogRepository.save(logEntity);

    } catch (Exception e) {
      // must stay isolated from the original request
      log.warn("Download audit save failed. uri={}, err={}", ev.requestUri(), e.toString());
    }
  }

  private String resolveMenuUid(String normalizedUri) {
    try {
      List<?> list = menuService.getFindAll();

      List<MenuDto.Basic> basics =
          list.stream()
              .map(
                  item -> {
                    if (item instanceof LinkedHashMap<?, ?> map) {
                      return objectMapper.convertValue(map, MenuDto.Basic.class);
                    } else if (item instanceof MenuDto.Basic dto) {
                      return dto;
                    }
                    return null;
                  })
              .filter(Objects::nonNull)
              .toList();

      MenuDto.Basic basic =
          basics.stream()
              .filter(m -> m.getMenuUrl() != null && normalizedUri.startsWith(m.getMenuUrl()))
              .max(Comparator.comparingInt(m -> m.getMenuUrl().length()))
              .orElse(null);

      if (basic == null) return null;

      String menuUidStr = basic.getMenuUid(); // String
      if (menuUidStr == null || menuUidStr.isBlank()) return null;

      return menuUidStr;

    } catch (Exception e) {
      return null;
    }
  }
}
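The listener runs on an executor named "auditLogExecutor" via `@Async`, but the executor bean itself is not part of this diff. A minimal sketch of what such a bean might look like, assuming Spring's ThreadPoolTaskExecutor and that `@EnableAsync` is applied somewhere in the project; the class name and pool sizes are illustrative assumptions only.

```java
// Hypothetical sketch: the "auditLogExecutor" bean that @Async("auditLogExecutor") expects.
// Not part of this change; names and sizes are assumptions for illustration.
package com.kamco.cd.kamcoback.config;

import java.util.concurrent.Executor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

@Configuration
@EnableAsync
public class AsyncAuditConfigSketch {

  @Bean(name = "auditLogExecutor")
  public Executor auditLogExecutor() {
    ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
    executor.setCorePoolSize(2);        // audit writes are lightweight, keep the pool small
    executor.setMaxPoolSize(4);
    executor.setQueueCapacity(500);     // absorb bursts of concurrent downloads
    executor.setThreadNamePrefix("audit-log-");
    executor.initialize();
    return executor;
  }
}
```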
@@ -1,48 +0,0 @@
package com.kamco.cd.kamcoback.common.download;

import com.kamco.cd.kamcoback.common.download.dto.DownloadSpec;
import com.kamco.cd.kamcoback.common.utils.UserUtil;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import lombok.RequiredArgsConstructor;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;

@Service
@RequiredArgsConstructor
public class DownloadExecutor {

  private final UserUtil userUtil;

  public ResponseEntity<StreamingResponseBody> stream(DownloadSpec spec) throws IOException {

    if (!Files.isReadable(spec.filePath())) {
      return ResponseEntity.notFound().build();
    }

    StreamingResponseBody body =
        os -> {
          try (InputStream in = Files.newInputStream(spec.filePath())) {
            in.transferTo(os);
            os.flush();
          } catch (Exception e) {
            // large downloads often break mid-stream, so do not rethrow
          }
        };

    String fileName =
        spec.downloadName() != null
            ? spec.downloadName()
            : spec.filePath().getFileName().toString();

    return ResponseEntity.ok()
        .contentType(
            spec.contentType() != null ? spec.contentType() : MediaType.APPLICATION_OCTET_STREAM)
        .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + fileName + "\"")
        .body(body);
  }
}
@@ -0,0 +1,79 @@
package com.kamco.cd.kamcoback.common.download;

import jakarta.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.ResourceRegion;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpRange;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;

@Component
public class RangeDownloadResponder {

  public ResponseEntity<?> buildZipResponse(
      Path filePath, String downloadFileName, HttpServletRequest request) throws IOException {

    if (!Files.isRegularFile(filePath)) {
      return ResponseEntity.notFound().build();
    }

    long totalSize = Files.size(filePath);
    Resource resource = new FileSystemResource(filePath);

    String disposition = "attachment; filename=\"" + downloadFileName + "\"";
    String rangeHeader = request.getHeader(HttpHeaders.RANGE);

    // common headers (fixed here)
    ResponseEntity.BodyBuilder base =
        ResponseEntity.ok()
            .contentType(MediaType.APPLICATION_OCTET_STREAM)
            .header(HttpHeaders.CONTENT_DISPOSITION, disposition)
            .header(HttpHeaders.ACCEPT_RANGES, "bytes")
            .header("X-Accel-Buffering", "no");

    if (rangeHeader == null || rangeHeader.isBlank()) {
      return base.contentLength(totalSize).body(resource);
    }

    List<HttpRange> ranges;
    try {
      ranges = HttpRange.parseRanges(rangeHeader);
    } catch (IllegalArgumentException ex) {
      return ResponseEntity.status(416)
          .header(HttpHeaders.CONTENT_RANGE, "bytes */" + totalSize)
          .header("X-Accel-Buffering", "no")
          .build();
    }

    HttpRange range = ranges.get(0);

    long start = range.getRangeStart(totalSize);
    long end = range.getRangeEnd(totalSize);

    if (start >= totalSize) {
      return ResponseEntity.status(416)
          .header(HttpHeaders.CONTENT_RANGE, "bytes */" + totalSize)
          .header("X-Accel-Buffering", "no")
          .build();
    }

    long regionLength = end - start + 1;
    ResourceRegion region = new ResourceRegion(resource, start, regionLength);

    return ResponseEntity.status(206)
        .contentType(MediaType.APPLICATION_OCTET_STREAM)
        .header(HttpHeaders.CONTENT_DISPOSITION, disposition)
        .header(HttpHeaders.ACCEPT_RANGES, "bytes")
        .header("X-Accel-Buffering", "no")
        .header(HttpHeaders.CONTENT_RANGE, "bytes " + start + "-" + end + "/" + totalSize)
        .contentLength(regionLength)
        .body(region);
  }
}
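buildZipResponse answers plain GETs with 200 and the full body, honors a single Range header with 206 plus Content-Range, and returns 416 for unparsable or out-of-bounds ranges. A minimal client-side sketch using java.net.http (JDK 11+) that resumes a download from a byte offset; the host is a placeholder, and the UUID is only the Swagger example value from this diff.

```java
// Hedged sketch: resuming a download against the Range-aware endpoint.
// The host below is a placeholder, not a value from this change.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ResumeDownloadExample {
  public static void main(String[] args) throws Exception {
    HttpClient client = HttpClient.newHttpClient();
    long alreadyDownloaded = 1_048_576L; // resume after the first 1 MiB

    HttpRequest request =
        HttpRequest.newBuilder()
            .uri(URI.create(
                "https://example.invalid/api/inference/download/69c4e56c-e0bf-4742-9225-bba9aae39052"))
            .header("Range", "bytes=" + alreadyDownloaded + "-")
            .GET()
            .build();

    HttpResponse<byte[]> response = client.send(request, HttpResponse.BodyHandlers.ofByteArray());

    // 206 means the server honored the range; Content-Range carries "bytes start-end/total".
    System.out.println(response.statusCode());
    System.out.println(response.headers().firstValue("Content-Range").orElse("(none)"));
  }
}
```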
@@ -0,0 +1,11 @@
package com.kamco.cd.kamcoback.common.download.dto;

import java.util.UUID;

public record DownloadAuditEvent(
    Long userId,
    String requestUri,
    String normalizedUri,
    String ip,
    int status,
    UUID downloadUuid) {}
@@ -1,12 +0,0 @@
|
||||
package com.kamco.cd.kamcoback.common.download.dto;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.UUID;
|
||||
import org.springframework.http.MediaType;
|
||||
|
||||
public record DownloadSpec(
|
||||
UUID uuid, // 다운로드 식별(로그/정책용)
|
||||
Path filePath, // 실제 파일 경로
|
||||
String downloadName, // 사용자에게 보일 파일명
|
||||
MediaType contentType // 보통 OCTET_STREAM
|
||||
) {}
|
||||
@@ -27,4 +27,10 @@ public class CustomApiException extends RuntimeException {
    this.codeName = errorCode.getCode();
    this.status = errorCode.getStatus();
  }

  public CustomApiException(String codeName, HttpStatus status, Throwable cause) {
    super(codeName, cause);
    this.codeName = codeName;
    this.status = status;
  }
}
@@ -7,11 +7,14 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

@Log4j2
@Component
public class ExternalJarRunner {
  @Value("${spring.profiles.active}")
  private String profile;

  private static final long TIMEOUT_MINUTES = TimeUnit.DAYS.toMinutes(3);

@@ -40,7 +43,7 @@ public class ExternalJarRunner {
    if (mode != null && !mode.isEmpty()) {
      addArg(args, "converter.mode", mode);
    }

    addArg(args, "spring.profiles.active", profile);
    execJar(jarPath, args);
  }

@@ -57,6 +60,7 @@ public class ExternalJarRunner {
    addArg(args, "upload-shp", register);
    // addArg(args, "layer", layer);

    addArg(args, "spring.profiles.active", profile);
    execJar(jarPath, args);
  }

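The hunks above only show the call sites; the bodies of addArg and execJar are outside this diff. A heavily hedged sketch of one common shape for such a runner, using ProcessBuilder and the 3-day timeout constant shown above; the `--key=value` argument format is an assumption, not confirmed by this change.

```java
// Hedged sketch only: the real addArg/execJar implementations are not part of this diff.
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

class ExternalJarRunnerSketch {

  private static final long TIMEOUT_MINUTES = TimeUnit.DAYS.toMinutes(3);

  // Assumption: arguments are passed Spring-style as --key=value.
  static void addArg(List<String> args, String key, String value) {
    args.add("--" + key + "=" + value);
  }

  static void execJar(String jarPath, List<String> args) throws Exception {
    List<String> command = new ArrayList<>(List.of("java", "-jar", jarPath));
    command.addAll(args);

    Process process = new ProcessBuilder(command).inheritIO().start();
    if (!process.waitFor(TIMEOUT_MINUTES, TimeUnit.MINUTES)) {
      process.destroyForcibly(); // give up after the configured timeout
    }
  }
}
```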
@@ -279,18 +279,28 @@ public class FIleChecker {
    return true;
  }

  public static List<Folder> getFolderAll(String dirPath, String sortType, int maxDepth) {
  // some callers check kamco-nfs, so a parameter was added; confirm the usage and clean this up later
  public static List<Folder> getFolderAll(
      String dirPath, String sortType, int maxDepth, String nfsRootDir) {

    Path startPath = Paths.get(dirPath);

    List<Folder> folderList = List.of();

    try (Stream<Path> stream = Files.walk(startPath, maxDepth)) {
    log.info("[FIND_FOLDER] DIR : {} {} {} {}", dirPath, sortType, maxDepth, startPath);

    int childDirCount = getChildFolderCount(startPath.toFile());
    log.info("[FIND_FOLDER] START_PATH_CHILD_DIR_COUNT : {}", childDirCount);

    try (Stream<Path> stream = Files.walk(startPath, maxDepth)) {
      folderList =
          stream
              .filter(Files::isDirectory)
              .filter(p -> !p.toString().equals(dirPath))
              .filter(
                  p ->
                      !p.toAbsolutePath()
                          .normalize()
                          .equals(startPath.toAbsolutePath().normalize()))
              .map(
                  path -> {
                    int depth = path.getNameCount();
@@ -300,11 +310,12 @@ public class FIleChecker {
                    String parentPath = path.getParent().toString();
                    String fullPath = path.toAbsolutePath().toString();

                    boolean isValid =
                        !NameValidator.containsKorean(folderNm)
                            && !NameValidator.containsWhitespaceRegex(folderNm)
                            && !parentFolderNm.equals("kamco-nfs");

                    // is this still needed?
                    // boolean isShowHide =
                    //     !parentFolderNm.equals("kamco-nfs"); // only show entries under
                    //     kamco-nfs in the folder list
                    boolean isShowHide =
                        !parentFolderNm.equals(nfsRootDir); // only show entries under nfsRootDir in the folder list
                    File file = new File(fullPath);
                    int childCnt = getChildFolderCount(file);
                    String lastModified = getLastModified(file);
@@ -317,7 +328,7 @@ public class FIleChecker {
                        depth,
                        childCnt,
                        lastModified,
                        isValid);
                        isShowHide);
                  })
              .collect(Collectors.toList());

@@ -352,24 +363,8 @@ public class FIleChecker {
    return folderList;
  }

  public static List<Folder> getFolderAll(String dirPath) {
    return getFolderAll(dirPath, "name", 1);
  }

  public static List<Folder> getFolderAll(String dirPath, String sortType) {
    return getFolderAll(dirPath, sortType, 1);
  }

  public static int getChildFolderCount(String dirPath) {
    File directory = new File(dirPath);
    File[] childFolders = directory.listFiles(File::isDirectory);

    int childCnt = 0;
    if (childFolders != null) {
      childCnt = childFolders.length;
    }

    return childCnt;
  public static List<Folder> getFolderAll(String dirPath, String nfsRootDir) {
    return getFolderAll(dirPath, "name", 1, nfsRootDir);
  }

  public static int getChildFolderCount(File directory) {
@@ -383,11 +378,6 @@ public class FIleChecker {
    return childCnt;
  }

  public static String getLastModified(String dirPath) {
    File file = new File(dirPath);
    return dttmFormat.format(new Date(file.lastModified()));
  }

  public static String getLastModified(File file) {
    return dttmFormat.format(new Date(file.lastModified()));
  }

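Callers of the old getFolderAll(dirPath) and getFolderAll(dirPath, sortType) overloads now have to pass the NFS root directory explicitly. A minimal call-site sketch, assuming the value comes from the new FileProperties bean added in this change; the surrounding class and the "/dataset" suffix are illustrative assumptions, and Folder/FIleChecker are the project types shown above.

```java
// Hedged sketch of a call site after the signature change (project-type imports omitted).
@Service
@RequiredArgsConstructor
class FolderListingSketch {

  private final FileProperties fileProperties;

  List<Folder> listDatasetFolders() {
    String nfsRoot = fileProperties.getNfs(); // replaces the previously hard-coded "kamco-nfs"
    return FIleChecker.getFolderAll(nfsRoot + "/dataset", nfsRoot);
  }
}
```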
@@ -1,98 +1,64 @@
package com.kamco.cd.kamcoback.config;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.common.utils.HeaderUtil;
import com.kamco.cd.kamcoback.common.download.dto.DownloadAuditEvent;
import com.kamco.cd.kamcoback.common.utils.UserUtil;
import com.kamco.cd.kamcoback.config.api.ApiLogFunction;
import com.kamco.cd.kamcoback.menu.dto.MenuDto;
import com.kamco.cd.kamcoback.menu.service.MenuService;
import com.kamco.cd.kamcoback.postgres.entity.AuditLogEntity;
import com.kamco.cd.kamcoback.postgres.repository.log.AuditLogRepository;
import jakarta.servlet.DispatcherType;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.HandlerInterceptor;

@Slf4j
@Component
@RequiredArgsConstructor
public class FileDownloadInteceptor implements HandlerInterceptor {

  private final AuditLogRepository auditLogRepository;
  private final MenuService menuService;
  private final ApplicationEventPublisher publisher;
  private final UserUtil userUtil;

  @Autowired private ObjectMapper objectMapper;

  public FileDownloadInteceptor(
      AuditLogRepository auditLogRepository, MenuService menuService, UserUtil userUtil) {
    this.auditLogRepository = auditLogRepository;
    this.menuService = menuService;
    this.userUtil = userUtil;
  }

  @Override
  public boolean preHandle(
      HttpServletRequest request, HttpServletResponse response, Object handler) {
  public void afterCompletion(
      HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) {

    if (!request.getRequestURI().contains("/download")) return true;
    String uri = request.getRequestURI();
    if (uri == null || !uri.contains("/download")) return;
    if (request.getDispatcherType() != DispatcherType.REQUEST) return;

    if (request.getDispatcherType() != jakarta.servlet.DispatcherType.REQUEST) {
      return true;
    }

    saveLog(request, response);

    return true;
  }

  private void saveLog(HttpServletRequest request, HttpServletResponse response) {
    // only audit file-download APIs
    if (!request.getRequestURI().contains("/download")) {
    Long userId;
    try {
      userId = userUtil.getId();
      if (userId == null) return; // userId must not be null -> skip
    } catch (Exception e) {
      log.warn("Download audit userId resolve failed. uri={}, err={}", uri, e.toString());
      return;
    }

    Long userId = userUtil.getId();
    String ip = ApiLogFunction.getClientIp(request);
    int status = response.getStatus();
    String normalizedUri = uri.replace("/api", "");

    List<?> list = menuService.getFindAll();
    List<MenuDto.Basic> result =
        list.stream()
            .map(
                item -> {
                  if (item instanceof LinkedHashMap<?, ?> map) {
                    return objectMapper.convertValue(map, MenuDto.Basic.class);
                  } else if (item instanceof MenuDto.Basic dto) {
                    return dto;
                  } else {
                    throw new IllegalStateException("Unsupported cache type: " + item.getClass());
                  }
                })
            .toList();
    UUID downloadUuid = extractUuidFromUri(uri);
    if (downloadUuid == null) {
      log.warn("Download UUID parse failed. uri={}", uri);
      return; // downloadUuid must not be null -> skip
    }

    String normalizedUri = request.getRequestURI().replace("/api", "");
    MenuDto.Basic basic =
        result.stream()
            .filter(
                menu -> menu.getMenuUrl() != null && normalizedUri.startsWith(menu.getMenuUrl()))
            .max(Comparator.comparingInt(m -> m.getMenuUrl().length()))
            .orElse(null);
    publisher.publishEvent(
        new DownloadAuditEvent(userId, uri, normalizedUri, ip, status, downloadUuid));
  }

    AuditLogEntity log =
        AuditLogEntity.forFileDownload(
            userId,
            request.getRequestURI(),
            Objects.requireNonNull(basic).getMenuUid(),
            ip,
            response.getStatus(),
            UUID.fromString(HeaderUtil.get(request, "kamco-download-uuid")));

    auditLogRepository.save(log);
  private UUID extractUuidFromUri(String uri) {
    try {
      String[] parts = uri.split("/");
      String last = parts[parts.length - 1];
      return UUID.fromString(last);
    } catch (Exception e) {
      return null;
    }
  }
}

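The interceptor now does its work in afterCompletion and delegates persistence to the async listener through an application event, but it still has to be registered against the download URLs. A registration sketch, assuming a WebMvcConfigurer exists elsewhere in the project; the configurer class and path pattern below are assumptions, not code from this change.

```java
// Hedged sketch: registering FileDownloadInteceptor. The class name and pattern are illustrative.
package com.kamco.cd.kamcoback.config;

import lombok.RequiredArgsConstructor;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;

@Configuration
@RequiredArgsConstructor
public class WebMvcConfigSketch implements WebMvcConfigurer {

  private final FileDownloadInteceptor fileDownloadInteceptor;

  @Override
  public void addInterceptors(InterceptorRegistry registry) {
    registry.addInterceptor(fileDownloadInteceptor)
        .addPathPatterns("/api/**"); // assumed; the interceptor itself filters on "/download"
  }
}
```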
@@ -0,0 +1,33 @@
package com.kamco.cd.kamcoback.config;

import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

@Getter
@Setter
@Component
@ConfigurationProperties(prefix = "file")
public class FileProperties {

  private String root;
  private String nfs;
  private String syncRootDir;
  private String syncTmpDir;
  private String syncFileExtention;
  private String datasetDir;
  private String datasetTmpDir;
  private String modelDir;
  private String modelTmpDir;
  private String modelFileExtention;
  private String ptPath;
  private String datasetResponse;
  private TrainingData trainingData;

  @Getter
  @Setter
  public static class TrainingData {
    private String geojsonDir;
  }
}
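With `@ConfigurationProperties(prefix = "file")`, relaxed binding maps kebab-case keys such as file.nfs, file.dataset-dir, and file.training-data.geojson-dir onto these fields; the actual values live in application-*.yml, which is not part of this diff. A minimal consumer sketch; the resolver class below is an illustrative assumption, mirroring the "{file.nfs}/dataset/export/{chnDtctId}" layout referenced later in this change.

```java
// Hedged sketch of consuming FileProperties via constructor injection; illustrative only.
package com.kamco.cd.kamcoback.common.utils;

import com.kamco.cd.kamcoback.config.FileProperties;
import java.nio.file.Path;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Component;

@Component
@RequiredArgsConstructor
public class DatasetPathResolverSketch {

  private final FileProperties fileProperties;

  /** Builds "{file.nfs}/dataset/export/{uid}" from the bound NFS mount path. */
  public Path exportDir(String uid) {
    return Path.of(fileProperties.getNfs(), "dataset", "export", uid);
  }
}
```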
@@ -0,0 +1,20 @@
package com.kamco.cd.kamcoback.config;

import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

@Getter
@Setter
@Component
@ConfigurationProperties(prefix = "inference")
public class InferenceProperties {

  private String nfs;
  private String url;
  private String batchUrl;
  private String geojsonDir;
  private String jarPath;
  private String inferenceServerName;
}
@@ -24,7 +24,7 @@ public class OpenApiConfig {
  @Value("${swagger.dev-url:https://kamco.dev-api.gs.dabeeo.com}")
  private String devUrl;

  @Value("${swagger.prod-url:https://api.kamco.com}")
  @Value("${swagger.prod-url:https://aicd-api.e-kamco.com:12013}")
  private String prodUrl;

  @Bean
@@ -51,9 +51,9 @@ public class OpenApiConfig {
      servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버"));
      // servers.add(new Server().url(prodUrl).description("운영 서버"));
    } else if ("prod".equals(profile)) {
      // servers.add(new Server().url(prodUrl).description("운영 서버"));
      servers.add(new Server().url(prodUrl).description("운영 서버"));
      servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버"));
      servers.add(new Server().url(devUrl).description("개발 서버"));

    } else {
      servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버"));
      servers.add(new Server().url(devUrl).description("개발 서버"));

@@ -95,23 +95,24 @@ public class SecurityConfig {
                "/api/auth/refresh",
                "/api/auth/logout",
                "/swagger-ui/**",
                "/api/members/*/password",
                "/v3/api-docs/**",
                "/chunk_upload_test.html",
                "/download_progress_test.html",
                "/api/model/file-chunk-upload",
                "/api/upload/file-chunk-upload",
                "/api/upload/chunk-upload-complete",
                "/api/change-detection/**",
                "/api/layer/map/**",
                "/api/layer/tile-url",
                "/api/layer/tile-url-year")
                "/api/layer/tile-url-year",
                "/api/common-code/clazz",
                "/api/inference/download/**")
            .permitAll()

            // IAM: signed-in users only
            .requestMatchers(
                "/api/user/**",
                "/api/my/menus",
                "/api/common-code/**",
                "/api/members/*/password",
                "/api/training-data/label/**",
                "/api/training-data/review/**")
            .authenticated()

@@ -16,6 +16,8 @@ public class StartupLogger {

  private final Environment environment;
  private final DataSource dataSource;
  private final FileProperties fileProperties;
  private final InferenceProperties inferenceProperties;

  @EventListener(ApplicationReadyEvent.class)
  public void logStartupInfo() {
@@ -79,6 +81,25 @@ public class StartupLogger {
        │ DDL Auto          : %s
        │ JDBC Batch Size   : %s
        │ Fetch Batch Size  : %s
        ╠════════════════════════════════════════════════════════════════════════════════╣
        ║ FILE CONFIGURATION                                                               ║
        ╠────────────────────────────────────────────────────────────────────────────────╣
        │ Root Directory    : %s
        │ NFS Mount Path    : %s
        │ Sync Root Dir     : %s
        │ Sync Tmp Dir      : %s
        │ Dataset Dir       : %s
        │ Model Dir         : %s
        │ PT Path           : %s
        ╠════════════════════════════════════════════════════════════════════════════════╣
        ║ INFERENCE CONFIGURATION                                                          ║
        ╠────────────────────────────────────────────────────────────────────────────────╣
        │ NFS Mount Path    : %s
        │ Inference URL     : %s
        │ Batch URL         : %s
        │ GeoJSON Dir       : %s
        │ JAR Path          : %s
        │ Server Names      : %s
        ╚════════════════════════════════════════════════════════════════════════════════╝
        """,
        profileInfo,
@@ -89,7 +110,24 @@ public class StartupLogger {
        showSql,
        ddlAuto,
        batchSize,
        batchFetchSize);
        batchFetchSize,
        fileProperties.getRoot() != null ? fileProperties.getRoot() : "N/A",
        fileProperties.getNfs() != null ? fileProperties.getNfs() : "N/A",
        fileProperties.getSyncRootDir() != null ? fileProperties.getSyncRootDir() : "N/A",
        fileProperties.getSyncTmpDir() != null ? fileProperties.getSyncTmpDir() : "N/A",
        fileProperties.getDatasetDir() != null ? fileProperties.getDatasetDir() : "N/A",
        fileProperties.getModelDir() != null ? fileProperties.getModelDir() : "N/A",
        fileProperties.getPtPath() != null ? fileProperties.getPtPath() : "N/A",
        inferenceProperties.getNfs() != null ? inferenceProperties.getNfs() : "N/A",
        inferenceProperties.getUrl() != null ? inferenceProperties.getUrl() : "N/A",
        inferenceProperties.getBatchUrl() != null ? inferenceProperties.getBatchUrl() : "N/A",
        inferenceProperties.getGeojsonDir() != null
            ? inferenceProperties.getGeojsonDir()
            : "N/A",
        inferenceProperties.getJarPath() != null ? inferenceProperties.getJarPath() : "N/A",
        inferenceProperties.getInferenceServerName() != null
            ? inferenceProperties.getInferenceServerName()
            : "N/A");

    log.info(startupMessage);
  }

@@ -11,11 +11,6 @@ import com.kamco.cd.kamcoback.gukyuin.dto.DetectMastDto.Basic;
import com.kamco.cd.kamcoback.gukyuin.dto.DetectMastDto.DetectMastReq;
import com.kamco.cd.kamcoback.gukyuin.dto.GukYuinDto.GukYuinLinkableRes;
import com.kamco.cd.kamcoback.gukyuin.service.GukYuinApiService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiLabelJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiPnuJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiStatusJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiStbltJobService;
import io.swagger.v3.oas.annotations.Hidden;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
@@ -43,10 +38,6 @@ import org.springframework.web.bind.annotation.RestController;
public class GukYuinApiController {

  private final GukYuinApiService gukYuinApiService;
  private final GukYuinApiPnuJobService gukYuinApiPnuJobService;
  private final GukYuinApiStatusJobService gukYuinApiStatusJobService;
  private final GukYuinApiLabelJobService gukYuinApiLabelJobService;
  private final GukYuinApiStbltJobService gukYuinApiStbltJobService;

  /** Register detection results */
  @Operation(summary = "탐지결과 등록", description = "탐지결과 등록")
@@ -128,7 +119,7 @@ public class GukYuinApiController {
      })
  public ApiResponseDto<ChngDetectMastDto.ResultDto> selectChangeDetectionDtctIdList(
      @RequestParam(required = false) String chnDtctId) {
    return ApiResponseDto.ok(gukYuinApiService.listChnDtctId(chnDtctId));
    return ApiResponseDto.ok(gukYuinApiService.listChnDtctId(chnDtctId, ""));
  }

  @Operation(summary = "탐지결과 등록목록 조회(1건 조회)", description = "탐지결과 등록목록 조회")
@@ -192,7 +183,7 @@ public class GukYuinApiController {
      @PathVariable String chnDtctId,
      @RequestParam(defaultValue = "0") Integer pageIndex,
      @RequestParam(defaultValue = "10") Integer pageSize) {
    return ApiResponseDto.ok(gukYuinApiService.findChnContList(chnDtctId, pageIndex, pageSize));
    return ApiResponseDto.ok(gukYuinApiService.findChnContList(chnDtctId, pageIndex, pageSize, ""));
  }

  @Operation(summary = "탐지객체 조회 (탐지객체 1건 조회)", description = "탐지객체 조회 (탐지객체 1건 조회)")
@@ -272,7 +263,8 @@ public class GukYuinApiController {
  @PostMapping("/rlb/objt/{chnDtctObjtId}/lbl/{lblYn}")
  public ApiResponseDto<ChngDetectContDto.ResultLabelDto> updateChnDtctObjtLabelingYn(
      @PathVariable String chnDtctObjtId, @PathVariable String lblYn) {
    return ApiResponseDto.ok(gukYuinApiService.updateChnDtctObjtLabelingYn(chnDtctObjtId, lblYn));
    return ApiResponseDto.ok(
        gukYuinApiService.updateChnDtctObjtLabelingYn(chnDtctObjtId, lblYn, ""));
  }

  @Operation(summary = "국유in연동 등록", description = "국유in연동 등록")
@@ -309,7 +301,7 @@ public class GukYuinApiController {
      @PathVariable String chnDtctId,
      @Parameter(description = "날짜(기본은 어제 날짜)") @RequestParam(defaultValue = "20260205")
          String yyyymmdd) {
    return ApiResponseDto.ok(gukYuinApiService.findRlbDtctList(chnDtctId, yyyymmdd));
    return ApiResponseDto.ok(gukYuinApiService.findRlbDtctList(chnDtctId, yyyymmdd, ""));
  }

  @Operation(summary = "탐지객체 적합여부 조회 (객체별 조회)", description = "탐지객체 적합여부 조회 (객체별 조회)")
@@ -330,36 +322,4 @@ public class GukYuinApiController {
      @PathVariable String chnDtctObjtId) {
    return ApiResponseDto.ok(gukYuinApiService.findRlbDtctObject(chnDtctObjtId));
  }

  @Hidden
  @Operation(summary = "job test pnu", description = "job test pnu")
  @GetMapping("/job-test/pnu")
  public ApiResponseDto<Void> findGukYuinContListPnuUpdate() {
    gukYuinApiPnuJobService.findGukYuinContListPnuUpdate();
    return ApiResponseDto.ok(null);
  }

  @Hidden
  @Operation(summary = "job test status", description = "job test status")
  @GetMapping("/job-test/status")
  public ApiResponseDto<Void> findGukYuinMastCompleteYn() {
    gukYuinApiStatusJobService.findGukYuinMastCompleteYn();
    return ApiResponseDto.ok(null);
  }

  @Hidden
  @Operation(summary = "job test label", description = "job test label")
  @GetMapping("/job-test/label")
  public ApiResponseDto<Void> findLabelingCompleteSend() {
    gukYuinApiLabelJobService.findLabelingCompleteSend();
    return ApiResponseDto.ok(null);
  }

  @Hidden
  @Operation(summary = "job test stblt", description = "job test stblt")
  @GetMapping("/job-test/stblt")
  public ApiResponseDto<Void> findGukYuinEligibleForSurvey() {
    gukYuinApiStbltJobService.findGukYuinEligibleForSurvey();
    return ApiResponseDto.ok(null);
  }
}

@@ -83,7 +83,7 @@ public class ChngDetectMastDto {

    @Schema(
        description = "탐지결과 절대경로명 /kamco_nas/export/{chnDtctId}",
        example = "/kamco-nfs/dataset/export/D5F192EC76D34F6592035BE63A84F591")
        example = "{file.nfs}/dataset/export/D5F192EC76D34F6592035BE63A84F591")
    private String pathNm;

    @Schema(description = "사원번호", example = "123456")

@@ -66,8 +66,11 @@ public class GukYuinApiService {
  @Value("${gukyuin.cdi}")
  private String gukyuinCdiUrl;

  @Value("${file.dataset-dir}")
  private String datasetDir;
  @Value("${file.nfs}")
  private String nfs;

  // @Value("${file.dataset-dir}")
  // private String datasetDir;

  @Transactional
  public ChngDetectMastDto.RegistResDto regist(
@@ -259,7 +262,8 @@ public class GukYuinApiService {
  }

  // query the detection-object list
  public ResultContDto findChnContList(String chnDtctId, Integer pageIndex, Integer pageSize) {
  public ResultContDto findChnContList(
      String chnDtctId, Integer pageIndex, Integer pageSize, String batchYn) {

    String url =
        gukyuinCdiUrl
@@ -272,7 +276,7 @@ public class GukYuinApiService {
            + "&reqIp="
            + myip
            + "&reqEpno="
            + userUtil.getEmployeeNo();
            + ("Y".equals(batchYn) ? "BATCH" : userUtil.getEmployeeNo());

    ExternalCallResult<ChngDetectContDto.ResultContDto> result =
        externalHttpClient.call(
@@ -334,12 +338,12 @@ public class GukYuinApiService {
  }

  public ChngDetectContDto.ResultLabelDto updateChnDtctObjtLabelingYn(
      String chnDtctObjtId, String lblYn) {
      String chnDtctObjtId, String lblYn, String batchYn) {
    String url = gukyuinCdiUrl + "/rlb/objt/" + chnDtctObjtId + "/lbl/" + lblYn;

    ReqInfo info = new ReqInfo();
    info.setReqIp(myip);
    info.setReqEpno(userUtil.getEmployeeNo());
    info.setReqEpno("Y".equals(batchYn) ? "BATCH" : userUtil.getEmployeeNo());

    ExternalCallResult<ChngDetectContDto.ResultLabelDto> result =
        externalHttpClient.call(
@@ -391,7 +395,7 @@ public class GukYuinApiService {
    return result.body();
  }

  public ResultDto listChnDtctId(String chnDtctId) {
  public ResultDto listChnDtctId(String chnDtctId, String batchYn) {
    String url =
        gukyuinCdiUrl
            + "/chn/mast/"
@@ -399,7 +403,7 @@ public class GukYuinApiService {
            + "?reqIp="
            + myip
            + "&reqEpno="
            + userUtil.getEmployeeNo();
            + ("Y".equals(batchYn) ? "BATCH" : userUtil.getEmployeeNo());

    ExternalCallResult<ChngDetectMastDto.ResultDto> result =
        externalHttpClient.call(
@@ -452,7 +456,10 @@ public class GukYuinApiService {
      return new ResponseObj(ApiResponseCode.DUPLICATE_DATA, "이미 국유인 연동을 한 회차입니다.");
    }

    if (!Files.isDirectory(Path.of("/kamco-nfs/dataset/export/" + info.getUid()))) {
    // String kamconfsDatasetExportPathfsDatasetExportPath = "/kamco-nfs/dataset/export/";
    String kamconfsDatasetExportPathfsDatasetExportPath =
        String.format("%s%s", nfs, "/dataset/export/");
    if (!Files.isDirectory(Path.of(kamconfsDatasetExportPathfsDatasetExportPath + info.getUid()))) {
      return new ResponseObj(
          ApiResponseCode.NOT_FOUND_DATA, "파일 경로에 회차 실행 파일이 생성되지 않았습니다. 확인 부탁드립니다.");
    }
@@ -467,7 +474,7 @@ public class GukYuinApiService {
    reqDto.setCrtrYr(String.valueOf(info.getTargetYyyy()));
    reqDto.setChnDtctSno(String.valueOf(maxStage + 1));
    reqDto.setChnDtctId(info.getUid());
    reqDto.setPathNm("/kamco-nfs/dataset/export/" + info.getUid());
    reqDto.setPathNm(kamconfsDatasetExportPathfsDatasetExportPath + info.getUid());

    // round 1 must be marked finished before round 2 is sent
    // close out both the learn (inference) and the inference (training-data) records
@@ -528,7 +535,8 @@ public class GukYuinApiService {
    return result.body();
  }

  public ChngDetectMastDto.RlbDtctDto findRlbDtctList(String chnDtctId, String yyyymmdd) {
  public ChngDetectMastDto.RlbDtctDto findRlbDtctList(
      String chnDtctId, String yyyymmdd, String batchYn) {

    String url =
        gukyuinCdiUrl
@@ -537,7 +545,7 @@ public class GukYuinApiService {
            + "?reqIp="
            + myip
            + "&reqEpno="
            + userUtil.getEmployeeNo()
            + ("Y".equals(batchYn) ? "BATCH" : userUtil.getEmployeeNo())
            + "&yyyymmdd="
            + yyyymmdd;

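The batchYn flag is applied the same way in every outbound call shown above: when it is "Y", the requester's employee number is replaced by the literal "BATCH". A small helper could centralize that repeated ternary; this is a hedged suggestion, not code from the change, and the class name is illustrative. A caller would then write something like info.setReqEpno(ReqEpnoResolver.resolve(batchYn, userUtil.getEmployeeNo())).

```java
// Hedged sketch: one possible way to centralize the repeated batchYn ternary.
// "BATCH" and the "Y" flag value come from the diff above; the helper itself does not.
final class ReqEpnoResolver {

  private ReqEpnoResolver() {}

  /** Returns "BATCH" for batch-triggered calls, otherwise the caller's employee number. */
  static String resolve(String batchYn, String employeeNo) {
    return "Y".equals(batchYn) ? "BATCH" : employeeNo;
  }
}
```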
@@ -1,7 +1,6 @@
|
||||
package com.kamco.cd.kamcoback.inference;
|
||||
|
||||
import com.kamco.cd.kamcoback.common.download.DownloadExecutor;
|
||||
import com.kamco.cd.kamcoback.common.download.dto.DownloadSpec;
|
||||
import com.kamco.cd.kamcoback.common.download.RangeDownloadResponder;
|
||||
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
|
||||
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
|
||||
@@ -26,6 +25,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponses;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import jakarta.servlet.http.HttpServletRequest;
|
||||
import jakarta.validation.Valid;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
@@ -34,8 +34,8 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.log4j.Log4j2;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.http.MediaType;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.DeleteMapping;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
@@ -45,9 +45,9 @@ import org.springframework.web.bind.annotation.RequestBody;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;
|
||||
|
||||
@Tag(name = "추론관리", description = "추론관리 API")
|
||||
@Log4j2
|
||||
@RequestMapping("/api/inference")
|
||||
@RequiredArgsConstructor
|
||||
@RestController
|
||||
@@ -56,7 +56,7 @@ public class InferenceResultApiController {
|
||||
private final InferenceResultService inferenceResultService;
|
||||
private final MapSheetMngService mapSheetMngService;
|
||||
private final ModelMngService modelMngService;
|
||||
private final DownloadExecutor downloadExecutor;
|
||||
private final RangeDownloadResponder rangeDownloadResponder;
|
||||
|
||||
@Operation(summary = "추론관리 목록", description = "어드민 홈 > 추론관리 > 추론관리 > 추론관리 목록")
|
||||
@ApiResponses(
|
||||
@@ -151,7 +151,7 @@ public class InferenceResultApiController {
|
||||
@RequestBody
|
||||
@Valid
|
||||
InferenceResultDto.RegReq req) {
|
||||
UUID uuid = inferenceResultService.saveInferenceInfo(req);
|
||||
UUID uuid = inferenceResultService.run(req);
|
||||
return ApiResponseDto.ok(uuid);
|
||||
}
|
||||
|
||||
@@ -195,7 +195,7 @@ public class InferenceResultApiController {
|
||||
LocalDate endDttm,
|
||||
@Parameter(description = "키워드 (모델버전)", example = "M1.H1.E28") @RequestParam(required = false)
|
||||
String searchVal,
|
||||
@Parameter(description = "타입", example = "M1") @RequestParam(required = false)
|
||||
@Parameter(description = "타입", example = "G1") @RequestParam(required = false)
|
||||
String modelType,
|
||||
@RequestParam(defaultValue = "0") int page,
|
||||
@RequestParam(defaultValue = "20") int size) {
|
||||
@@ -356,9 +356,8 @@ public class InferenceResultApiController {
@ApiResponse(responseCode = "404", description = "파일 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@GetMapping(value = "/download/{uuid}")
public ResponseEntity<StreamingResponseBody> download(
@Parameter(example = "69c4e56c-e0bf-4742-9225-bba9aae39052") @PathVariable UUID uuid)
@GetMapping("/download/{uuid}")
public ResponseEntity<?> download(@PathVariable UUID uuid, HttpServletRequest request)
throws IOException {

String path;
@@ -374,8 +373,8 @@ public class InferenceResultApiController {

Path zipPath = Path.of(path);

return downloadExecutor.stream(
new DownloadSpec(uuid, zipPath, uid + ".zip", MediaType.APPLICATION_OCTET_STREAM));
// Range + 200/206/416 공통 처리 (추가 헤더 포함)
return rangeDownloadResponder.buildZipResponse(zipPath, uid + ".zip", request);
}

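The change above swaps the one-shot `DownloadExecutor.stream(...)` for `RangeDownloadResponder.buildZipResponse(...)`, which is what the comment about common 200/206/416 handling refers to. The responder itself is not part of this diff, so the following is only a rough sketch of how a Range-aware zip response can be built with Spring's own `HttpRange`/`ResourceRegion` support; class and variable names are illustrative, and 416 handling is left out.

```java
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.support.ResourceRegion;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpRange;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;

// Illustrative sketch only; the project's RangeDownloadResponder may differ.
class RangeZipResponseSketch {

    ResponseEntity<ResourceRegion> buildZipResponse(Path zipPath, String fileName, HttpHeaders requestHeaders)
            throws IOException {
        FileSystemResource zip = new FileSystemResource(zipPath);
        List<HttpRange> ranges = requestHeaders.getRange();

        // No Range header -> plain 200 with the whole file; otherwise 206 with the first requested region.
        ResourceRegion region =
                ranges.isEmpty()
                        ? new ResourceRegion(zip, 0, zip.contentLength())
                        : HttpRange.toResourceRegions(ranges, zip).get(0);
        HttpStatus status = ranges.isEmpty() ? HttpStatus.OK : HttpStatus.PARTIAL_CONTENT;

        return ResponseEntity.status(status)
                .contentType(MediaType.APPLICATION_OCTET_STREAM)
                .header(HttpHeaders.ACCEPT_RANGES, "bytes")
                .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + fileName + "\"")
                .body(region);
    }
}
```
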
@Operation(summary = "shp 파일 다운로드 이력 조회", description = "추론관리 분석결과 shp 파일 다운로드 이력 조회")
|
||||
@@ -415,7 +414,7 @@ public class InferenceResultApiController {
|
||||
downloadReq.setStartDate(strtDttm);
|
||||
downloadReq.setEndDate(endDttm);
|
||||
downloadReq.setSearchValue(searchValue);
|
||||
downloadReq.setRequestUri("/api/inference/download-audit/download/" + uuid);
|
||||
downloadReq.setRequestUri("/api/inference/download/" + uuid);
|
||||
|
||||
return ApiResponseDto.ok(inferenceResultService.getDownloadAudit(searchReq, downloadReq));
|
||||
}
|
||||
|
||||
@@ -246,15 +246,15 @@ public class InferenceResultDto {
|
||||
@NotBlank
|
||||
private String title;
|
||||
|
||||
@Schema(description = "M1", example = "b40e0f68-c1d8-49fc-93f9-a36270093861")
|
||||
@Schema(description = "G1", example = "643adead-f3d2-4f10-9037-862bee919399")
|
||||
@NotNull
|
||||
private UUID model1Uuid;
|
||||
|
||||
@Schema(description = "M2", example = "ec92b7d2-b5a3-4915-9bdf-35fb3ca8ad27")
|
||||
@Schema(description = "G2", example = "dd86b4ef-28e3-4e3d-9ee4-f60d9cb54e13")
|
||||
@NotNull
|
||||
private UUID model2Uuid;
|
||||
|
||||
@Schema(description = "M3", example = "37f45782-8ccf-4cf6-911c-a055a1510d39")
|
||||
@Schema(description = "G3", example = "58c1153e-dec6-4424-82a1-189083a9d9dc")
|
||||
@NotNull
|
||||
private UUID model3Uuid;
|
||||
|
||||
@@ -297,6 +297,30 @@ public class InferenceResultDto {
|
||||
@Schema(name = "InferenceStatusDetailDto", description = "추론(변화탐지) 진행상태")
|
||||
public static class InferenceStatusDetailDto {
|
||||
|
||||
@Schema(description = "모델1 사용시간 시작일시")
|
||||
@JsonFormatDttm
|
||||
ZonedDateTime m1ModelStartDttm;
|
||||
|
||||
@Schema(description = "모델2 사용시간 시작일시")
|
||||
@JsonFormatDttm
|
||||
ZonedDateTime m2ModelStartDttm;
|
||||
|
||||
@Schema(description = "모델3 사용시간 시작일시")
|
||||
@JsonFormatDttm
|
||||
ZonedDateTime m3ModelStartDttm;
|
||||
|
||||
@Schema(description = "모델1 사용시간 종료일시")
|
||||
@JsonFormatDttm
|
||||
ZonedDateTime m1ModelEndDttm;
|
||||
|
||||
@Schema(description = "모델2 사용시간 종료일시")
|
||||
@JsonFormatDttm
|
||||
ZonedDateTime m2ModelEndDttm;
|
||||
|
||||
@Schema(description = "모델3 사용시간 종료일시")
|
||||
@JsonFormatDttm
|
||||
ZonedDateTime m3ModelEndDttm;
|
||||
|
||||
@Schema(description = "탐지대상 도엽수")
|
||||
private Long detectingCnt;
|
||||
|
||||
@@ -336,30 +360,6 @@ public class InferenceResultDto {
|
||||
@Schema(description = "모델3 분석 실패")
|
||||
private Integer m3FailedJobs;
|
||||
|
||||
@Schema(description = "모델1 사용시간 시작일시")
|
||||
@JsonFormatDttm
|
||||
ZonedDateTime m1ModelStartDttm;
|
||||
|
||||
@Schema(description = "모델2 사용시간 시작일시")
|
||||
@JsonFormatDttm
|
||||
ZonedDateTime m2ModelStartDttm;
|
||||
|
||||
@Schema(description = "모델3 사용시간 시작일시")
|
||||
@JsonFormatDttm
|
||||
ZonedDateTime m3ModelStartDttm;
|
||||
|
||||
@Schema(description = "모델1 사용시간 종료일시")
|
||||
@JsonFormatDttm
|
||||
ZonedDateTime m1ModelEndDttm;
|
||||
|
||||
@Schema(description = "모델2 사용시간 종료일시")
|
||||
@JsonFormatDttm
|
||||
ZonedDateTime m2ModelEndDttm;
|
||||
|
||||
@Schema(description = "모델3 사용시간 종료일시")
|
||||
@JsonFormatDttm
|
||||
ZonedDateTime m3ModelEndDttm;
|
||||
|
||||
@Schema(description = "변화탐지 제목")
|
||||
private String title;
|
||||
|
||||
@@ -496,19 +496,19 @@ public class InferenceResultDto {
return MapSheetScope.getDescByCode(this.mapSheetScope);
}

@Schema(description = "M1 사용시간")
@Schema(description = "G1 사용시간")
@JsonProperty("m1ElapsedTim")
public String getM1ElapsedTime() {
return formatElapsedTime(this.m1ModelStartDttm, this.m1ModelEndDttm);
}

@Schema(description = "M2 사용시간")
@Schema(description = "G2 사용시간")
@JsonProperty("m2ElapsedTim")
public String getM2ElapsedTime() {
return formatElapsedTime(this.m2ModelStartDttm, this.m2ModelEndDttm);
}

@Schema(description = "M3 사용시간")
@Schema(description = "G3 사용시간")
@JsonProperty("m3ElapsedTim")
public String getM3ElapsedTime() {
return formatElapsedTime(this.m3ModelStartDttm, this.m3ModelEndDttm);
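`formatElapsedTime` is referenced by the three getters above but not shown in this hunk. A minimal sketch of what such a helper could look like, assuming it simply formats the gap between start and end as `HH:mm:ss` and reports nothing while either timestamp is missing:

```java
import java.time.Duration;
import java.time.ZonedDateTime;

// Illustrative only; the real formatElapsedTime inside InferenceResultDto is not shown in this diff.
final class ElapsedTimeSketch {
    static String formatElapsedTime(ZonedDateTime start, ZonedDateTime end) {
        if (start == null || end == null) {
            return null; // model not started or not finished yet
        }
        Duration d = Duration.between(start, end);
        return String.format("%02d:%02d:%02d", d.toHours(), d.toMinutesPart(), d.toSecondsPart());
    }
}
```
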
@@ -676,4 +676,13 @@ public class InferenceResultDto {
|
||||
private Long m2ModelBatchId;
|
||||
private Long m3ModelBatchId;
|
||||
}
|
||||
|
||||
@Getter
|
||||
@Setter
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public static class MapSheetFallbackYearDto {
|
||||
private String mapSheetNum;
|
||||
private Integer mngYyyy;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
package com.kamco.cd.kamcoback.inference.dto;
|
||||
|
||||
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
|
||||
import java.time.ZonedDateTime;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.Setter;
|
||||
import org.locationtech.jts.geom.Geometry;
|
||||
|
||||
public class InferenceResultsTestingDto {
|
||||
|
||||
@@ -22,4 +24,31 @@ public class InferenceResultsTestingDto {
|
||||
return new ShpDto(e.getBatchId(), e.getUid(), e.getMapId());
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@Setter
|
||||
@AllArgsConstructor
|
||||
@NoArgsConstructor
|
||||
public static class Basic {
|
||||
private Double probability;
|
||||
private Long beforeYear;
|
||||
private Long afterYear;
|
||||
private String mapId;
|
||||
private String modelVersion;
|
||||
private String clsModelPath;
|
||||
private String clsModelVersion;
|
||||
private String cdModelType;
|
||||
private Long id;
|
||||
private String modelName;
|
||||
private Long batchId;
|
||||
private Double area;
|
||||
private String beforeC;
|
||||
private Double beforeP;
|
||||
private String afterC;
|
||||
private Double afterP;
|
||||
private Long seq;
|
||||
private ZonedDateTime createdDate;
|
||||
private String uid;
|
||||
private Geometry geometry;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,8 +5,10 @@ import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.Setter;
|
||||
import lombok.ToString;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
/** AI API 추론 실행 DTO */
|
||||
@Slf4j
|
||||
@Getter
|
||||
@Setter
|
||||
@NoArgsConstructor
|
||||
|
||||
@@ -0,0 +1,10 @@
package com.kamco.cd.kamcoback.inference.service;

import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequiredArgsConstructor
@RequestMapping("/api/inference/manual")
public class InferenceManualApiController {}
@@ -0,0 +1,24 @@
package com.kamco.cd.kamcoback.inference.service;

import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;

@Service
@RequiredArgsConstructor
public class InferenceManualService {
private final InferenceResultCoreService inferenceResultCoreService;

public void getResultsTesting(List<Long> batchIds) {
List<InferenceResultsTestingDto.Basic> resultList =
inferenceResultCoreService.getInferenceResults(batchIds);

if (resultList.isEmpty()) {}

for (InferenceResultsTestingDto.Basic result : resultList) {
System.out.println(result);
}
}
}
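`InferenceManualApiController` lands as an empty shell in the same package as the new service. Purely for illustration (not part of these commits), one way the two could be wired together later:

```java
// Hypothetical wiring, for illustration only; endpoint name and parameters are made up.
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequiredArgsConstructor
@RequestMapping("/api/inference/manual")
public class InferenceManualApiController {

    private final InferenceManualService inferenceManualService;

    @GetMapping("/results-testing")
    public void resultsTesting(@RequestParam List<Long> batchIds) {
        inferenceManualService.getResultsTesting(batchIds);
    }
}
```
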
@@ -21,6 +21,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.DetectOption;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceLearnDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetFallbackYearDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetNumDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
@@ -45,6 +46,7 @@ import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -63,6 +65,7 @@ import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

/** 추론 관리 */
@Service
@Log4j2
@RequiredArgsConstructor
@@ -116,6 +119,273 @@ public class InferenceResultService {
return dto.getUuid();
}

/**
* 추론 실행 - 추론제외, 이전년도 도엽 사용 분기
*
* @param req
* @return
*/
@Transactional
public UUID run(InferenceResultDto.RegReq req) {
if (req.getDetectOption().equals(DetectOption.EXCL.getId())) {
return runExcl(req);
}
return runPrev(req);
}

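`run` only dispatches on the detect option. `DetectOption` is defined elsewhere in `InferenceResultDto`; the shape this branch assumes is roughly the following (ids and comments here are guesses, not the actual definition):

```java
// Assumed shape only; the real DetectOption enum lives in InferenceResultDto and may differ.
public enum DetectOption {
    EXCL, // exclude target sheets that have no counterpart in the compare year
    PREV; // fall back to an earlier year's imagery for sheets missing in the compare year

    public String getId() {
        return name(); // run(..) compares req.getDetectOption() against this id
    }
}
```
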
/**
|
||||
* 변화탐지 옵션 추론제외 실행
|
||||
*
|
||||
* @param req
|
||||
* @return
|
||||
*/
|
||||
public UUID runExcl(InferenceResultDto.RegReq req) {
|
||||
// target 도엽 조회
|
||||
List<MngListDto> targetDtoList = mapSheetMngCoreService.getHstMapSheetList(req);
|
||||
|
||||
// target 리스트 추출 (null 제거 + 중복 제거)
|
||||
List<String> targetList =
|
||||
targetDtoList.stream()
|
||||
.map(MngListDto::getMapSheetNum)
|
||||
.filter(Objects::nonNull)
|
||||
.distinct()
|
||||
.toList();
|
||||
|
||||
// compare 도엽번호 리스트 조회 (null 제거 + 중복 제거)
|
||||
List<String> compareList =
|
||||
mapSheetMngCoreService.getMapSheetNumByHst(req.getCompareYyyy()).stream()
|
||||
.filter(Objects::nonNull)
|
||||
.distinct()
|
||||
.toList();
|
||||
|
||||
// compare 기준
|
||||
Set<String> compareSet = new HashSet<>(compareList);
|
||||
|
||||
// target 기준으로 compare에 존재하는 도엽만 필터링
|
||||
List<String> filteredTargetList = targetList.stream().filter(compareSet::contains).toList();
|
||||
|
||||
// 도엽 비교 로그 출력
|
||||
logYearComparison(targetList, compareList, filteredTargetList);
|
||||
|
||||
// compare geojson 파일 생성
|
||||
Scene compareScene =
|
||||
getSceneInference(
|
||||
req.getCompareYyyy().toString(), // 기준년도
|
||||
filteredTargetList, // 교집합 도엽
|
||||
req.getMapSheetScope(), // ALL / 부분
|
||||
req.getDetectOption()); // EXCL / PREV
|
||||
|
||||
// target geojson 파일 생성
|
||||
Scene targetScene =
|
||||
getSceneInference(
|
||||
req.getTargetYyyy().toString(), // 대상년도
|
||||
filteredTargetList, // 교집합 도엽
|
||||
req.getMapSheetScope(),
|
||||
req.getDetectOption());
|
||||
|
||||
// 추론 실행
|
||||
return executeInference(
|
||||
req,
|
||||
targetDtoList, // 전체 target 목록
|
||||
filteredTargetList, // 최종 추론 대상
|
||||
compareScene, // compare geojson
|
||||
targetScene // target geojson
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* 변화탐지 옵션 이전 년도 도엽 사용 실행
|
||||
*
|
||||
* @param req
|
||||
* @return
|
||||
*/
|
||||
@Transactional
|
||||
public UUID runPrev(InferenceResultDto.RegReq req) {
|
||||
// target 목록 조회
|
||||
List<MngListDto> targetDtoList = mapSheetMngCoreService.getHstMapSheetList(req);
|
||||
|
||||
// target 도엽번호 리스트 추출 중복 제거
|
||||
List<String> targetList =
|
||||
targetDtoList.stream()
|
||||
.map(MngListDto::getMapSheetNum)
|
||||
.filter(Objects::nonNull)
|
||||
.distinct()
|
||||
.toList();
|
||||
|
||||
// compare 목록 조회
|
||||
List<MapSheetFallbackYearDto> compareDtoList =
|
||||
new ArrayList<>(mapSheetMngCoreService.getMapSheetNumDtoByHst(req.getCompareYyyy()));
|
||||
|
||||
// compare 도엽번호 Set 구성
|
||||
Set<String> compareSet =
|
||||
compareDtoList.stream()
|
||||
.map(MapSheetFallbackYearDto::getMapSheetNum)
|
||||
.filter(Objects::nonNull)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
// target에는 있으나 compare에는 없는 도엽 추출
|
||||
List<String> remainingTargetList =
|
||||
targetList.stream().filter(s -> !compareSet.contains(s)).toList();
|
||||
|
||||
// compare에 없을때 이전 년도 사용 가능여부 조회
|
||||
List<MapSheetFallbackYearDto> fallbackYearDtoList =
|
||||
mapSheetMngCoreService.findFallbackCompareYearByMapSheets(
|
||||
req.getTargetYyyy(), // 대상년도 기준
|
||||
remainingTargetList // compare에 없는 도엽들
|
||||
);
|
||||
|
||||
// 기존 compare , 사용가능 이전년도 정보 합치기
|
||||
compareDtoList.addAll(fallbackYearDtoList);
|
||||
|
||||
// 중복제거하여 사용할 compare 도엽 목록
|
||||
Set<String> availableCompareSheets =
|
||||
compareDtoList.stream()
|
||||
.map(MapSheetFallbackYearDto::getMapSheetNum)
|
||||
.filter(Objects::nonNull)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
// 최종 추론 대상 도엽
|
||||
// target 기준으로 compare 에 존재하는 도엽만 추출
|
||||
List<String> filteredTargetList =
|
||||
targetList.stream().filter(availableCompareSheets::contains).toList();
|
||||
|
||||
// compareDtoList도 최종 기준으로 필터
|
||||
Set<String> filteredTargetSet = new HashSet<>(filteredTargetList);
|
||||
|
||||
List<MapSheetFallbackYearDto> filteredCompareDtoList =
|
||||
compareDtoList.stream()
|
||||
.filter(d -> d.getMapSheetNum() != null)
|
||||
.filter(d -> filteredTargetSet.contains(d.getMapSheetNum()))
|
||||
.toList();
|
||||
|
||||
// compare only 계산 (target에는 없는 compare 도엽 수) log 용
|
||||
long compareOnlyCount =
|
||||
compareDtoList.stream()
|
||||
.map(MapSheetFallbackYearDto::getMapSheetNum)
|
||||
.filter(s -> s != null && !targetList.contains(s))
|
||||
.count();
|
||||
|
||||
// 연도별 도엽 비교 로그 출력
|
||||
log.info(
|
||||
"""
|
||||
===== MapSheet Year Comparison =====
|
||||
target Total: {}
|
||||
compare Total: {}
|
||||
Intersection: {}
|
||||
target Only (Excluded): {}
|
||||
compare Only: {}
|
||||
====================================
|
||||
""",
|
||||
targetList.size(), // target count
|
||||
compareDtoList.size(), // compare count
|
||||
filteredTargetList.size(), // target 기준으로 compare 비교하여 최종 추론할 도엽 count
|
||||
targetList.size() - filteredTargetList.size(), // compare에 존재하지 않는 target 도엽 수
|
||||
compareOnlyCount); // target 에 존재하지 않는 compare 도엽수
|
||||
|
||||
// compare 기준 geojson 생성 (년도 fallback 반영)
|
||||
Scene compareScene =
|
||||
getSceneInference(
|
||||
filteredCompareDtoList,
|
||||
req.getCompareYyyy().toString(),
|
||||
req.getMapSheetScope(),
|
||||
req.getDetectOption());
|
||||
|
||||
// target 기준 geojson 생성
|
||||
Scene targetScene =
|
||||
getSceneInference(
|
||||
req.getTargetYyyy().toString(),
|
||||
filteredTargetList,
|
||||
req.getMapSheetScope(),
|
||||
req.getDetectOption());
|
||||
|
||||
// AI 추론 실행
|
||||
return executeInference(req, targetDtoList, filteredTargetList, compareScene, targetScene);
|
||||
}
|
||||
|
||||
/**
|
||||
* learn 테이블 저장 및 AI 추론 API 호출
|
||||
*
|
||||
* @param req
|
||||
* @param targetDtoList
|
||||
* @param filteredTargetList
|
||||
* @param modelComparePath
|
||||
* @param modelTargetPath
|
||||
* @return
|
||||
*/
|
||||
private UUID executeInference(
|
||||
InferenceResultDto.RegReq req,
|
||||
List<MngListDto> targetDtoList,
|
||||
List<String> filteredTargetList,
|
||||
Scene modelComparePath,
|
||||
Scene modelTargetPath) {
|
||||
Set<String> filteredSet = new HashSet<>(filteredTargetList);
|
||||
|
||||
List<MngListDto> newTargetList =
|
||||
targetDtoList.stream()
|
||||
.filter(m -> m.getMapSheetNum() != null)
|
||||
.filter(m -> filteredSet.contains(m.getMapSheetNum()))
|
||||
.toList();
|
||||
|
||||
UUID uuid = inferenceResultCoreService.saveInferenceInfo(req, newTargetList);
|
||||
|
||||
pred_requests_areas predRequestsAreas = new pred_requests_areas();
|
||||
predRequestsAreas.setInput1_year(req.getCompareYyyy());
|
||||
predRequestsAreas.setInput2_year(req.getTargetYyyy());
|
||||
predRequestsAreas.setInput1_scene_path(modelComparePath.getFilePath());
|
||||
predRequestsAreas.setInput2_scene_path(modelTargetPath.getFilePath());
|
||||
|
||||
InferenceSendDto m1 = this.getModelInfo(req.getModel1Uuid());
|
||||
m1.setPred_requests_areas(predRequestsAreas);
|
||||
|
||||
log.info("[INFERENCE] Start m1 = {}", m1);
|
||||
|
||||
Long batchId = ensureAccepted(m1);
|
||||
|
||||
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
|
||||
saveInferenceAiDto.setUuid(uuid);
|
||||
saveInferenceAiDto.setBatchId(batchId);
|
||||
saveInferenceAiDto.setStatus(Status.IN_PROGRESS.getId());
|
||||
saveInferenceAiDto.setType(ModelType.G1.getId());
|
||||
saveInferenceAiDto.setInferStartDttm(ZonedDateTime.now());
|
||||
saveInferenceAiDto.setModelComparePath(modelComparePath.getFilePath());
|
||||
saveInferenceAiDto.setModelTargetPath(modelTargetPath.getFilePath());
|
||||
saveInferenceAiDto.setModelStartDttm(ZonedDateTime.now());
|
||||
|
||||
inferenceResultCoreService.update(saveInferenceAiDto);
|
||||
|
||||
return uuid;
|
||||
}
|
||||
|
||||
/**
|
||||
* EXCL 로그
|
||||
*
|
||||
* @param targetList
|
||||
* @param compareList
|
||||
* @param filteredTargetList
|
||||
*/
|
||||
private void logYearComparison(
|
||||
List<String> targetList, List<String> compareList, List<String> filteredTargetList) {
|
||||
Set<String> targetSet = new HashSet<>(targetList);
|
||||
|
||||
long compareOnlyCount = compareList.stream().filter(s -> !targetSet.contains(s)).count();
|
||||
|
||||
log.info(
|
||||
"""
|
||||
===== MapSheet Year Comparison =====
|
||||
target Total: {}
|
||||
compare Total: {}
|
||||
Intersection: {}
|
||||
target Only (Excluded): {}
|
||||
compare Only: {}
|
||||
====================================
|
||||
""",
|
||||
targetList.size(), // target count
|
||||
compareList.size(), // compare count
|
||||
filteredTargetList.size(), // target 기준으로 compare 비교하여 최종 추론할 도엽 count
|
||||
targetList.size() - filteredTargetList.size(), // compare에 존재하지 않는 target 도엽 수
|
||||
compareOnlyCount); // target 에 존재하지 않는 compare 도엽수
|
||||
}
|
||||
|
||||
/**
|
||||
* 변화탐지 실행 정보 생성
|
||||
*
|
||||
@@ -238,6 +508,8 @@ public class InferenceResultService {
|
||||
predRequestsAreas.setInput2_scene_path(modelTargetPath.getFilePath());
|
||||
|
||||
InferenceSendDto m1 = this.getModelInfo(req.getModel1Uuid());
|
||||
|
||||
log.info("[INFERENCE] Start m1 = {}", m1);
|
||||
m1.setPred_requests_areas(predRequestsAreas);
|
||||
|
||||
// ai 추론 실행 api 호출
|
||||
@@ -248,7 +520,7 @@ public class InferenceResultService {
|
||||
saveInferenceAiDto.setUuid(uuid);
|
||||
saveInferenceAiDto.setBatchId(batchId);
|
||||
saveInferenceAiDto.setStatus(Status.IN_PROGRESS.getId());
|
||||
saveInferenceAiDto.setType("M1");
|
||||
saveInferenceAiDto.setType(ModelType.G1.getId());
|
||||
saveInferenceAiDto.setInferStartDttm(ZonedDateTime.now());
|
||||
saveInferenceAiDto.setModelComparePath(modelComparePath.getFilePath());
|
||||
saveInferenceAiDto.setModelTargetPath(modelTargetPath.getFilePath());
|
||||
@@ -321,10 +593,11 @@ public class InferenceResultService {
|
||||
}
|
||||
|
||||
/**
|
||||
* 추론 AI API 호출
|
||||
* 추론 AI API 호출 batch id를 리턴
|
||||
*
|
||||
* @param dto
|
||||
*/
|
||||
// 같은함수가 왜 두개지
|
||||
private Long ensureAccepted(InferenceSendDto dto) {
|
||||
|
||||
if (dto == null) {
|
||||
@@ -332,6 +605,14 @@ public class InferenceResultService {
|
||||
throw new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST);
|
||||
}
|
||||
|
||||
// [중복]운영환경일때 경로수정 dean 260226
|
||||
if (profile != null && profile.equals("prod")) {
|
||||
log.info("========================================================");
|
||||
log.info("[CHANGE INFERENCE] profile = {} Inforence req", profile);
|
||||
log.info("========================================================");
|
||||
log.info("");
|
||||
}
|
||||
|
||||
// 1) 요청 로그
|
||||
try {
|
||||
log.debug("Inference request dto={}", objectMapper.writeValueAsString(dto));
|
||||
@@ -340,19 +621,22 @@ public class InferenceResultService {
|
||||
}
|
||||
|
||||
// 2) local 환경 임시 처리
|
||||
if ("local".equals(profile)) {
|
||||
if (dto.getPred_requests_areas() == null) {
|
||||
throw new IllegalStateException("pred_requests_areas is null");
|
||||
}
|
||||
dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson");
|
||||
dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson");
|
||||
}
|
||||
// if ("local".equals(profile)) {
|
||||
// if (dto.getPred_requests_areas() == null) {
|
||||
// throw new IllegalStateException("pred_requests_areas is null");
|
||||
// }
|
||||
//
|
||||
// dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson");
|
||||
//
|
||||
// dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson");
|
||||
// }
|
||||
|
||||
// 3) HTTP 호출
|
||||
HttpHeaders headers = new HttpHeaders();
|
||||
headers.setContentType(MediaType.APPLICATION_JSON);
|
||||
headers.setAccept(List.of(MediaType.APPLICATION_JSON));
|
||||
|
||||
// 추론 실행 API 호출
|
||||
ExternalCallResult<String> result =
|
||||
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
|
||||
|
||||
@@ -367,12 +651,14 @@ public class InferenceResultService {
|
||||
objectMapper.readValue(result.body(), new TypeReference<>() {});
|
||||
|
||||
if (list.isEmpty()) {
|
||||
throw new IllegalStateException("Inference response is empty");
|
||||
throw new CustomApiException(
|
||||
"NOT_FOUND", HttpStatus.NOT_FOUND, "Inference response is empty");
|
||||
}
|
||||
|
||||
Object batchIdObj = list.get(0).get("batch_id");
|
||||
if (batchIdObj == null) {
|
||||
throw new IllegalStateException("batch_id not found in response");
|
||||
throw new CustomApiException(
|
||||
"NOT_FOUND", HttpStatus.NOT_FOUND, "batch_id not found in response");
|
||||
}
|
||||
|
||||
return Long.valueOf(batchIdObj.toString());
|
||||
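For reference, the parsing that `ensureAccepted` does on the AI server's reply, shown as a standalone snippet with a made-up response body (the real API contract is not part of this diff):

```java
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.util.Map;

public class BatchIdParseExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical body; only the presence of "batch_id" in the first element matters here.
        String body = "[{\"batch_id\": 1024, \"status\": \"accepted\"}]";

        List<Map<String, Object>> list =
                new ObjectMapper().readValue(body, new TypeReference<>() {});

        Object batchIdObj = list.get(0).get("batch_id");
        Long batchId = Long.valueOf(batchIdObj.toString());
        System.out.println(batchId); // 1024
    }
}
```
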
@@ -391,6 +677,7 @@ public class InferenceResultService {
|
||||
*/
|
||||
private InferenceSendDto getModelInfo(UUID uuid) {
|
||||
|
||||
// 모델정보 조회
|
||||
Basic modelInfo = modelMngCoreService.findByModelUuid(uuid);
|
||||
|
||||
String cdModelPath = "";
|
||||
@@ -414,12 +701,12 @@ public class InferenceResultService {
|
||||
|
||||
String modelType = "";
|
||||
|
||||
if (modelInfo.getModelType().equals(ModelType.M1.getId())) {
|
||||
modelType = "G1";
|
||||
} else if (modelInfo.getModelType().equals(ModelType.M2.getId())) {
|
||||
modelType = "G2";
|
||||
if (modelInfo.getModelType().equals(ModelType.G1.getId())) {
|
||||
modelType = ModelType.G1.getId();
|
||||
} else if (modelInfo.getModelType().equals(ModelType.G2.getId())) {
|
||||
modelType = ModelType.G2.getId();
|
||||
} else {
|
||||
modelType = "G3";
|
||||
modelType = ModelType.G3.getId();
|
||||
}
|
||||
|
||||
InferenceSendDto sendDto = new InferenceSendDto();
|
||||
@@ -429,7 +716,8 @@ public class InferenceResultService {
|
||||
sendDto.setCls_model_path(cdClsModelPath);
|
||||
sendDto.setCls_model_version(modelInfo.getModelVer());
|
||||
sendDto.setCd_model_type(modelType);
|
||||
sendDto.setPriority(modelInfo.getPriority());
|
||||
sendDto.setPriority(5d);
|
||||
log.info("[Inference Send]SendDto={}", sendDto);
|
||||
return sendDto;
|
||||
}
|
||||
|
||||
@@ -447,6 +735,23 @@ public class InferenceResultService {
|
||||
yyyy, mapSheetNums, mapSheetScope, detectOption);
|
||||
}
|
||||
|
||||
/**
|
||||
* 년도 별로 조회하여 geojson 파일 생성
|
||||
*
|
||||
* @param yearDtos
|
||||
* @param year
|
||||
* @param mapSheetScope
|
||||
* @param detectOption
|
||||
* @return
|
||||
*/
|
||||
private Scene getSceneInference(
|
||||
List<MapSheetFallbackYearDto> yearDtos,
|
||||
String year,
|
||||
String mapSheetScope,
|
||||
String detectOption) {
|
||||
return mapSheetMngCoreService.getSceneInference(yearDtos, year, mapSheetScope, detectOption);
|
||||
}
|
||||
|
||||
/**
|
||||
* 분석결과 요약정보
|
||||
*
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package com.kamco.cd.kamcoback.label;
|
||||
|
||||
import com.kamco.cd.kamcoback.common.download.DownloadExecutor;
|
||||
import com.kamco.cd.kamcoback.common.download.dto.DownloadSpec;
|
||||
import com.kamco.cd.kamcoback.common.download.RangeDownloadResponder;
|
||||
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InferenceDetail;
|
||||
@@ -23,10 +22,13 @@ import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponses;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import jakarta.servlet.http.HttpServletRequest;
|
||||
import jakarta.validation.Valid;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.time.LocalDate;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
@@ -34,7 +36,6 @@ import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.coyote.BadRequestException;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.http.MediaType;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PathVariable;
|
||||
@@ -43,7 +44,6 @@ import org.springframework.web.bind.annotation.RequestBody;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;
|
||||
|
||||
@Slf4j
|
||||
@Tag(name = "라벨링 작업 관리", description = "라벨링 작업 배정 및 통계 조회 API")
|
||||
@@ -53,7 +53,7 @@ import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBo
|
||||
public class LabelAllocateApiController {
|
||||
|
||||
private final LabelAllocateService labelAllocateService;
|
||||
private final DownloadExecutor downloadExecutor;
|
||||
private final RangeDownloadResponder rangeDownloadResponder;
|
||||
|
||||
@Value("${file.dataset-response}")
|
||||
private String responsePath;
|
||||
@@ -382,19 +382,17 @@ public class LabelAllocateApiController {
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
|
||||
})
|
||||
@GetMapping("/download/{uuid}")
|
||||
public ResponseEntity<StreamingResponseBody> download(
|
||||
@Parameter(example = "6d8d49dc-0c9d-4124-adc7-b9ca610cc394") @PathVariable UUID uuid)
|
||||
public ResponseEntity<?> download(@PathVariable UUID uuid, HttpServletRequest request)
|
||||
throws IOException {
|
||||
|
||||
if (!labelAllocateService.isDownloadable(uuid)) {
|
||||
throw new BadRequestException();
|
||||
}
|
||||
|
||||
String uid = labelAllocateService.findLearnUid(uuid);
|
||||
Path zipPath = Paths.get(responsePath).resolve(uid + ".zip");
|
||||
|
||||
return downloadExecutor.stream(
|
||||
new DownloadSpec(uuid, zipPath, uid + ".zip", MediaType.APPLICATION_OCTET_STREAM));
|
||||
if (!Files.isRegularFile(zipPath)) {
|
||||
throw new BadRequestException();
|
||||
}
|
||||
|
||||
return rangeDownloadResponder.buildZipResponse(zipPath, uid + ".zip", request);
|
||||
}
|
||||
|
||||
@Operation(summary = "라벨 파일 다운로드 이력 조회", description = "라벨 파일 다운로드 이력 조회")
|
||||
@@ -461,4 +459,43 @@ public class LabelAllocateApiController {
|
||||
UUID uuid) {
|
||||
return ApiResponseDto.ok(labelAllocateService.isDownloadable(uuid));
|
||||
}
|
||||
|
||||
@Operation(
|
||||
summary = "라벨링작업 관리 > 추가 작업 배정(실태조사 추가되면)",
|
||||
description = "라벨링작업 관리 > 추가 작업 배정(실태조사 추가되면)")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(
|
||||
responseCode = "201",
|
||||
description = "등록 성공",
|
||||
content =
|
||||
@Content(
|
||||
mediaType = "application/json",
|
||||
schema = @Schema(implementation = Long.class))),
|
||||
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
|
||||
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
|
||||
})
|
||||
@PostMapping("/allocate-add-stblt")
|
||||
public ApiResponseDto<ApiResponseDto.ResponseObj> labelAllocateAddStblt(
|
||||
@RequestBody @Valid LabelAllocateDto.AllocateAddStbltDto dto) {
|
||||
|
||||
return ApiResponseDto.okObject(
|
||||
labelAllocateService.allocateAddStbltYn(
|
||||
dto.getTotalCnt(), dto.getUuid(), dto.getLabelers(), dto.getBaseDate()));
|
||||
}
|
||||
|
||||
@Operation(summary = "라벨링 추가 할당 가능한 건수", description = "라벨링 추가 할당 가능한 건수 API")
|
||||
@ApiResponses(
|
||||
value = {
|
||||
@ApiResponse(responseCode = "200", description = "조회 성공"),
|
||||
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음"),
|
||||
@ApiResponse(responseCode = "500", description = "서버 오류")
|
||||
})
|
||||
@GetMapping("/allocate-add-cnt")
|
||||
public ApiResponseDto<Long> allocateAddCnt(
|
||||
@RequestParam UUID uuid, @RequestParam LocalDate baseDate) {
|
||||
|
||||
return ApiResponseDto.ok(labelAllocateService.findAllocateAddCnt(uuid, baseDate));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ package com.kamco.cd.kamcoback.label.dto;
|
||||
import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose;
|
||||
import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
@@ -365,9 +366,35 @@ public class LabelAllocateDto {
|
||||
@AllArgsConstructor
|
||||
@NoArgsConstructor
|
||||
public static class InferenceLearnDto {
|
||||
|
||||
private UUID analUuid;
|
||||
private String learnUid;
|
||||
private String analState;
|
||||
private Long analId;
|
||||
}
|
||||
|
||||
@Getter
|
||||
@Setter
|
||||
@AllArgsConstructor
|
||||
public static class AllocateAddStbltDto {
|
||||
|
||||
@Schema(description = "총 잔여 건수", example = "179")
|
||||
private Integer totalCnt;
|
||||
|
||||
@Schema(
|
||||
description = "추가할당할 라벨러",
|
||||
example =
|
||||
"""
|
||||
[
|
||||
"123454", "654321", "222233", "777222"
|
||||
]
|
||||
""")
|
||||
private List<String> labelers;
|
||||
|
||||
@Schema(description = "회차 마스터 key", example = "c0e77cc7-8c28-46ba-9ca4-11e90246ab44")
|
||||
private UUID uuid;
|
||||
|
||||
@Schema(description = "기준일자", example = "2026-02-20")
|
||||
private LocalDate baseDate;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ import com.kamco.cd.kamcoback.log.dto.AuditLogDto;
|
||||
import com.kamco.cd.kamcoback.log.dto.AuditLogDto.DownloadReq;
|
||||
import com.kamco.cd.kamcoback.postgres.core.AuditLogCoreService;
|
||||
import com.kamco.cd.kamcoback.postgres.core.LabelAllocateCoreService;
|
||||
import java.time.LocalDate;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.UUID;
|
||||
@@ -27,11 +28,12 @@ import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Propagation;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@Transactional
|
||||
@Transactional(readOnly = true)
|
||||
@RequiredArgsConstructor
|
||||
public class LabelAllocateService {
|
||||
|
||||
@@ -276,6 +278,7 @@ public class LabelAllocateService {
|
||||
return labelAllocateCoreService.findLabelingIngProcessCnt();
|
||||
}
|
||||
|
||||
@Transactional(propagation = Propagation.NOT_SUPPORTED)
|
||||
public String findLearnUid(UUID uuid) {
|
||||
return labelAllocateCoreService.findLearnUid(uuid);
|
||||
}
|
||||
@@ -300,4 +303,52 @@ public class LabelAllocateService {
|
||||
public boolean isDownloadable(UUID uuid) {
|
||||
return labelAllocateCoreService.isDownloadable(uuid);
|
||||
}
|
||||
|
||||
/**
|
||||
* 실태조사가 값 들어온 기간만큼 할당하는 로직 (최초 할당 이후 작업)
|
||||
*
|
||||
* @param uuid
|
||||
* @param targetUsers
|
||||
* @return
|
||||
*/
|
||||
@Transactional
|
||||
public ApiResponseDto.ResponseObj allocateAddStbltYn(
|
||||
Integer totalCnt, UUID uuid, List<String> targetUsers, LocalDate baseDate) {
|
||||
|
||||
int userCount = targetUsers.size();
|
||||
if (userCount == 0) {
|
||||
return new ApiResponseDto.ResponseObj(ApiResponseCode.BAD_REQUEST, "추가 할당할 라벨러를 선택해주세요.");
|
||||
}
|
||||
|
||||
int base = totalCnt / userCount;
|
||||
int remainder = totalCnt % userCount;
|
||||
Long lastId = null;
|
||||
List<AllocateInfoDto> allIds =
|
||||
labelAllocateCoreService.fetchNextIdsAddStbltYn(
|
||||
uuid, baseDate, lastId, totalCnt.longValue());
|
||||
|
||||
// MapSheetAnalInferenceEntity analUid 가져오기
|
||||
Long analUid = labelAllocateCoreService.findMapSheetAnalInferenceUid(uuid);
|
||||
|
||||
int index = 0;
|
||||
for (int i = 0; i < userCount; i++) {
|
||||
int assignCount = base;
|
||||
// 마지막 사람에게 나머지 몰아주기
|
||||
if (i == userCount - 1) {
|
||||
assignCount += remainder;
|
||||
}
|
||||
|
||||
int end = index + assignCount;
|
||||
List<AllocateInfoDto> sub = allIds.subList(index, end);
|
||||
|
||||
labelAllocateCoreService.assignOwner(sub, targetUsers.get(i), analUid);
|
||||
index = end;
|
||||
}
|
||||
|
||||
return new ApiResponseDto.ResponseObj(ApiResponseCode.OK, "추가 할당이 완료되었습니다.");
|
||||
}
|
||||
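`allocateAddStbltYn` splits `totalCnt` evenly across the selected labelers and gives the remainder to the last one. With the `@Schema` example values (179 remaining items, 4 labelers) that yields 44 / 44 / 44 / 47. A standalone sketch of just that arithmetic:

```java
import java.util.List;

public class AllocateSplitExample {
    public static void main(String[] args) {
        int totalCnt = 179; // remaining items, as in the @Schema example
        List<String> labelers = List.of("123454", "654321", "222233", "777222");

        int userCount = labelers.size();
        int base = totalCnt / userCount;      // 44
        int remainder = totalCnt % userCount; // 3

        int index = 0;
        for (int i = 0; i < userCount; i++) {
            // the last labeler absorbs the remainder, as in the service code
            int assignCount = base + (i == userCount - 1 ? remainder : 0);
            System.out.printf("%s -> items [%d, %d), %d items%n",
                    labelers.get(i), index, index + assignCount, assignCount);
            index += assignCount;
        }
        // prints 44, 44, 44 and finally 47 items, covering all 179
    }
}
```
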
|
||||
public Long findAllocateAddCnt(UUID uuid, LocalDate baseDate) {
|
||||
return labelAllocateCoreService.findAllocateAddCnt(uuid, baseDate);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,6 +47,9 @@ public class MapSheetMngService {
|
||||
private final UploadService uploadService;
|
||||
private final UserUtil userUtil = new UserUtil();
|
||||
|
||||
@Value("${file.root}")
|
||||
private String nfsRootDir;
|
||||
|
||||
@Value("${file.sync-root-dir}")
|
||||
private String syncRootDir;
|
||||
|
||||
@@ -111,7 +114,6 @@ public class MapSheetMngService {
|
||||
public DmlReturn uploadPair(
|
||||
MultipartFile tfwFile, String tifFile, Long hstUid, Long tifFileSize) {
|
||||
|
||||
String rootPath = syncRootDir;
|
||||
String tmpPath = syncTmpDir;
|
||||
|
||||
DmlReturn dmlReturn = new DmlReturn("success", "UPLOAD COMPLETE");
|
||||
@@ -133,8 +135,8 @@ public class MapSheetMngService {
|
||||
return dmlReturn;
|
||||
}
|
||||
|
||||
// TODO 삭제?
|
||||
MngDto mngDto = mapSheetMngCoreService.findMapSheetMng(errDto.getMngYyyy());
|
||||
String targetYearDir = mngDto.getMngPath();
|
||||
|
||||
// 중복체크 -> 도엽50k/uuid 경로에 업로드 할 거라 overwrite 되지 않음
|
||||
// if (!overwrite) {
|
||||
@@ -337,12 +339,11 @@ public class MapSheetMngService {
|
||||
|
||||
public FoldersDto getFolderAll(SrchFoldersDto srchDto) {
|
||||
|
||||
Path startPath = Paths.get(syncRootDir + srchDto.getDirPath());
|
||||
String dirPath = syncRootDir + srchDto.getDirPath();
|
||||
String sortType = "name desc";
|
||||
|
||||
log.info("[FIND_FOLDER] DIR : {}", dirPath);
|
||||
List<FIleChecker.Folder> folderList =
|
||||
FIleChecker.getFolderAll(dirPath).stream()
|
||||
FIleChecker.getFolderAll(dirPath, nfsRootDir).stream()
|
||||
.filter(dir -> dir.getIsValid().equals(true))
|
||||
.toList();
|
||||
|
||||
|
||||
@@ -5,7 +5,6 @@ import com.kamco.cd.kamcoback.members.dto.MembersDto;
|
||||
import com.kamco.cd.kamcoback.members.dto.MembersDto.Basic;
|
||||
import com.kamco.cd.kamcoback.members.service.AdminService;
|
||||
import com.kamco.cd.kamcoback.members.service.MembersService;
|
||||
import com.kamco.cd.kamcoback.scheduler.service.MemberInactiveJobService;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.Parameter;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
@@ -35,7 +34,6 @@ public class MembersApiController {
|
||||
|
||||
private final MembersService membersService;
|
||||
private final AdminService adminService;
|
||||
private final MemberInactiveJobService memberInactiveJobService;
|
||||
|
||||
@Operation(summary = "회원정보 목록", description = "회원정보 조회")
|
||||
@ApiResponses(
|
||||
@@ -159,13 +157,4 @@ public class MembersApiController {
|
||||
String employeeNo) {
|
||||
return ApiResponseDto.ok(adminService.existsByEmployeeNo(employeeNo));
|
||||
}
|
||||
|
||||
@Operation(
|
||||
summary = "라벨러/검수자 최종로그인 28일 경과 이후 사용중지(스케줄링 실행)",
|
||||
description = "라벨러/검수자 최종로그인 28일 경과 이후 사용중지 처리")
|
||||
@GetMapping("/member-inactive-job")
|
||||
public ApiResponseDto<Void> memberInactiveJob() {
|
||||
memberInactiveJobService.memberActive28daysToInactive();
|
||||
return ApiResponseDto.ok(null);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -41,21 +41,6 @@ public class ModelMngApiController {
|
||||
|
||||
private final ModelMngService modelMngService;
|
||||
|
||||
@Value("${file.sync-root-dir}")
|
||||
private String syncRootDir;
|
||||
|
||||
@Value("${file.sync-tmp-dir}")
|
||||
private String syncTmpDir;
|
||||
|
||||
@Value("${file.sync-file-extention}")
|
||||
private String syncFileExtention;
|
||||
|
||||
@Value("${file.dataset-dir}")
|
||||
private String datasetDir;
|
||||
|
||||
@Value("${file.dataset-tmp-dir}")
|
||||
private String datasetTmpDir;
|
||||
|
||||
@Value("${file.model-dir}")
|
||||
private String modelDir;
|
||||
|
||||
|
||||
@@ -21,9 +21,9 @@ public class ModelMngDto {
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
public enum ModelType implements EnumType {
|
||||
M1("모델 M1"),
|
||||
M2("모델 M2"),
|
||||
M3("모델 M3");
|
||||
G1("G1"),
|
||||
G2("G2"),
|
||||
G3("G3");
|
||||
|
||||
private final String desc;
|
||||
|
||||
|
||||
@@ -35,27 +35,6 @@ public class ModelMngService {
|
||||
|
||||
private final UploadService uploadService;
|
||||
|
||||
@Value("${file.sync-root-dir}")
|
||||
private String syncRootDir;
|
||||
|
||||
@Value("${file.sync-tmp-dir}")
|
||||
private String syncTmpDir;
|
||||
|
||||
@Value("${file.sync-file-extention}")
|
||||
private String syncFileExtention;
|
||||
|
||||
@Value("${file.dataset-dir}")
|
||||
private String datasetDir;
|
||||
|
||||
@Value("${file.dataset-tmp-dir}")
|
||||
private String datasetTmpDir;
|
||||
|
||||
@Value("${file.model-dir}")
|
||||
private String modelDir;
|
||||
|
||||
@Value("${file.model-tmp-dir}")
|
||||
private String modelTmpDir;
|
||||
|
||||
@Value("${file.pt-path}")
|
||||
private String ptPath;
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
|
||||
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapScaleType;
|
||||
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapSheetList;
|
||||
import com.kamco.cd.kamcoback.common.enums.DetectionClassification;
|
||||
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
|
||||
import com.kamco.cd.kamcoback.postgres.repository.changedetection.ChangeDetectionRepository;
|
||||
import java.util.List;
|
||||
@@ -15,6 +16,7 @@ import java.util.stream.Collectors;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.locationtech.jts.geom.Geometry;
|
||||
import org.locationtech.jts.geom.Point;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
@Service
|
||||
@@ -96,4 +98,42 @@ public class ChangeDetectionCoreService {
|
||||
public List<MapSheetList> getChangeDetectionMapSheet50kList(UUID uuid) {
|
||||
return changeDetectionRepository.getChangeDetectionMapSheet50kList(uuid);
|
||||
}
|
||||
|
||||
/**
|
||||
* 선택 폴리곤 조회 by object id
|
||||
*
|
||||
* @param chnDtctId 회차 uid 32자
|
||||
* @param cdObjectId geo object uid 32자
|
||||
* @param cdObjectIds geo object uids 32자
|
||||
* @return
|
||||
*/
|
||||
public ChangeDetectionDto.PolygonFeatureList getPolygonListByCd(
|
||||
String chnDtctId, String cdObjectId, List<String> cdObjectIds) {
|
||||
return changeDetectionRepository.getPolygonListByCd(chnDtctId, cdObjectId, cdObjectIds);
|
||||
}
|
||||
|
||||
/**
|
||||
* 선택 Point 조회 by object id
|
||||
*
|
||||
* @param chnDtctId 회차 uid 32자
|
||||
* @param cdObjectId geo object uid 32자
|
||||
* @param cdObjectIds geo object uids 32자
|
||||
* @return
|
||||
*/
|
||||
public ChangeDetectionDto.PointFeatureList getPointListByCd(
|
||||
String chnDtctId, String cdObjectId, List<String> cdObjectIds) {
|
||||
return changeDetectionRepository.getPointListByCd(chnDtctId, cdObjectId, cdObjectIds);
|
||||
}
|
||||
|
||||
/**
|
||||
* learn uuid 조회
|
||||
*
|
||||
* @param chnDtctId
|
||||
* @return
|
||||
*/
|
||||
public UUID getLearnUuid(String chnDtctId) {
|
||||
return changeDetectionRepository
|
||||
.getLearnUuid(chnDtctId)
|
||||
.orElseThrow(() -> new CustomApiException("NOT_FOUND_DATA", HttpStatus.NOT_FOUND));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.postgres.core;
|
||||
|
||||
import com.kamco.cd.kamcoback.gukyuin.dto.GukYuinDto.GeomUidDto;
|
||||
import com.kamco.cd.kamcoback.postgres.repository.gukyuin.GukYuinLabelJobRepository;
|
||||
import java.time.LocalDate;
|
||||
import java.util.List;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
@@ -15,8 +16,8 @@ public class GukYuinLabelJobCoreService {
|
||||
this.gukYuinLabelRepository = gukYuinLabelRepository;
|
||||
}
|
||||
|
||||
public List<GeomUidDto> findYesterdayLabelingCompleteList() {
|
||||
return gukYuinLabelRepository.findYesterdayLabelingCompleteList();
|
||||
public List<GeomUidDto> findYesterdayLabelingCompleteList(LocalDate baseDate) {
|
||||
return gukYuinLabelRepository.findYesterdayLabelingCompleteList(baseDate);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
|
||||
@@ -19,6 +19,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto;
|
||||
import com.kamco.cd.kamcoback.model.dto.ModelMngDto.ModelType;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
|
||||
@@ -110,7 +111,7 @@ public class InferenceResultCoreService {
|
||||
|
||||
MapSheetLearnEntity mapSheetLearnEntity = new MapSheetLearnEntity();
|
||||
mapSheetLearnEntity.setTitle(req.getTitle());
|
||||
mapSheetLearnEntity.setRunningModelType("M1");
|
||||
mapSheetLearnEntity.setRunningModelType(ModelType.G1.getId());
|
||||
mapSheetLearnEntity.setM1ModelUuid(req.getModel1Uuid());
|
||||
mapSheetLearnEntity.setM2ModelUuid(req.getModel2Uuid());
|
||||
mapSheetLearnEntity.setM3ModelUuid(req.getModel3Uuid());
|
||||
@@ -301,7 +302,7 @@ public class InferenceResultCoreService {
|
||||
|
||||
private void applyModelUpdate(MapSheetLearnEntity entity, SaveInferenceAiDto request) {
|
||||
switch (request.getType()) {
|
||||
case "M1" ->
|
||||
case "G1" ->
|
||||
applyModelFields(
|
||||
request,
|
||||
entity::setM1ModelBatchId,
|
||||
@@ -311,7 +312,7 @@ public class InferenceResultCoreService {
|
||||
entity::setM1RunningJobs,
|
||||
entity::setM1CompletedJobs,
|
||||
entity::setM1FailedJobs);
|
||||
case "M2" ->
|
||||
case "G2" ->
|
||||
applyModelFields(
|
||||
request,
|
||||
entity::setM2ModelBatchId,
|
||||
@@ -321,7 +322,7 @@ public class InferenceResultCoreService {
|
||||
entity::setM2RunningJobs,
|
||||
entity::setM2CompletedJobs,
|
||||
entity::setM2FailedJobs);
|
||||
case "M3" ->
|
||||
case "G3" ->
|
||||
applyModelFields(
|
||||
request,
|
||||
entity::setM3ModelBatchId,
|
||||
@@ -368,6 +369,12 @@ public class InferenceResultCoreService {
|
||||
return mapSheetLearnRepository.getInferenceServerStatusList();
|
||||
}
|
||||
|
||||
/**
|
||||
* 진행중 배치 조회
|
||||
*
|
||||
* @param status
|
||||
* @return
|
||||
*/
|
||||
public InferenceBatchSheet getInferenceResultByStatus(String status) {
|
||||
MapSheetLearnEntity entity =
|
||||
mapSheetLearnRepository.getInferenceResultByStatus(status).orElse(null);
|
||||
@@ -402,6 +409,12 @@ public class InferenceResultCoreService {
|
||||
return mapSheetLearnRepository.getInferenceAiResultById(id, modelUuid);
|
||||
}
|
||||
|
||||
/**
|
||||
* 추론 진행 현황 상세
|
||||
*
|
||||
* @param uuid
|
||||
* @return
|
||||
*/
|
||||
public InferenceStatusDetailDto getInferenceStatus(UUID uuid) {
|
||||
return mapSheetLearnRepository.getInferenceStatus(uuid);
|
||||
}
|
||||
@@ -497,15 +510,16 @@ public class InferenceResultCoreService {
|
||||
}
|
||||
|
||||
/**
|
||||
* 추론 결과 shp파일 생성위해서 조회
|
||||
* 추론 결과 조회
|
||||
*
|
||||
* @param batchIds
|
||||
* @return
|
||||
*/
|
||||
public List<InferenceResultsTestingDto.ShpDto> getInferenceResults(List<Long> batchIds) {
|
||||
public List<InferenceResultsTestingDto.Basic> getInferenceResults(List<Long> batchIds) {
|
||||
List<InferenceResultsTestingEntity> list =
|
||||
inferenceResultsTestingRepository.getInferenceResultList(batchIds);
|
||||
return list.stream().map(InferenceResultsTestingDto.ShpDto::fromEntity).toList();
|
||||
|
||||
return list.stream().map(InferenceResultsTestingEntity::toDto).toList();
|
||||
}
|
||||
|
||||
public Long getInferenceResultCnt(List<Long> batchIds) {
|
||||
|
||||
@@ -257,16 +257,20 @@ public class LabelAllocateCoreService {
|
||||
|
||||
// 파일이 있는지만 확인
|
||||
Path path = Paths.get(responsePath).resolve(dto.getLearnUid() + ".zip");
|
||||
|
||||
if (!Files.exists(path) || !Files.isRegularFile(path)) {
|
||||
// 실제 파일만 true (디렉터리는 제외)
|
||||
return false;
|
||||
if (!Files.isRegularFile(path)) {
|
||||
return false; // exists 포함
|
||||
}
|
||||
|
||||
// 다운로드 확인할 학습데이터가 라벨링중인 경우 파일 생성여부가 정상인지 확인
|
||||
if (dto.getAnalState().equals(LabelMngState.ASSIGNED.getId())
|
||||
|| dto.getAnalState().equals(LabelMngState.ING.getId())) {
|
||||
return batchStepHistoryRepository.isDownloadable(dto.getAnalId());
|
||||
String state = dto.getAnalState();
|
||||
boolean isLabelingIng =
|
||||
LabelMngState.ASSIGNED.getId().equals(state) || LabelMngState.ING.getId().equals(state);
|
||||
|
||||
if (isLabelingIng) {
|
||||
Long analId = dto.getAnalId();
|
||||
if (analId == null) {
|
||||
return false;
|
||||
}
|
||||
return batchStepHistoryRepository.isDownloadable(analId);
|
||||
}
|
||||
|
||||
return true;
|
||||
@@ -277,4 +281,13 @@ public class LabelAllocateCoreService {
|
||||
.findLearnUid(uuid)
|
||||
.orElseThrow(() -> new CustomApiException("NOT_FOUND_DATA", HttpStatus.NOT_FOUND));
|
||||
}
|
||||
|
||||
public List<AllocateInfoDto> fetchNextIdsAddStbltYn(
|
||||
UUID uuid, LocalDate baseDate, Long lastId, Long totalCnt) {
|
||||
return labelAllocateRepository.fetchNextIdsAddStbltYn(uuid, baseDate, lastId, totalCnt);
|
||||
}
|
||||
|
||||
public Long findAllocateAddCnt(UUID uuid, LocalDate baseDate) {
|
||||
return labelAllocateRepository.findAllocateAddCnt(uuid, baseDate);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
|
||||
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.Scene;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.DetectOption;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetFallbackYearDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListCompareDto;
|
||||
@@ -24,13 +25,14 @@ import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
@@ -233,27 +235,52 @@ public class MapSheetMngCoreService {
|
||||
}
|
||||
|
||||
/**
|
||||
* 추론 실행에 필요한 geojson 파일 생성
|
||||
* geojson 생성
|
||||
*
|
||||
* @param yyyy 영상관리 파일별 년도
|
||||
* @param scenes 5k 도엽 번호 리스트
|
||||
* @param mapSheetScope EXCL : 추론제외, PREV 이전 년도 도엽 사용
|
||||
* @param yyyy
|
||||
* @param scenes
|
||||
* @param mapSheetScope
|
||||
* @param detectOption
|
||||
* @return
|
||||
*/
|
||||
public Scene getSceneInference(
|
||||
String yyyy, List<String> scenes, String mapSheetScope, String detectOption) {
|
||||
List<ImageFeature> features = loadSceneInferenceBySheets(yyyy, scenes);
|
||||
return writeSceneGeoJson(yyyy, mapSheetScope, detectOption, features);
|
||||
}
|
||||
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
/**
|
||||
* geojson 생성
|
||||
*
|
||||
* @param yearDtos
|
||||
* @param yyyy
|
||||
* @param mapSheetScope
|
||||
* @param detectOption
|
||||
* @return
|
||||
*/
|
||||
public Scene getSceneInference(
|
||||
List<MapSheetFallbackYearDto> yearDtos,
|
||||
String yyyy,
|
||||
String mapSheetScope,
|
||||
String detectOption) {
|
||||
List<ImageFeature> features = loadSceneInferenceByFallbackYears(yearDtos);
|
||||
return writeSceneGeoJson(yyyy, mapSheetScope, detectOption, features);
|
||||
}
|
||||
|
||||
/**
|
||||
* 파일 경로/이름 , 파일 생성 , 도엽번호 반환
|
||||
*
|
||||
* @param yyyy
|
||||
* @param mapSheetScope
|
||||
* @param detectOption
|
||||
* @param sceneInference
|
||||
* @return Scene
|
||||
*/
|
||||
private Scene writeSceneGeoJson(
|
||||
String yyyy, String mapSheetScope, String detectOption, List<ImageFeature> sceneInference) {
|
||||
boolean isAll = MapSheetScope.ALL.getId().equals(mapSheetScope);
|
||||
String optionSuffix = buildOptionSuffix(detectOption);
|
||||
|
||||
String optionSuffix = "";
|
||||
if (DetectOption.EXCL.getId().equals(detectOption)) {
|
||||
optionSuffix = "_EXCL";
|
||||
} else if (DetectOption.PREV.getId().equals(detectOption)) {
|
||||
optionSuffix = "_PREV";
|
||||
}
|
||||
|
||||
// 1) 경로/파일명 결정
|
||||
String targetDir =
|
||||
"local".equals(activeEnv) ? System.getProperty("user.home") + "/geojson" : inferenceDir;
|
||||
|
||||
@@ -264,25 +291,21 @@ public class MapSheetMngCoreService {
|
||||
|
||||
Path outputPath = Paths.get(targetDir, filename);
|
||||
|
||||
// 2) ALL일 때만 재사용
|
||||
// if (isAll && Files.exists(outputPath)) {
|
||||
// return outputPath.toString();
|
||||
// }
|
||||
|
||||
// 3) 데이터 조회
|
||||
List<ImageFeature> sceneInference = mapSheetMngRepository.getSceneInference(yyyy, scenes);
|
||||
|
||||
if (sceneInference == null || sceneInference.isEmpty()) {
|
||||
log.warn(
|
||||
"NOT_FOUND_TARGET_YEAR: yyyy={}, isAll={}, scenesSize={}",
|
||||
yyyy,
|
||||
isAll,
|
||||
scenes == null ? 0 : scenes.size());
|
||||
log.warn("NOT_FOUND_TARGET_YEAR: yyyy={}, isAll={}, featuresSize={}", yyyy, isAll, 0);
|
||||
throw new CustomApiException("NOT_FOUND_TARGET_YEAR", HttpStatus.NOT_FOUND);
|
||||
}
|
||||
|
||||
// 4) 파일 생성
|
||||
try {
|
||||
log.info("create Directories outputPath: {}", outputPath);
|
||||
log.info(
|
||||
"activeEnv={}, inferenceDir={}, targetDir={}, filename={}",
|
||||
activeEnv,
|
||||
inferenceDir,
|
||||
targetDir,
|
||||
filename);
|
||||
log.info("outputPath={}, parent={}", outputPath.toAbsolutePath(), outputPath.getParent());
|
||||
|
||||
Files.createDirectories(outputPath.getParent());
|
||||
|
||||
new GeoJsonFileWriter()
|
||||
@@ -291,16 +314,89 @@ public class MapSheetMngCoreService {
|
||||
Scene scene = new Scene();
|
||||
scene.setFeatures(sceneInference);
|
||||
scene.setFilePath(outputPath.toString());
|
||||
|
||||
return scene;
|
||||
|
||||
} catch (IOException e) {
|
||||
log.error(
|
||||
"FAIL_CREATE_MAP_SHEET_FILE: yyyy={}, isAll={}, path={}", yyyy, isAll, outputPath, e);
|
||||
throw new CustomApiException("FAIL_CREATE_MAP_SHEET_FILE", HttpStatus.INTERNAL_SERVER_ERROR);
|
||||
throw new CustomApiException("INTERNAL_SERVER_ERROR", HttpStatus.INTERNAL_SERVER_ERROR, e);
|
||||
}
|
||||
}
|
||||
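`GeoJsonFileWriter` and `ImageFeature` are not shown in this diff, so the exact file format is an assumption. A minimal sketch of writing a GeoJSON `FeatureCollection` with Jackson, just to illustrate the kind of file `writeSceneGeoJson` produces (all property names, values, and the output filename below are made up):

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;

public class GeoJsonWriteSketch {
    public static void main(String[] args) throws Exception {
        Map<String, Object> feature = Map.of(
                "type", "Feature",
                "properties", Map.of("map_sheet_num", "35812090", "image_path", "/kamco-nfs/2024/35812090.tif"),
                "geometry", Map.of("type", "Point", "coordinates", List.of(127.0, 36.5)));

        Map<String, Object> featureCollection = Map.of(
                "type", "FeatureCollection",
                "features", List.of(feature));

        Path outputPath = Path.of(System.getProperty("user.home"), "geojson", "2024_ALL_EXCL.geojson");
        Files.createDirectories(outputPath.getParent());
        new ObjectMapper().writerWithDefaultPrettyPrinter().writeValue(outputPath.toFile(), featureCollection);
    }
}
```
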
|
||||
/**
|
||||
* geojson 파일명 Suffix
|
||||
*
|
||||
* @param detectOption
|
||||
* @return
|
||||
*/
|
||||
private String buildOptionSuffix(String detectOption) {
|
||||
if (DetectOption.EXCL.getId().equals(detectOption)) return "_EXCL";
|
||||
if (DetectOption.PREV.getId().equals(detectOption)) return "_PREV";
|
||||
return "";
|
||||
}
|
||||
|
||||
/**
|
||||
* 년도, 도엽번호로 조회
|
||||
*
|
||||
* @param yyyy
|
||||
* @param scenes
|
||||
* @return ImageFeature
|
||||
*/
|
||||
private List<ImageFeature> loadSceneInferenceBySheets(String yyyy, List<String> scenes) {
|
||||
List<ImageFeature> sceneInference = mapSheetMngRepository.getSceneInference(yyyy, scenes);
|
||||
|
||||
if (sceneInference == null || sceneInference.isEmpty()) {
|
||||
log.warn(
|
||||
"NOT_FOUND_TARGET_YEAR: yyyy={}, scenesSize={}",
|
||||
yyyy,
|
||||
scenes == null ? 0 : scenes.size());
|
||||
throw new CustomApiException("NOT_FOUND_TARGET_YEAR", HttpStatus.NOT_FOUND);
|
||||
}
|
||||
return sceneInference;
|
||||
}
|
||||
|
||||
/**
|
||||
* 년도별로 나눠 조회
|
||||
*
|
||||
* @param yearDtos
|
||||
* @return ImageFeature
|
||||
*/
|
||||
private List<ImageFeature> loadSceneInferenceByFallbackYears(
|
||||
List<MapSheetFallbackYearDto> yearDtos) {
|
||||
if (yearDtos == null || yearDtos.isEmpty()) {
|
||||
return List.of();
|
||||
}
|
||||
|
||||
// 년도 별로 루프를 돌리기위해 년도별 정리
|
||||
Map<Integer, List<MapSheetFallbackYearDto>> groupedByYear =
|
||||
yearDtos.stream()
|
||||
.filter(d -> d.getMngYyyy() != null && d.getMapSheetNum() != null)
|
||||
.collect(Collectors.groupingBy(MapSheetFallbackYearDto::getMngYyyy));
|
||||
|
||||
List<ImageFeature> sceneInference = new ArrayList<>();
|
||||
|
||||
for (Map.Entry<Integer, List<MapSheetFallbackYearDto>> entry : groupedByYear.entrySet()) {
|
||||
Integer year = entry.getKey();
|
||||
|
||||
// 년도별 mapSheetNum 만들기
|
||||
List<String> sheetNums =
|
||||
entry.getValue().stream()
|
||||
.map(MapSheetFallbackYearDto::getMapSheetNum)
|
||||
.filter(Objects::nonNull)
|
||||
.distinct()
|
||||
.toList();
|
||||
|
||||
// tif파일 정보 조회
|
||||
List<ImageFeature> temp = mapSheetMngRepository.getSceneInference(year.toString(), sheetNums);
|
||||
|
||||
if (temp != null && !temp.isEmpty()) {
|
||||
sceneInference.addAll(temp);
|
||||
}
|
||||
}
|
||||
|
||||
return sceneInference;
|
||||
}
|
||||
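`loadSceneInferenceByFallbackYears` groups the fallback DTOs by `mngYyyy` so the repository is hit once per year. A tiny standalone illustration of that grouping step (sheet numbers and years are made up):

```java
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class FallbackGroupingExample {
    record Sheet(String mapSheetNum, Integer mngYyyy) {}

    public static void main(String[] args) {
        List<Sheet> dtos = List.of(
                new Sheet("35812090", 2023),
                new Sheet("35812091", 2022), // only an older image exists for this sheet
                new Sheet("35812092", 2023));

        Map<Integer, List<String>> byYear = dtos.stream()
                .collect(Collectors.groupingBy(Sheet::mngYyyy,
                        Collectors.mapping(Sheet::mapSheetNum, Collectors.toList())));

        // e.g. {2022=[35812091], 2023=[35812090, 35812092]} -> one getSceneInference call per year
        System.out.println(byYear);
    }
}
```
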
|
||||
/**
|
||||
* 변화탐지 실행 가능 기준 년도 조회
|
||||
*
|
||||
@@ -334,4 +430,31 @@ public class MapSheetMngCoreService {
public List<MngListCompareDto> getByHstMapSheetCompareList(int mngYyyy, List<String> mapId) {
return mapSheetMngYearRepository.findByHstMapSheetCompareList(mngYyyy, mapId);
}

public List<String> getMapSheetNumByHst(Integer year) {
List<MapSheetMngHstEntity> entity = mapSheetMngRepository.getMapSheetMngHst(year);
return entity.stream().map(MapSheetMngHstEntity::getMapSheetNum).toList();
}

/**
* Query map sheet history data for a specific year
*
* @param year
* @return
*/
public List<MapSheetFallbackYearDto> getMapSheetNumDtoByHst(Integer year) {
List<MapSheetMngHstEntity> entity = mapSheetMngRepository.getMapSheetMngHst(year);
return entity.stream()
.map(
e ->
new MapSheetFallbackYearDto(
e.getMapSheetNum(), e.getMngYyyy() // base year of the lookup
))
.toList();
}

public List<MapSheetFallbackYearDto> findFallbackCompareYearByMapSheets(
Integer year, List<String> mapIds) {
return mapSheetMngRepository.findFallbackCompareYearByMapSheets(year, mapIds);
}
}

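The fallback flow above boils down to: collect (mapSheetNum, mngYyyy) pairs, group them by year, then issue one getSceneInference call per year. A minimal standalone sketch of that grouping step, for reviewers only — the `SheetYear` record and the sample sheet numbers are hypothetical stand-ins for `MapSheetFallbackYearDto`, not part of this change:

```java
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class FallbackGroupingSketch {
    // Hypothetical stand-in for MapSheetFallbackYearDto (mapSheetNum, mngYyyy).
    record SheetYear(String mapSheetNum, Integer mngYyyy) {}

    public static void main(String[] args) {
        List<SheetYear> dtos =
                List.of(new SheetYear("35801", 2021), new SheetYear("35802", 2021), new SheetYear("35803", 2019));

        // Same shape as the service code: group by year, then build a distinct sheet list per year.
        Map<Integer, List<String>> sheetsByYear =
                dtos.stream()
                        .filter(d -> d.mngYyyy() != null && d.mapSheetNum() != null)
                        .collect(
                                Collectors.groupingBy(
                                        SheetYear::mngYyyy,
                                        Collectors.mapping(SheetYear::mapSheetNum, Collectors.toList())));

        // One repository call per entry, e.g. getSceneInference("2021", ["35801", "35802"]).
        sheetsByYear.forEach((year, sheets) -> System.out.println(year + " -> " + sheets));
    }
}
```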
@@ -6,6 +6,7 @@ import com.kamco.cd.kamcoback.postgres.repository.mapsheet.MapSheetMngYearReposi
import com.kamco.cd.kamcoback.postgres.repository.scheduler.MapSheetMngFileJobRepository;
import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto;
import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto.MngHstDto;
import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto.YearMinMax;
import jakarta.validation.Valid;
import java.util.List;
import lombok.RequiredArgsConstructor;
@@ -67,9 +68,10 @@ public class MapSheetMngFileJobCoreService {
return mapSheetMngFileJobRepository.findNotYetMapSheetMng();
}

public Long findByHstMapSheetBeforeYyyyListCount(int strtYyyy, int endYyyy, String mapSheetNum) {
public Long findByHstMapSheetBeforeYyyyListCount(
int mngYyyy, int strtYyyy, int endYyyy, String mapSheetNum) {
return mapSheetMngFileJobRepository.findByHstMapSheetBeforeYyyyListCount(
strtYyyy, endYyyy, mapSheetNum);
mngYyyy, strtYyyy, endYyyy, mapSheetNum);
}

public void updateException5kMapSheet(String mapSheetNum, CommonUseStatus commonUseStatus) {
@@ -79,4 +81,16 @@ public class MapSheetMngFileJobCoreService {
public void saveSheetMngYear() {
mapSheetMngYearRepository.saveFileInfo();
}

public YearMinMax findYearMinMaxInfo() {
return mapSheetMngYearRepository.findYearMinMaxInfo();
}

public Long findMngYyyyCnt(Integer mngYyyy) {
return mapSheetMngFileJobRepository.findMngYyyyCnt(mngYyyy);
}

public Long findMapSheetUseExceptCnt(String mapSheetNum) {
return mapSheetMngFileJobRepository.findMapSheetUseExceptCnt(mapSheetNum);
}
}

@@ -3,6 +3,7 @@ package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.repository.scheduler.TrainingDataLabelJobRepository;
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.InspectorPendingDto;
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.Tasks;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
@@ -14,8 +15,8 @@ public class TrainingDataLabelJobCoreService {

private final TrainingDataLabelJobRepository trainingDataLabelJobRepository;

public List<Tasks> findCompletedYesterdayUnassigned() {
return trainingDataLabelJobRepository.findCompletedYesterdayUnassigned();
public List<Tasks> findCompletedYesterdayUnassigned(LocalDate baseDate) {
return trainingDataLabelJobRepository.findCompletedYesterdayUnassigned(baseDate);
}

public void assignReviewerBatch(List<UUID> assignmentUids, String reviewerId) {

@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.postgres.repository.scheduler.TrainingDataReviewJo
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.AnalCntInfo;
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.AnalMapSheetList;
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.CompleteLabelData;
import java.time.LocalDate;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
@@ -15,12 +16,13 @@ public class TrainingDataReviewJobCoreService {
private final TrainingDataReviewJobRepository trainingDataReviewJobRepository;

public List<CompleteLabelData> findCompletedYesterdayLabelingList(
Long analUid, String mapSheetNum) {
return trainingDataReviewJobRepository.findCompletedYesterdayLabelingList(analUid, mapSheetNum);
Long analUid, String mapSheetNum, LocalDate baseDate) {
return trainingDataReviewJobRepository.findCompletedYesterdayLabelingList(
analUid, mapSheetNum, baseDate);
}

public List<AnalMapSheetList> findCompletedAnalMapSheetList(Long analUid) {
return trainingDataReviewJobRepository.findCompletedAnalMapSheetList(analUid);
public List<AnalMapSheetList> findCompletedAnalMapSheetList(Long analUid, LocalDate baseDate) {
return trainingDataReviewJobRepository.findCompletedAnalMapSheetList(analUid, baseDate);
}

public List<AnalCntInfo> findAnalCntInfoList() {

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.postgres.entity;

import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
@@ -84,4 +85,28 @@ public class InferenceResultsTestingEntity {

@Column(name = "geometry", columnDefinition = "geometry")
private Geometry geometry;

public InferenceResultsTestingDto.Basic toDto() {
return new InferenceResultsTestingDto.Basic(
this.probability,
this.beforeYear,
this.afterYear,
this.mapId,
this.modelVersion,
this.clsModelPath,
this.clsModelVersion,
this.cdModelType,
this.id,
this.modelName,
this.batchId,
this.area,
this.beforeC,
this.beforeP,
this.afterC,
this.afterP,
this.seq,
this.createdDate,
this.uid,
this.geometry);
}
}

@@ -35,17 +35,17 @@ public class MapSheetLearn5kRepositoryImpl implements MapSheetLearn5kRepositoryC
|
||||
final StringPath errorMsgPath;
|
||||
|
||||
switch (type) {
|
||||
case "M1" -> {
|
||||
case "G1" -> {
|
||||
failPath = mapSheetLearn5kEntity.isM1Fail;
|
||||
jobIdPath = mapSheetLearn5kEntity.m1JobId;
|
||||
errorMsgPath = mapSheetLearn5kEntity.m1ErrorMessage;
|
||||
}
|
||||
case "M2" -> {
|
||||
case "G2" -> {
|
||||
failPath = mapSheetLearn5kEntity.isM2Fail;
|
||||
jobIdPath = mapSheetLearn5kEntity.m2JobId;
|
||||
errorMsgPath = mapSheetLearn5kEntity.m2ErrorMessage;
|
||||
}
|
||||
case "M3" -> {
|
||||
case "G3" -> {
|
||||
failPath = mapSheetLearn5kEntity.isM3Fail;
|
||||
jobIdPath = mapSheetLearn5kEntity.m3JobId;
|
||||
errorMsgPath = mapSheetLearn5kEntity.m3ErrorMessage;
|
||||
@@ -85,15 +85,15 @@ public class MapSheetLearn5kRepositoryImpl implements MapSheetLearn5kRepositoryC
|
||||
final StringPath errorMsgPath;
|
||||
|
||||
switch (type) {
|
||||
case "M1" -> {
|
||||
case "G1" -> {
|
||||
failPath = mapSheetLearn5kEntity.isM1Fail;
|
||||
jobIdPath = mapSheetLearn5kEntity.m1JobId;
|
||||
}
|
||||
case "M2" -> {
|
||||
case "G2" -> {
|
||||
failPath = mapSheetLearn5kEntity.isM2Fail;
|
||||
jobIdPath = mapSheetLearn5kEntity.m2JobId;
|
||||
}
|
||||
case "M3" -> {
|
||||
case "G3" -> {
|
||||
failPath = mapSheetLearn5kEntity.isM3Fail;
|
||||
jobIdPath = mapSheetLearn5kEntity.m3JobId;
|
||||
}
|
||||
@@ -135,15 +135,15 @@ public class MapSheetLearn5kRepositoryImpl implements MapSheetLearn5kRepositoryC
|
||||
BooleanPath failPath;
|
||||
|
||||
switch (type) {
|
||||
case "M1" -> {
|
||||
case "G1" -> {
|
||||
jobIdPath = mapSheetLearn5kEntity.m1JobId;
|
||||
failPath = mapSheetLearn5kEntity.isM1Fail;
|
||||
}
|
||||
case "M2" -> {
|
||||
case "G2" -> {
|
||||
jobIdPath = mapSheetLearn5kEntity.m2JobId;
|
||||
failPath = mapSheetLearn5kEntity.isM2Fail;
|
||||
}
|
||||
case "M3" -> {
|
||||
case "G3" -> {
|
||||
jobIdPath = mapSheetLearn5kEntity.m3JobId;
|
||||
failPath = mapSheetLearn5kEntity.isM3Fail;
|
||||
}
|
||||
@@ -180,13 +180,13 @@ public class MapSheetLearn5kRepositoryImpl implements MapSheetLearn5kRepositoryC
|
||||
BooleanPath failPath;
|
||||
|
||||
switch (type) {
|
||||
case "M1" -> {
|
||||
case "G1" -> {
|
||||
jobIdPath = mapSheetLearn5kEntity.m1JobId;
|
||||
}
|
||||
case "M2" -> {
|
||||
case "G2" -> {
|
||||
jobIdPath = mapSheetLearn5kEntity.m2JobId;
|
||||
}
|
||||
case "M3" -> {
|
||||
case "G3" -> {
|
||||
jobIdPath = mapSheetLearn5kEntity.m3JobId;
|
||||
}
|
||||
default -> {
|
||||
|
||||
@@ -18,29 +18,99 @@ import org.springframework.data.domain.Page;
|
||||
|
||||
public interface MapSheetLearnRepositoryCustom {
|
||||
|
||||
/**
|
||||
* 추론 관리 목록 조회
|
||||
*
|
||||
* @param req 추론관리 목록 화면 조회 조건
|
||||
* @return 추론 관리 목록
|
||||
*/
|
||||
Page<MapSheetLearnEntity> getInferenceMgnResultList(InferenceResultDto.SearchListReq req);
|
||||
|
||||
/**
|
||||
* uuid 조건으로 추론 실행 정보 조회
|
||||
*
|
||||
* @param uuid uuid
|
||||
* @return 추론 실행 정보
|
||||
*/
|
||||
Optional<MapSheetLearnEntity> getInferenceResultByUuid(UUID uuid);
|
||||
|
||||
/**
|
||||
* 추론 실행중 서버정보 조회 cpu, gpu
|
||||
*
|
||||
* @return cpu, gpu 정보
|
||||
*/
|
||||
List<InferenceServerStatusDto> getInferenceServerStatusList();
|
||||
|
||||
/**
|
||||
* 추론 실행 목록 진행 상태별 조회
|
||||
*
|
||||
* @param status 추론 진행 상태
|
||||
* @return 추론 실행 정보
|
||||
*/
|
||||
Optional<MapSheetLearnEntity> getInferenceResultByStatus(String status);
|
||||
|
||||
/**
|
||||
* 등록된 추론 실행목록 및 등록된 모델 정보 조회
|
||||
*
|
||||
* @param id 추론 실행 테이블 id
|
||||
* @param modelUuid 모델 uuid
|
||||
* @return 모델 정보
|
||||
*/
|
||||
InferenceProgressDto getInferenceAiResultById(Long id, UUID modelUuid);
|
||||
|
||||
/**
|
||||
* 진행중인 추론 정보 상세 조회
|
||||
*
|
||||
* @param uuid 추론진행 uuid
|
||||
* @return 진행중인 추론정보 상세 정보
|
||||
*/
|
||||
InferenceStatusDetailDto getInferenceStatus(UUID uuid);
|
||||
|
||||
/**
|
||||
* 진행중인 추론이 있는지 조회
|
||||
*
|
||||
* @return 진행중인 추론 정보
|
||||
*/
|
||||
MapSheetLearnEntity getProcessing();
|
||||
|
||||
Integer getLearnStage(Integer compareYear, Integer targetYear);
|
||||
|
||||
/**
|
||||
* 추론 결과 정보 조회
|
||||
*
|
||||
* @param uuid 추론 uuid
|
||||
* @return 추론 결과 및 사용 모델 정보
|
||||
*/
|
||||
AnalResultInfo getInferenceResultInfo(UUID uuid);
|
||||
|
||||
/**
|
||||
* 추론 결과 bbox, point 조회
|
||||
*
|
||||
* @param uuid 추론 uuid
|
||||
* @return bbox, pont 정보
|
||||
*/
|
||||
BboxPointDto getBboxPoint(UUID uuid);
|
||||
|
||||
/**
|
||||
* 분류별 탐지건수 조회
|
||||
*
|
||||
* @param uuid 추론 uuid
|
||||
* @return 분류별 탐지건수 정보
|
||||
*/
|
||||
List<Dashboard> getInferenceClassCountList(UUID uuid);
|
||||
|
||||
/**
|
||||
* 추론 결과 상세 geom 목록 조회
|
||||
*
|
||||
* @param uuid 추론 uuid
|
||||
* @param searchGeoReq 추론 결과 상세화면 geom 조회 조건
|
||||
* @return geom 목록 정보
|
||||
*/
|
||||
Page<Geom> getInferenceGeomList(UUID uuid, SearchGeoReq searchGeoReq);
|
||||
|
||||
/**
|
||||
* 국유in연동 가능여부 확인 조회
|
||||
*
|
||||
* @param uuid 추론 uuid
|
||||
* @return 추론 존재여부, 부분도엽 여부, 추론 진행중 여부, 국유인 작업 진행중 여부
|
||||
*/
|
||||
GukYuinLinkFacts findLinkFacts(UUID uuid);
|
||||
}
|
||||
|
||||
@@ -291,24 +291,6 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
|
||||
.fetchOne();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Integer getLearnStage(Integer compareYear, Integer targetYear) {
|
||||
Integer stage =
|
||||
queryFactory
|
||||
.select(mapSheetLearnEntity.stage)
|
||||
.from(mapSheetLearnEntity)
|
||||
.where(
|
||||
mapSheetLearnEntity
|
||||
.compareYyyy
|
||||
.eq(compareYear)
|
||||
.and(mapSheetLearnEntity.targetYyyy.eq(targetYear)))
|
||||
.orderBy(mapSheetLearnEntity.id.desc())
|
||||
.limit(1)
|
||||
.fetchOne();
|
||||
|
||||
return stage == null ? 1 : stage + 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AnalResultInfo getInferenceResultInfo(UUID uuid) {
|
||||
QModelMngEntity m1 = new QModelMngEntity("m1");
|
||||
@@ -528,6 +510,7 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
|
||||
@Override
|
||||
public GukYuinLinkFacts findLinkFacts(UUID uuid) {
|
||||
|
||||
// 해당 추론 있는지 확인
|
||||
MapSheetLearnEntity learn =
|
||||
queryFactory
|
||||
.selectFrom(QMapSheetLearnEntity.mapSheetLearnEntity)
|
||||
@@ -538,12 +521,14 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
|
||||
return new GukYuinLinkFacts(false, false, false, false);
|
||||
}
|
||||
|
||||
// 부분 도엽 실행인지 확인
|
||||
boolean isPartScope = MapSheetScope.PART.getId().equals(learn.getMapSheetScope());
|
||||
|
||||
QMapSheetAnalInferenceEntity inf = QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
|
||||
QMapSheetLearnEntity learn2 = new QMapSheetLearnEntity("learn2");
|
||||
QMapSheetLearnEntity learnQ = QMapSheetLearnEntity.mapSheetLearnEntity;
|
||||
|
||||
// 실행중인 추론 있는지 확인
|
||||
boolean hasRunningInference =
|
||||
queryFactory
|
||||
.selectOne()
|
||||
@@ -557,6 +542,7 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
|
||||
.fetchFirst()
|
||||
!= null;
|
||||
|
||||
// 국유인 작업 진행중 있는지 확인
|
||||
boolean hasOtherUnfinishedGukYuin =
|
||||
queryFactory
|
||||
.selectOne()
|
||||
|
||||
@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
|
||||
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapScaleType;
|
||||
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapSheetList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
public interface ChangeDetectionRepositoryCustom {
|
||||
@@ -28,4 +29,18 @@ public interface ChangeDetectionRepositoryCustom {
|
||||
List<ChangeDetectionDto.MapSheetList> getChangeDetectionMapSheetList(UUID uuid);
|
||||
|
||||
List<MapSheetList> getChangeDetectionMapSheet50kList(UUID uuid);
|
||||
|
||||
ChangeDetectionDto.PolygonFeatureList getPolygonListByCd(
|
||||
String chnDtctId, String cdObjectId, List<String> cdObjectIds);
|
||||
|
||||
ChangeDetectionDto.PointFeatureList getPointListByCd(
|
||||
String chnDtctId, String cdObjectId, List<String> cdObjectIds);
|
||||
|
||||
ChangeDetectionDto.PolygonFeatureList getSelectedChangeDetectionPolygonListByPnu(
|
||||
String chnDtctId, String pnu);
|
||||
|
||||
ChangeDetectionDto.PointFeatureList getSelectedChangeDetectionPointListByPnu(
|
||||
String chnDtctId, String pnu);
|
||||
|
||||
Optional<UUID> getLearnUuid(String chnDtctId);
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceG
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QPnuEntity.pnuEntity;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
@@ -16,11 +17,14 @@ import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
|
||||
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.DetectSearchType;
|
||||
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapScaleType;
|
||||
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.MapSheetList;
|
||||
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.PointFeatureList;
|
||||
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto.PolygonFeatureList;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity;
|
||||
import com.querydsl.core.BooleanBuilder;
|
||||
import com.querydsl.core.types.Projections;
|
||||
import com.querydsl.core.types.dsl.BooleanExpression;
|
||||
import com.querydsl.core.types.dsl.CaseBuilder;
|
||||
@@ -30,10 +34,13 @@ import com.querydsl.jpa.JPAExpressions;
|
||||
import com.querydsl.jpa.impl.JPAQueryFactory;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
@Repository
|
||||
public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
|
||||
implements ChangeDetectionRepositoryCustom {
|
||||
|
||||
@@ -226,7 +233,9 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
|
||||
mapSheetAnalDataInferenceGeomEntity.targetYyyy,
|
||||
mapSheetAnalDataInferenceGeomEntity.classAfterProb,
|
||||
mapSheetAnalDataInferenceGeomEntity.classAfterCd.toUpperCase(),
|
||||
mapSheetAnalDataInferenceGeomEntity.cdProb))
|
||||
mapSheetAnalDataInferenceGeomEntity.cdProb,
|
||||
mapSheetAnalDataInferenceGeomEntity.uuid,
|
||||
mapSheetAnalDataInferenceGeomEntity.resultUid))
|
||||
.from(mapSheetAnalDataInferenceGeomEntity)
|
||||
.innerJoin(mapSheetAnalDataInferenceEntity)
|
||||
.on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
|
||||
@@ -261,7 +270,9 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
|
||||
data.getAfterYear(),
|
||||
data.getAfterConfidence(),
|
||||
data.getAfterClass(),
|
||||
data.getCdProb());
|
||||
data.getCdProb(),
|
||||
data.getUuid(),
|
||||
data.getResultUid());
|
||||
|
||||
return new ChangeDetectionDto.PolygonFeature(
|
||||
data.getType(), jsonNode, properties);
|
||||
@@ -371,4 +382,275 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
|
||||
"{0} || {1}", imageryEntity.cogMiddlePath, imageryEntity.cogFilename))
|
||||
.otherwise("");
|
||||
}
|
||||
|
||||
@Override
|
||||
public PolygonFeatureList getPolygonListByCd(
|
||||
String chnDtctId, String cdObjectId, List<String> cdObjectIds) {
|
||||
BooleanBuilder builder = new BooleanBuilder();
|
||||
builder.and(mapSheetLearnEntity.uid.eq(chnDtctId));
|
||||
builder.and(
|
||||
mapSheetAnalDataInferenceGeomEntity
|
||||
.resultUid
|
||||
.eq(chnDtctId)
|
||||
.or(mapSheetAnalDataInferenceGeomEntity.resultUid.in(cdObjectIds)));
|
||||
|
||||
List<ChangeDetectionDto.PolygonQueryData> list =
|
||||
queryFactory
|
||||
.select(
|
||||
Projections.constructor(
|
||||
ChangeDetectionDto.PolygonQueryData.class,
|
||||
Expressions.stringTemplate("{0}", "Feature"),
|
||||
Expressions.stringTemplate(
|
||||
"ST_AsGeoJSON({0})", mapSheetAnalDataInferenceGeomEntity.geom),
|
||||
mapSheetAnalDataInferenceGeomEntity.geoUid,
|
||||
mapSheetAnalDataInferenceGeomEntity.area,
|
||||
mapSheetAnalDataInferenceGeomEntity.compareYyyy,
|
||||
mapSheetAnalDataInferenceGeomEntity.classBeforeProb,
|
||||
mapSheetAnalDataInferenceGeomEntity.classBeforeCd.toUpperCase(),
|
||||
mapSheetAnalDataInferenceGeomEntity.targetYyyy,
|
||||
mapSheetAnalDataInferenceGeomEntity.classAfterProb,
|
||||
mapSheetAnalDataInferenceGeomEntity.classAfterCd.toUpperCase(),
|
||||
mapSheetAnalDataInferenceGeomEntity.cdProb,
|
||||
mapSheetAnalDataInferenceGeomEntity.uuid,
|
||||
mapSheetAnalDataInferenceGeomEntity.resultUid))
|
||||
.from(mapSheetAnalDataInferenceGeomEntity)
|
||||
.innerJoin(mapSheetAnalDataInferenceEntity)
|
||||
.on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
|
||||
.innerJoin(mapSheetAnalInferenceEntity)
|
||||
.on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
|
||||
.innerJoin(mapSheetLearnEntity)
|
||||
.on(mapSheetLearnEntity.id.eq(mapSheetAnalInferenceEntity.learnId))
|
||||
.where(builder)
|
||||
.orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc())
|
||||
.fetch();
|
||||
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
List<ChangeDetectionDto.PolygonFeature> result =
|
||||
list.stream()
|
||||
.map(
|
||||
data -> {
|
||||
String geoJson = data.getGeometry();
|
||||
JsonNode jsonNode;
|
||||
try {
|
||||
jsonNode = mapper.readTree(geoJson);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
ChangeDetectionDto.PolygonProperties properties =
|
||||
new ChangeDetectionDto.PolygonProperties(
|
||||
data.getGeoUid(),
|
||||
data.getArea(),
|
||||
data.getBeforeYear(),
|
||||
data.getBeforeConfidence(),
|
||||
data.getBeforeClass(),
|
||||
data.getAfterYear(),
|
||||
data.getAfterConfidence(),
|
||||
data.getAfterClass(),
|
||||
data.getCdProb(),
|
||||
data.getUuid(),
|
||||
data.getResultUid());
|
||||
|
||||
return new ChangeDetectionDto.PolygonFeature(
|
||||
data.getType(), jsonNode, properties);
|
||||
})
|
||||
.collect(Collectors.toList());
|
||||
|
||||
ChangeDetectionDto.PolygonFeatureList polygonList = new ChangeDetectionDto.PolygonFeatureList();
|
||||
polygonList.setType("FeatureCollection");
|
||||
polygonList.setFeatures(result);
|
||||
return polygonList;
|
||||
}
|
||||
|
||||
@Override
|
||||
public PointFeatureList getPointListByCd(
|
||||
String chnDtctId, String cdObjectId, List<String> cdObjectIds) {
|
||||
BooleanBuilder builder = new BooleanBuilder();
|
||||
builder.and(mapSheetLearnEntity.uid.eq(chnDtctId));
|
||||
builder.and(
|
||||
mapSheetAnalDataInferenceGeomEntity
|
||||
.resultUid
|
||||
.eq(chnDtctId)
|
||||
.or(mapSheetAnalDataInferenceGeomEntity.resultUid.in(cdObjectIds)));
|
||||
|
||||
List<ChangeDetectionDto.PointQueryData> list =
|
||||
queryFactory
|
||||
.select(
|
||||
Projections.constructor(
|
||||
ChangeDetectionDto.PointQueryData.class,
|
||||
Expressions.stringTemplate("{0}", "Feature"),
|
||||
Expressions.stringTemplate(
|
||||
"ST_AsGeoJSON({0})",
|
||||
mapSheetAnalDataInferenceGeomEntity.geomCenter), // point
|
||||
Projections.constructor(
|
||||
ChangeDetectionDto.PointProperties.class,
|
||||
mapSheetAnalDataInferenceGeomEntity.geoUid,
|
||||
mapSheetAnalDataInferenceGeomEntity.classAfterCd.toUpperCase())))
|
||||
.from(mapSheetAnalDataInferenceGeomEntity)
|
||||
.innerJoin(mapSheetAnalDataInferenceEntity)
|
||||
.on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
|
||||
.innerJoin(mapSheetAnalInferenceEntity)
|
||||
.on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
|
||||
.innerJoin(mapSheetLearnEntity)
|
||||
.on(mapSheetLearnEntity.id.eq(mapSheetAnalInferenceEntity.learnId))
|
||||
.where(builder)
|
||||
.fetch();
|
||||
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
List<ChangeDetectionDto.PointFeature> result =
|
||||
list.stream()
|
||||
.map(
|
||||
data -> {
|
||||
String geoJson = data.getGeometry();
|
||||
JsonNode jsonNode;
|
||||
try {
|
||||
jsonNode = mapper.readTree(geoJson);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
return new ChangeDetectionDto.PointFeature(
|
||||
data.getType(), jsonNode, data.getProperties());
|
||||
})
|
||||
.collect(Collectors.toList());
|
||||
|
||||
return new ChangeDetectionDto.PointFeatureList("FeatureCollection", result);
|
||||
}
|
||||
|
||||
@Override
|
||||
public PolygonFeatureList getSelectedChangeDetectionPolygonListByPnu(
|
||||
String chnDtctId, String pnu) {
|
||||
BooleanBuilder builder = new BooleanBuilder();
|
||||
builder.and(mapSheetLearnEntity.uid.eq(chnDtctId));
|
||||
builder.and(pnuEntity.pnu.eq(pnu));
|
||||
|
||||
List<ChangeDetectionDto.PolygonQueryData> list =
|
||||
queryFactory
|
||||
.select(
|
||||
Projections.constructor(
|
||||
ChangeDetectionDto.PolygonQueryData.class,
|
||||
Expressions.stringTemplate("{0}", "Feature"),
|
||||
Expressions.stringTemplate(
|
||||
"ST_AsGeoJSON({0})", mapSheetAnalDataInferenceGeomEntity.geom),
|
||||
mapSheetAnalDataInferenceGeomEntity.geoUid,
|
||||
mapSheetAnalDataInferenceGeomEntity.area,
|
||||
mapSheetAnalDataInferenceGeomEntity.compareYyyy,
|
||||
mapSheetAnalDataInferenceGeomEntity.classBeforeProb,
|
||||
mapSheetAnalDataInferenceGeomEntity.classBeforeCd.toUpperCase(),
|
||||
mapSheetAnalDataInferenceGeomEntity.targetYyyy,
|
||||
mapSheetAnalDataInferenceGeomEntity.classAfterProb,
|
||||
mapSheetAnalDataInferenceGeomEntity.classAfterCd.toUpperCase(),
|
||||
mapSheetAnalDataInferenceGeomEntity.cdProb))
|
||||
.from(mapSheetAnalDataInferenceGeomEntity)
|
||||
.innerJoin(mapSheetAnalDataInferenceEntity)
|
||||
.on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
|
||||
.innerJoin(mapSheetAnalInferenceEntity)
|
||||
.on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
|
||||
.innerJoin(mapSheetLearnEntity)
|
||||
.on(mapSheetLearnEntity.id.eq(mapSheetAnalInferenceEntity.learnId))
|
||||
.innerJoin(pnuEntity.geo, mapSheetAnalDataInferenceGeomEntity)
|
||||
.where(builder)
|
||||
.orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc())
|
||||
.fetch();
|
||||
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
List<ChangeDetectionDto.PolygonFeature> result =
|
||||
list.stream()
|
||||
.map(
|
||||
data -> {
|
||||
String geoJson = data.getGeometry();
|
||||
JsonNode jsonNode;
|
||||
try {
|
||||
jsonNode = mapper.readTree(geoJson);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
ChangeDetectionDto.PolygonProperties properties =
|
||||
new ChangeDetectionDto.PolygonProperties(
|
||||
data.getGeoUid(),
|
||||
data.getArea(),
|
||||
data.getBeforeYear(),
|
||||
data.getBeforeConfidence(),
|
||||
data.getBeforeClass(),
|
||||
data.getAfterYear(),
|
||||
data.getAfterConfidence(),
|
||||
data.getAfterClass(),
|
||||
data.getCdProb(),
|
||||
data.getUuid(),
|
||||
data.getResultUid());
|
||||
|
||||
return new ChangeDetectionDto.PolygonFeature(
|
||||
data.getType(), jsonNode, properties);
|
||||
})
|
||||
.collect(Collectors.toList());
|
||||
|
||||
ChangeDetectionDto.PolygonFeatureList polygonList = new ChangeDetectionDto.PolygonFeatureList();
|
||||
polygonList.setType("FeatureCollection");
|
||||
polygonList.setFeatures(result);
|
||||
return polygonList;
|
||||
}
|
||||
|
||||
@Override
|
||||
public PointFeatureList getSelectedChangeDetectionPointListByPnu(String chnDtctId, String pnu) {
|
||||
BooleanBuilder builder = new BooleanBuilder();
|
||||
builder.and(mapSheetLearnEntity.uid.eq(chnDtctId));
|
||||
builder.and(pnuEntity.pnu.eq(pnu));
|
||||
|
||||
List<ChangeDetectionDto.PointQueryData> list =
|
||||
queryFactory
|
||||
.select(
|
||||
Projections.constructor(
|
||||
ChangeDetectionDto.PointQueryData.class,
|
||||
Expressions.stringTemplate("{0}", "Feature"),
|
||||
Expressions.stringTemplate(
|
||||
"ST_AsGeoJSON({0})",
|
||||
mapSheetAnalDataInferenceGeomEntity.geomCenter), // point
|
||||
Projections.constructor(
|
||||
ChangeDetectionDto.PointProperties.class,
|
||||
mapSheetAnalDataInferenceGeomEntity.geoUid,
|
||||
mapSheetAnalDataInferenceGeomEntity.classAfterCd.toUpperCase())))
|
||||
.from(mapSheetAnalDataInferenceGeomEntity)
|
||||
.innerJoin(mapSheetAnalDataInferenceEntity)
|
||||
.on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
|
||||
.innerJoin(mapSheetAnalInferenceEntity)
|
||||
.on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
|
||||
.innerJoin(mapSheetLearnEntity)
|
||||
.on(mapSheetLearnEntity.id.eq(mapSheetAnalInferenceEntity.learnId))
|
||||
.innerJoin(pnuEntity.geo, mapSheetAnalDataInferenceGeomEntity)
|
||||
.where(builder)
|
||||
.fetch();
|
||||
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
List<ChangeDetectionDto.PointFeature> result =
|
||||
list.stream()
|
||||
.map(
|
||||
data -> {
|
||||
String geoJson = data.getGeometry();
|
||||
JsonNode jsonNode;
|
||||
try {
|
||||
jsonNode = mapper.readTree(geoJson);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
return new ChangeDetectionDto.PointFeature(
|
||||
data.getType(), jsonNode, data.getProperties());
|
||||
})
|
||||
.collect(Collectors.toList());
|
||||
|
||||
return new ChangeDetectionDto.PointFeatureList("FeatureCollection", result);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<UUID> getLearnUuid(String chnDtctId) {
|
||||
return Optional.ofNullable(
|
||||
queryFactory
|
||||
.select(mapSheetAnalInferenceEntity.uuid)
|
||||
.from(mapSheetLearnEntity)
|
||||
.innerJoin(mapSheetAnalInferenceEntity)
|
||||
.on(mapSheetAnalInferenceEntity.learnId.eq(mapSheetLearnEntity.id))
|
||||
.where(mapSheetLearnEntity.uid.eq(chnDtctId))
|
||||
.fetchOne());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,12 @@
package com.kamco.cd.kamcoback.postgres.repository.gukyuin;

import com.kamco.cd.kamcoback.gukyuin.dto.GukYuinDto.GeomUidDto;
import java.time.LocalDate;
import java.util.List;

public interface GukYuinLabelJobRepositoryCustom {

List<GeomUidDto> findYesterdayLabelingCompleteList();
List<GeomUidDto> findYesterdayLabelingCompleteList(LocalDate baseDate);

void updateAnalDataInferenceGeomSendDttm(Long geoUid);
}

@@ -13,6 +13,7 @@ import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.List;
@@ -27,22 +28,21 @@ public class GukYuinLabelJobRepositoryImpl implements GukYuinLabelJobRepositoryC
@PersistenceContext private EntityManager em;

@Override
public List<GeomUidDto> findYesterdayLabelingCompleteList() {
public List<GeomUidDto> findYesterdayLabelingCompleteList(LocalDate baseDate) {
ZoneId zone = ZoneId.of("Asia/Seoul");
ZonedDateTime todayStart = ZonedDateTime.now(zone).toLocalDate().atStartOfDay(zone);
ZonedDateTime tomorrowStart = todayStart.plusDays(1);
ZonedDateTime yesterdayStart = todayStart.minusDays(1);

// BooleanExpression isYesterday =
// labelingAssignmentEntity
// .inspectStatDttm
// .goe(yesterdayStart)
// .and(labelingAssignmentEntity.inspectStatDttm.lt(todayStart));
BooleanExpression isYesterday =
// If baseDate is null, fall back to "yesterday" as before
LocalDate targetDate =
(baseDate != null) ? baseDate : ZonedDateTime.now(zone).toLocalDate().minusDays(1);

ZonedDateTime targetStart = targetDate.atStartOfDay(zone);
ZonedDateTime nextStart = targetStart.plusDays(1);

BooleanExpression inTargetDay =
labelingAssignmentEntity
.inspectStatDttm
.goe(todayStart)
.and(labelingAssignmentEntity.inspectStatDttm.lt(tomorrowStart));
.goe(targetStart)
.and(labelingAssignmentEntity.inspectStatDttm.lt(nextStart));

return queryFactory
.select(
@@ -62,7 +62,7 @@ public class GukYuinLabelJobRepositoryImpl implements GukYuinLabelJobRepositoryC
mapSheetAnalInferenceEntity.learnId.eq(mapSheetLearnEntity.id),
mapSheetLearnEntity.applyStatus.in(
GukYuinStatus.GUK_COMPLETED.getId(), GukYuinStatus.PNU_COMPLETED.getId()))
.where(labelingAssignmentEntity.inspectState.eq(InspectState.COMPLETE.getId()), isYesterday)
.where(labelingAssignmentEntity.inspectState.eq(InspectState.COMPLETE.getId()), inTargetDay)
.fetch();
}

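The hunk above replaces the hard-coded "yesterday" window with a half-open window derived from the optional baseDate. A minimal, standalone sketch of that day-window computation, mirroring the repository logic (the sample null baseDate only demonstrates the fallback):

```java
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZonedDateTime;

public class BaseDateWindowSketch {
    public static void main(String[] args) {
        ZoneId zone = ZoneId.of("Asia/Seoul");
        LocalDate baseDate = null; // e.g. the optional request parameter from the manual scheduler endpoint

        // Same fallback as the repository: a null baseDate means "yesterday" in Asia/Seoul.
        LocalDate targetDate =
                (baseDate != null) ? baseDate : ZonedDateTime.now(zone).toLocalDate().minusDays(1);

        // Half-open window [targetStart, nextStart) that inspectStatDttm must fall into.
        ZonedDateTime targetStart = targetDate.atStartOfDay(zone);
        ZonedDateTime nextStart = targetStart.plusDays(1);

        System.out.println("window: " + targetStart + " <= inspectStatDttm < " + nextStart);
    }
}
```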
@@ -110,4 +110,9 @@ public interface LabelAllocateRepositoryCustom {
|
||||
InferenceLearnDto findLabelingIngProcessId(UUID uuid);
|
||||
|
||||
Optional<String> findLearnUid(UUID uuid);
|
||||
|
||||
List<AllocateInfoDto> fetchNextIdsAddStbltYn(
|
||||
UUID uuid, LocalDate baseDate, Long lastId, Long totalCnt);
|
||||
|
||||
Long findAllocateAddCnt(UUID uuid, LocalDate baseDate);
|
||||
}
|
||||
|
||||
@@ -1856,4 +1856,56 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
|
||||
.where(mapSheetAnalInferenceEntity.uuid.eq(uuid))
|
||||
.fetchOne());
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<AllocateInfoDto> fetchNextIdsAddStbltYn(
|
||||
UUID uuid, LocalDate baseDate, Long lastId, Long totalCnt) {
|
||||
ZoneId zone = ZoneId.of("Asia/Seoul"); // 기준 타임존 명확히
|
||||
ZonedDateTime nextDayStart = baseDate.plusDays(1).atStartOfDay(zone);
|
||||
|
||||
return queryFactory
|
||||
.select(
|
||||
Projections.constructor(
|
||||
AllocateInfoDto.class,
|
||||
mapSheetAnalDataInferenceGeomEntity.geoUid,
|
||||
mapSheetAnalDataInferenceGeomEntity.mapSheetNum,
|
||||
mapSheetAnalDataInferenceGeomEntity.pnu))
|
||||
.from(mapSheetAnalInferenceEntity)
|
||||
.innerJoin(mapSheetAnalDataInferenceEntity)
|
||||
.on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
|
||||
.innerJoin(mapSheetAnalDataInferenceGeomEntity)
|
||||
.on(
|
||||
mapSheetAnalDataInferenceEntity.id.eq(mapSheetAnalDataInferenceGeomEntity.dataUid),
|
||||
mapSheetAnalDataInferenceGeomEntity.pnu.gt(0),
|
||||
mapSheetAnalDataInferenceGeomEntity.fitState.eq(ImageryFitStatus.UNFIT.getId()),
|
||||
mapSheetAnalDataInferenceGeomEntity.fitStateDttm.lt(nextDayStart),
|
||||
mapSheetAnalDataInferenceGeomEntity.labelState.isNull())
|
||||
.where(
|
||||
mapSheetAnalInferenceEntity.uuid.eq(uuid),
|
||||
lastId == null ? null : mapSheetAnalDataInferenceGeomEntity.geoUid.gt(lastId))
|
||||
.orderBy(mapSheetAnalDataInferenceGeomEntity.mapSheetNum.asc())
|
||||
.limit(totalCnt)
|
||||
.fetch();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Long findAllocateAddCnt(UUID uuid, LocalDate baseDate) {
|
||||
ZoneId zone = ZoneId.of("Asia/Seoul"); // 기준 타임존 명확히
|
||||
ZonedDateTime nextDayStart = baseDate.plusDays(1).atStartOfDay(zone);
|
||||
|
||||
return queryFactory
|
||||
.select(mapSheetAnalDataInferenceGeomEntity.geoUid.count())
|
||||
.from(mapSheetAnalInferenceEntity)
|
||||
.innerJoin(mapSheetAnalDataInferenceEntity)
|
||||
.on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
|
||||
.innerJoin(mapSheetAnalDataInferenceGeomEntity)
|
||||
.on(
|
||||
mapSheetAnalDataInferenceEntity.id.eq(mapSheetAnalDataInferenceGeomEntity.dataUid),
|
||||
mapSheetAnalDataInferenceGeomEntity.pnu.gt(0),
|
||||
mapSheetAnalDataInferenceGeomEntity.fitState.eq(ImageryFitStatus.UNFIT.getId()),
|
||||
mapSheetAnalDataInferenceGeomEntity.fitStateDttm.lt(nextDayStart),
|
||||
mapSheetAnalDataInferenceGeomEntity.labelState.isNull())
|
||||
.where(mapSheetAnalInferenceEntity.uuid.eq(uuid))
|
||||
.fetchOne();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -142,8 +142,8 @@ public class MapLayerRepositoryImpl implements MapLayerRepositoryCustom {
|
||||
Projections.constructor(
|
||||
LayerDto.TileUrlDto.class,
|
||||
mapSheetMngTileEntity.mngYyyy,
|
||||
mapSheetMngTileEntity.tag,
|
||||
mapSheetMngTileEntity.url,
|
||||
mapSheetMngTileEntity.tag,
|
||||
mapSheetMngTileEntity.minLon,
|
||||
mapSheetMngTileEntity.minLat,
|
||||
mapSheetMngTileEntity.maxLon,
|
||||
@@ -168,8 +168,8 @@ public class MapLayerRepositoryImpl implements MapLayerRepositoryCustom {
|
||||
Projections.constructor(
|
||||
LayerDto.TileUrlDto.class,
|
||||
mapSheetMngTileEntity.mngYyyy,
|
||||
mapSheetMngTileEntity.tag,
|
||||
mapSheetMngTileEntity.url,
|
||||
mapSheetMngTileEntity.tag,
|
||||
mapSheetMngTileEntity.minLon,
|
||||
mapSheetMngTileEntity.minLat,
|
||||
mapSheetMngTileEntity.maxLon,
|
||||
@@ -202,8 +202,8 @@ public class MapLayerRepositoryImpl implements MapLayerRepositoryCustom {
|
||||
Projections.constructor(
|
||||
LayerDto.TileUrlDto.class,
|
||||
mapSheetMngTileEntity.mngYyyy,
|
||||
mapSheetMngTileEntity.tag,
|
||||
mapSheetMngTileEntity.url,
|
||||
mapSheetMngTileEntity.tag,
|
||||
mapSheetMngTileEntity.minLon,
|
||||
mapSheetMngTileEntity.minLat,
|
||||
mapSheetMngTileEntity.maxLon,
|
||||
|
||||
@@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.postgres.repository.mapsheet;
|
||||
|
||||
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetFallbackYearDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.AddReq;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto;
|
||||
@@ -63,6 +64,12 @@ public interface MapSheetMngRepositoryCustom {
|
||||
|
||||
List<MapSheetMngDto.MngFilesDto> findByHstUidMapSheetFileList(Long hstUid);
|
||||
|
||||
/**
|
||||
* 변화탐지 실행 가능 기준 연도 조회
|
||||
*
|
||||
* @param req 조회 연도, 도엽번호 목록,
|
||||
* @return
|
||||
*/
|
||||
List<MngListDto> findByHstMapSheetTargetList(InferenceResultDto.RegReq req);
|
||||
|
||||
List<MngListDto> findByHstMapSheetTargetList(int mngYyyy, List<String> mapIds);
|
||||
@@ -80,4 +87,9 @@ public interface MapSheetMngRepositoryCustom {
|
||||
void updateMapSheetMngHstUploadId(Long hstUid, UUID uuid, String uploadId);
|
||||
|
||||
void insertMapSheetMngTile(@Valid AddReq addReq);
|
||||
|
||||
List<MapSheetMngHstEntity> getMapSheetMngHst(Integer year);
|
||||
|
||||
List<MapSheetFallbackYearDto> findFallbackCompareYearByMapSheets(
|
||||
Integer year, List<String> mapIds);
|
||||
}
|
||||
|
||||
@@ -5,12 +5,14 @@ import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngEntity.mapSheet
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngFileEntity.mapSheetMngFileEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngHstEntity.mapSheetMngHstEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngTileEntity.mapSheetMngTileEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngYearYnEntity.mapSheetMngYearYnEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QYearEntity.yearEntity;
|
||||
import static com.querydsl.core.types.dsl.Expressions.nullExpression;
|
||||
|
||||
import com.kamco.cd.kamcoback.common.enums.CommonUseStatus;
|
||||
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetFallbackYearDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.AddReq;
|
||||
@@ -1099,4 +1101,44 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
|
||||
"{0} like '%" + searchReq.getSearchValue() + "%'",
|
||||
mapSheetMngHstEntity.mapSheetNum));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<MapSheetMngHstEntity> getMapSheetMngHst(Integer year) {
|
||||
return queryFactory
|
||||
.select(mapSheetMngHstEntity)
|
||||
.from(mapSheetMngHstEntity)
|
||||
.innerJoin(mapSheetMngFileEntity)
|
||||
.on(mapSheetMngFileEntity.hstUid.eq(mapSheetMngHstEntity.hstUid))
|
||||
.where(
|
||||
mapSheetMngHstEntity
|
||||
.mngYyyy
|
||||
.eq(year)
|
||||
.and(
|
||||
mapSheetMngHstEntity
|
||||
.syncState
|
||||
.eq("DONE")
|
||||
.or(mapSheetMngHstEntity.syncCheckState.eq("DONE")))
|
||||
.and(mapSheetMngFileEntity.fileExt.eq("tif")))
|
||||
.fetch();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<MapSheetFallbackYearDto> findFallbackCompareYearByMapSheets(
|
||||
Integer year, List<String> mapIds) {
|
||||
BooleanBuilder builder = new BooleanBuilder();
|
||||
builder.and(mapSheetMngYearYnEntity.id.mapSheetNum.in(mapIds));
|
||||
builder.and(mapSheetMngYearYnEntity.id.mngYyyy.lt(year));
|
||||
builder.and(mapSheetMngYearYnEntity.yn.eq("Y"));
|
||||
|
||||
return queryFactory
|
||||
.select(
|
||||
Projections.constructor(
|
||||
MapSheetFallbackYearDto.class,
|
||||
mapSheetMngYearYnEntity.id.mapSheetNum,
|
||||
mapSheetMngYearYnEntity.id.mngYyyy.max()))
|
||||
.from(mapSheetMngYearYnEntity)
|
||||
.where(builder)
|
||||
.groupBy(mapSheetMngYearYnEntity.id.mapSheetNum)
|
||||
.fetch();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
package com.kamco.cd.kamcoback.postgres.repository.mapsheet;
|
||||
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListCompareDto;
|
||||
import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto.YearMinMax;
|
||||
import java.util.List;
|
||||
|
||||
public interface MapSheetMngYearRepositoryCustom {
|
||||
|
||||
void saveFileInfo();
|
||||
|
||||
List<MngListCompareDto> findByHstMapSheetCompareList(int mngYyyy, List<String> mapIds);
|
||||
|
||||
YearMinMax findYearMinMaxInfo();
|
||||
}
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
package com.kamco.cd.kamcoback.postgres.repository.mapsheet;
|
||||
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QYearEntity.yearEntity;
|
||||
|
||||
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListCompareDto;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngYearYnEntity;
|
||||
import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto.YearMinMax;
|
||||
import com.querydsl.core.types.Projections;
|
||||
import com.querydsl.core.types.dsl.Expressions;
|
||||
import com.querydsl.core.types.dsl.StringExpression;
|
||||
@@ -26,52 +29,52 @@ public class MapSheetMngYearRepositoryImpl implements MapSheetMngYearRepositoryC
|
||||
|
||||
String sql =
|
||||
"""
|
||||
WITH bounds AS (
|
||||
WITH bounds AS (
|
||||
SELECT
|
||||
map_sheet_num,
|
||||
MIN(mng_yyyy::int) AS min_y,
|
||||
MAX(mng_yyyy::int) AS max_y
|
||||
FROM tb_map_sheet_mng_files
|
||||
GROUP BY map_sheet_num
|
||||
),
|
||||
years AS (
|
||||
SELECT
|
||||
b.map_sheet_num,
|
||||
gs.y AS mng_yyyy
|
||||
FROM bounds b
|
||||
CROSS JOIN LATERAL generate_series(b.min_y, b.max_y) AS gs(y)
|
||||
),
|
||||
exist AS (
|
||||
SELECT DISTINCT
|
||||
map_sheet_num,
|
||||
mng_yyyy::int AS mng_yyyy
|
||||
FROM tb_map_sheet_mng_files
|
||||
),
|
||||
src AS (
|
||||
SELECT
|
||||
y.map_sheet_num,
|
||||
y.mng_yyyy,
|
||||
CASE
|
||||
WHEN e.map_sheet_num IS NULL THEN 'N'
|
||||
ELSE 'Y'
|
||||
END AS yn
|
||||
FROM years y
|
||||
LEFT JOIN exist e
|
||||
ON e.map_sheet_num = y.map_sheet_num
|
||||
AND e.mng_yyyy = y.mng_yyyy
|
||||
)
|
||||
INSERT INTO tb_map_sheet_mng_year_yn
|
||||
(map_sheet_num, mng_yyyy, yn)
|
||||
SELECT
|
||||
map_sheet_num,
|
||||
MIN(mng_yyyy::int) AS min_y,
|
||||
MAX(mng_yyyy::int) AS max_y
|
||||
FROM tb_map_sheet_mng_files
|
||||
GROUP BY map_sheet_num
|
||||
),
|
||||
years AS (
|
||||
SELECT
|
||||
b.map_sheet_num,
|
||||
gs.y AS mng_yyyy
|
||||
FROM bounds b
|
||||
CROSS JOIN LATERAL generate_series(b.min_y, b.max_y) AS gs(y)
|
||||
),
|
||||
exist AS (
|
||||
SELECT DISTINCT
|
||||
map_sheet_num,
|
||||
mng_yyyy::int AS mng_yyyy
|
||||
FROM tb_map_sheet_mng_files
|
||||
),
|
||||
src AS (
|
||||
SELECT
|
||||
y.map_sheet_num,
|
||||
y.mng_yyyy,
|
||||
CASE
|
||||
WHEN e.map_sheet_num IS NULL THEN 'N'
|
||||
ELSE 'Y'
|
||||
END AS yn
|
||||
FROM years y
|
||||
LEFT JOIN exist e
|
||||
ON e.map_sheet_num = y.map_sheet_num
|
||||
AND e.mng_yyyy = y.mng_yyyy
|
||||
)
|
||||
INSERT INTO tb_map_sheet_mng_year_yn
|
||||
(map_sheet_num, mng_yyyy, yn)
|
||||
SELECT
|
||||
map_sheet_num,
|
||||
mng_yyyy,
|
||||
yn
|
||||
FROM src
|
||||
ON CONFLICT (map_sheet_num, mng_yyyy)
|
||||
DO UPDATE SET
|
||||
yn = EXCLUDED.yn,
|
||||
updated_dttm = now()
|
||||
""";
|
||||
mng_yyyy,
|
||||
yn
|
||||
FROM src
|
||||
ON CONFLICT (map_sheet_num, mng_yyyy)
|
||||
DO UPDATE SET
|
||||
yn = EXCLUDED.yn,
|
||||
updated_dttm = now()
|
||||
""";
|
||||
|
||||
em.createNativeQuery(sql).executeUpdate();
|
||||
}
|
||||
@@ -98,4 +101,13 @@ public class MapSheetMngYearRepositoryImpl implements MapSheetMngYearRepositoryC
|
||||
.groupBy(y.id.mapSheetNum)
|
||||
.fetch();
|
||||
}
|
||||
|
||||
@Override
|
||||
public YearMinMax findYearMinMaxInfo() {
|
||||
return queryFactory
|
||||
.select(
|
||||
Projections.constructor(YearMinMax.class, yearEntity.yyyy.min(), yearEntity.yyyy.max()))
|
||||
.from(yearEntity)
|
||||
.fetchOne();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,7 +20,12 @@ public interface MapSheetMngFileJobRepositoryCustom {
|
||||
|
||||
public Integer findNotYetMapSheetMng();
|
||||
|
||||
public Long findByHstMapSheetBeforeYyyyListCount(int strtYyyy, int endYyyy, String mapSheetNum);
|
||||
public Long findByHstMapSheetBeforeYyyyListCount(
|
||||
int mngYyyy, int strtYyyy, int endYyyy, String mapSheetNum);
|
||||
|
||||
public void updateException5kMapSheet(String mapSheetNum, CommonUseStatus commonUseStatus);
|
||||
|
||||
Long findMngYyyyCnt(Integer mngYyyy);
|
||||
|
||||
Long findMapSheetUseExceptCnt(String mapSheetNum);
|
||||
}
|
||||
|
||||
@@ -233,7 +233,8 @@ public class MapSheetMngFileJobRepositoryImpl extends QuerydslRepositorySupport
|
||||
}
|
||||
|
||||
@Override
|
||||
public Long findByHstMapSheetBeforeYyyyListCount(int strtYyyy, int endYyyy, String mapSheetNum) {
|
||||
public Long findByHstMapSheetBeforeYyyyListCount(
|
||||
int mngYyyy, int strtYyyy, int endYyyy, String mapSheetNum) {
|
||||
|
||||
Long countQuery =
|
||||
queryFactory
|
||||
@@ -244,8 +245,8 @@ public class MapSheetMngFileJobRepositoryImpl extends QuerydslRepositorySupport
|
||||
.mngYyyy
|
||||
.goe(strtYyyy)
|
||||
.and(mapSheetMngHstEntity.mngYyyy.loe(endYyyy))
|
||||
.and(mapSheetMngHstEntity.mngYyyy.ne(mngYyyy))
|
||||
.and(mapSheetMngHstEntity.mapSheetNum.eq(mapSheetNum))
|
||||
.and(mapSheetMngHstEntity.useInference.eq("USE"))
|
||||
.and(
|
||||
mapSheetMngHstEntity
|
||||
.syncState
|
||||
@@ -266,4 +267,26 @@ public class MapSheetMngFileJobRepositoryImpl extends QuerydslRepositorySupport
|
||||
.where(mapInkx5kEntity.mapidcdNo.eq(mapSheetNum))
|
||||
.execute();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Long findMngYyyyCnt(Integer mngYyyy) {
|
||||
return queryFactory
|
||||
.select(mapSheetMngEntity.mngYyyy.count())
|
||||
.from(mapSheetMngEntity)
|
||||
.where(mapSheetMngEntity.mngYyyy.ne(mngYyyy))
|
||||
.fetchOne();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Long findMapSheetUseExceptCnt(String mapSheetNum) {
|
||||
return queryFactory
|
||||
.select(
|
||||
new CaseBuilder()
|
||||
.when(mapInkx5kEntity.useInference.eq(CommonUseStatus.USE))
|
||||
.then(1L)
|
||||
.otherwise(0L))
|
||||
.from(mapInkx5kEntity)
|
||||
.where(mapInkx5kEntity.mapidcdNo.eq(mapSheetNum))
|
||||
.fetchOne();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,12 +2,13 @@ package com.kamco.cd.kamcoback.postgres.repository.scheduler;
|
||||
|
||||
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.InspectorPendingDto;
|
||||
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.Tasks;
|
||||
import java.time.LocalDate;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
public interface TrainingDataLabelJobRepositoryCustom {
|
||||
|
||||
List<Tasks> findCompletedYesterdayUnassigned();
|
||||
List<Tasks> findCompletedYesterdayUnassigned(LocalDate baseDate);
|
||||
|
||||
List<InspectorPendingDto> findInspectorPendingByRound(Long analUid);
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ import com.querydsl.core.types.dsl.BooleanExpression;
|
||||
import com.querydsl.core.types.dsl.Expressions;
|
||||
import com.querydsl.core.types.dsl.StringExpression;
|
||||
import com.querydsl.jpa.impl.JPAQueryFactory;
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZoneId;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.List;
|
||||
@@ -34,16 +35,23 @@ public class TrainingDataLabelJobRepositoryImpl extends QuerydslRepositorySuppor
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Tasks> findCompletedYesterdayUnassigned() {
|
||||
public List<Tasks> findCompletedYesterdayUnassigned(LocalDate baseDate) {
|
||||
ZoneId zone = ZoneId.of("Asia/Seoul");
|
||||
ZonedDateTime todayStart = ZonedDateTime.now(zone).toLocalDate().atStartOfDay(zone);
|
||||
ZonedDateTime yesterdayStart = todayStart.minusDays(1);
|
||||
|
||||
// baseDate가 null이면 "어제"를 타겟으로, 아니면 baseDate
|
||||
LocalDate targetDate = (baseDate != null) ? baseDate : LocalDate.now(zone).minusDays(1);
|
||||
|
||||
// end: targetDate + 1일 00:00
|
||||
ZonedDateTime baseStart = targetDate.plusDays(1).atStartOfDay(zone);
|
||||
|
||||
// start: targetDate 00:00
|
||||
ZonedDateTime yesterdayStart = baseStart.minusDays(1);
|
||||
|
||||
BooleanExpression isYesterday =
|
||||
labelingAssignmentEntity
|
||||
.workStatDttm
|
||||
.goe(yesterdayStart)
|
||||
.and(labelingAssignmentEntity.workStatDttm.lt(todayStart));
|
||||
.and(labelingAssignmentEntity.workStatDttm.lt(baseStart));
|
||||
|
||||
return queryFactory
|
||||
.select(
|
||||
|
||||
@@ -3,13 +3,15 @@ package com.kamco.cd.kamcoback.postgres.repository.scheduler;
|
||||
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.AnalCntInfo;
|
||||
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.AnalMapSheetList;
|
||||
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.CompleteLabelData;
|
||||
import java.time.LocalDate;
|
||||
import java.util.List;
|
||||
|
||||
public interface TrainingDataReviewJobRepositoryCustom {
|
||||
|
||||
List<CompleteLabelData> findCompletedYesterdayLabelingList(Long analUid, String mapSheetNum);
|
||||
List<CompleteLabelData> findCompletedYesterdayLabelingList(
|
||||
Long analUid, String mapSheetNum, LocalDate baseDate);
|
||||
|
||||
List<AnalMapSheetList> findCompletedAnalMapSheetList(Long analUid);
|
||||
List<AnalMapSheetList> findCompletedAnalMapSheetList(Long analUid, LocalDate baseDate);
|
||||
|
||||
List<AnalCntInfo> findAnalCntInfoList();
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapShe
|
||||
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.InspectState;
|
||||
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelMngState;
|
||||
import com.kamco.cd.kamcoback.model.dto.ModelMngDto.ModelType;
|
||||
import com.kamco.cd.kamcoback.postgres.entity.LabelingAssignmentEntity;
|
||||
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.AnalCntInfo;
|
||||
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.AnalMapSheetList;
|
||||
@@ -40,12 +41,13 @@ public class TrainingDataReviewJobRepositoryImpl extends QuerydslRepositorySuppo
|
||||
|
||||
@Override
|
||||
public List<CompleteLabelData> findCompletedYesterdayLabelingList(
|
||||
Long analUid, String mapSheetNum) {
|
||||
Long analUid, String mapSheetNum, LocalDate baseDate) {
|
||||
ZoneId zoneId = ZoneId.of("Asia/Seoul");
|
||||
|
||||
// 오늘 날짜 (시간 없음)
|
||||
LocalDate today = LocalDate.now(zoneId);
|
||||
ZonedDateTime end = today.atStartOfDay(zoneId); // 오늘 00:00
|
||||
// baseDate가 null이면 기존처럼 오늘 기준
|
||||
LocalDate targetDate = (baseDate != null) ? baseDate : LocalDate.now(zoneId);
|
||||
|
||||
ZonedDateTime end = targetDate.plusDays(1).atStartOfDay(zoneId);
|
||||
|
||||
return queryFactory
|
||||
.select(
|
||||
@@ -58,10 +60,10 @@ public class TrainingDataReviewJobRepositoryImpl extends QuerydslRepositorySuppo
|
||||
Properties.class,
|
||||
new CaseBuilder()
|
||||
.when(mapSheetLearnDataGeomEntity.classAfterCd.in("building", "container"))
|
||||
.then("M1")
|
||||
.then(ModelType.G1.getId())
|
||||
.when(mapSheetLearnDataGeomEntity.classAfterCd.eq("waste"))
|
||||
.then("M2")
|
||||
.otherwise("M3"),
|
||||
.then(ModelType.G2.getId())
|
||||
.otherwise(ModelType.G3.getId()),
|
||||
mapSheetLearnDataGeomEntity.classBeforeCd,
|
||||
mapSheetLearnDataGeomEntity.classAfterCd)))
|
||||
.from(labelingAssignmentEntity)
|
||||
@@ -76,10 +78,13 @@ public class TrainingDataReviewJobRepositoryImpl extends QuerydslRepositorySuppo
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<AnalMapSheetList> findCompletedAnalMapSheetList(Long analUid) {
|
||||
public List<AnalMapSheetList> findCompletedAnalMapSheetList(Long analUid, LocalDate baseDate) {
|
||||
ZoneId zoneId = ZoneId.of("Asia/Seoul");
|
||||
LocalDate today = LocalDate.now(zoneId);
|
||||
ZonedDateTime end = today.atStartOfDay(zoneId); // 오늘 00:00
|
||||
|
||||
// baseDate가 null이면 기존처럼 오늘 기준
|
||||
LocalDate targetDate = (baseDate != null) ? baseDate : LocalDate.now(zoneId);
|
||||
|
||||
ZonedDateTime end = targetDate.plusDays(1).atStartOfDay(zoneId);
|
||||
|
||||
return queryFactory
|
||||
.select(
|
||||
|
||||
@@ -120,6 +120,7 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
|
||||
.orderBy(
|
||||
mapSheetAnalInferenceEntity.targetYyyy.asc(),
|
||||
mapSheetAnalInferenceEntity.compareYyyy.asc(),
|
||||
labelingAssignmentEntity.assignGroupId.asc(),
|
||||
labelingAssignmentEntity.createdDate.asc(),
|
||||
labelingAssignmentEntity.inferenceGeomUid.asc())
|
||||
.fetch();
|
||||
@@ -653,6 +654,7 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
|
||||
queryFactory
|
||||
.select(
|
||||
labelingAssignmentEntity.assignmentUid,
|
||||
labelingAssignmentEntity.assignGroupId,
|
||||
labelingAssignmentEntity.createdDate,
|
||||
labelingAssignmentEntity.inferenceGeomUid,
|
||||
mapSheetAnalInferenceEntity.targetYyyy,
|
||||
@@ -671,6 +673,7 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
|
||||
.orderBy(
|
||||
mapSheetAnalInferenceEntity.targetYyyy.asc(),
|
||||
mapSheetAnalInferenceEntity.compareYyyy.asc(),
|
||||
labelingAssignmentEntity.assignGroupId.asc(),
|
||||
labelingAssignmentEntity.createdDate.asc(),
|
||||
labelingAssignmentEntity.inferenceGeomUid.asc())
|
||||
.limit(1)
|
||||
@@ -685,6 +688,7 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
|
||||
Long inferenceGeomUid = firstAssigned.get(labelingAssignmentEntity.inferenceGeomUid);
|
||||
Integer targetYyyy = firstAssigned.get(mapSheetAnalInferenceEntity.targetYyyy);
|
||||
Integer compareYyyy = firstAssigned.get(mapSheetAnalInferenceEntity.compareYyyy);
|
||||
String assignGroupId = firstAssigned.get(labelingAssignmentEntity.assignGroupId);
|
||||
|
||||
BooleanExpression beforeCondition =
|
||||
mapSheetAnalInferenceEntity
|
||||
@@ -700,12 +704,20 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
|
||||
.targetYyyy
|
||||
.eq(targetYyyy)
|
||||
.and(mapSheetAnalInferenceEntity.compareYyyy.eq(compareYyyy))
|
||||
.and(labelingAssignmentEntity.assignGroupId.lt(assignGroupId)))
|
||||
.or(
|
||||
mapSheetAnalInferenceEntity
|
||||
.targetYyyy
|
||||
.eq(targetYyyy)
|
||||
.and(mapSheetAnalInferenceEntity.compareYyyy.eq(compareYyyy))
|
||||
.and(labelingAssignmentEntity.assignGroupId.eq(assignGroupId))
|
||||
.and(labelingAssignmentEntity.createdDate.lt(createdDttm)))
|
||||
.or(
|
||||
mapSheetAnalInferenceEntity
|
||||
.targetYyyy
|
||||
.eq(targetYyyy)
|
||||
.and(mapSheetAnalInferenceEntity.compareYyyy.eq(compareYyyy))
|
||||
.and(labelingAssignmentEntity.assignGroupId.eq(assignGroupId))
|
||||
.and(labelingAssignmentEntity.createdDate.eq(createdDttm))
|
||||
.and(labelingAssignmentEntity.inferenceGeomUid.lt(inferenceGeomUid)));
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
|
||||
import com.kamco.cd.kamcoback.code.service.CommonCodeService;
|
||||
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
|
||||
import com.kamco.cd.kamcoback.scheduler.service.MapSheetInferenceJobService;
|
||||
import io.swagger.v3.oas.annotations.Hidden;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
@@ -17,6 +18,8 @@ import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
/** SchedulerApiController로 다 옮김 */
|
||||
@Hidden
|
||||
@Tag(name = "스캐쥴러 API", description = "스캐쥴러 API")
|
||||
@RestController
|
||||
@RequiredArgsConstructor
|
||||
|
||||
@@ -0,0 +1,120 @@
package com.kamco.cd.kamcoback.scheduler;

import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiLabelJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiPnuJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiStatusJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiStbltJobService;
import com.kamco.cd.kamcoback.scheduler.service.MemberInactiveJobService;
import com.kamco.cd.kamcoback.scheduler.service.TrainingDataLabelJobService;
import com.kamco.cd.kamcoback.scheduler.service.TrainingDataReviewJobService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.time.LocalDate;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@Tag(name = "스케줄링 수동 호출 테스트", description = "스케줄링 수동 호출 테스트 API")
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/schedule")
public class SchedulerApiController {

private final GukYuinApiPnuJobService gukYuinApiPnuJobService;
private final GukYuinApiStatusJobService gukYuinApiStatusJobService;
private final GukYuinApiLabelJobService gukYuinApiLabelJobService;
private final GukYuinApiStbltJobService gukYuinApiStbltJobService;
private final TrainingDataLabelJobService trainingDataLabelJobService;
private final TrainingDataReviewJobService trainingDataReviewJobService;
private final MemberInactiveJobService memberInactiveJobService;
private final MapSheetMngFileJobController mapSheetMngFileJobController;

@Operation(summary = "국유인 탐지객체 조회 PNU 업데이트 스케줄링", description = "국유인 탐지객체 조회 PNU 업데이트 스케줄링")
@GetMapping("/gukyuin/pnu")
public ApiResponseDto<Void> findGukYuinContListPnuUpdate() {
gukYuinApiPnuJobService.findGukYuinContListPnuUpdate();
return ApiResponseDto.ok(null);
}

@Operation(summary = "국유인 등록 상태 체크 스케줄링", description = "국유인 등록 상태 체크 스케줄링")
@GetMapping("/gukyuin/status")
public ApiResponseDto<Void> findGukYuinMastCompleteYn() {
gukYuinApiStatusJobService.findGukYuinMastCompleteYn();
return ApiResponseDto.ok(null);
}

@Operation(summary = "국유인 라벨 완료 전송 스케줄링", description = "국유인 라벨 완료 전송 스케줄링")
@GetMapping("/gukyuin/label")
public ApiResponseDto<Void> findLabelingCompleteSend(
@RequestParam(required = false) LocalDate baseDate) {
gukYuinApiLabelJobService.findLabelingCompleteSend(baseDate);
return ApiResponseDto.ok(null);
}

@Operation(summary = "국유인 실태조사 적합여부 업데이트 스케줄링", description = "국유인 실태조사 적합여부 업데이트 스케줄링")
@GetMapping("/gukyuin/stblt")
public ApiResponseDto<Void> findGukYuinEligibleForSurvey(
@RequestParam(required = false) LocalDate baseDate) {
gukYuinApiStbltJobService.findGukYuinEligibleForSurvey(baseDate);
return ApiResponseDto.ok(null);
}

@Operation(
summary = "라벨완료 -> 검수할당 스케줄링",
description = "스케줄링이 실패한 경우 수동 호출하는 API, 어제 라벨링 완료된 것을 해당 검수자들에게 할당함")
@GetMapping("/label-to-review")
public ApiResponseDto<Void> runTrainingReviewSchedule(
@RequestParam(required = false) LocalDate baseDate) {
trainingDataLabelJobService.assignReviewerYesterdayLabelComplete(baseDate);
return ApiResponseDto.ok(null);
}

@Operation(summary = "검수완료된 라벨링 geojson 생성 스케줄링", description = "검수완료된 라벨링 geojson 생성")
@GetMapping("/review-to-geojson")
public ApiResponseDto<Long> runExportGeojsonLabelingGeom(
@RequestParam(required = false) LocalDate baseDate) {
trainingDataReviewJobService.exportGeojsonLabelingGeom(baseDate);
return ApiResponseDto.ok(0L);
}

@Operation(
summary = "라벨러/검수자 최종로그인 28일 경과 이후 사용중지 스케줄링",
description = "라벨러/검수자 최종로그인 28일 경과 이후 사용중지 처리")
@GetMapping("/member-inactive-job")
public ApiResponseDto<Void> memberInactiveJob() {
memberInactiveJobService.memberActive28daysToInactive();
return ApiResponseDto.ok(null);
}

@Operation(summary = "영상관리 파일 싱크 스캐쥴러 Start/Stop", description = "영상관리 파일 싱크 스캐쥴러 Start/Stop API")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PutMapping("/mng-sync-job")
public ApiResponseDto<String> mngSyncOnOff(
@RequestParam boolean jobStart, @RequestParam int pageSize) {

mapSheetMngFileJobController.setSchedulerEnabled(jobStart);
mapSheetMngFileJobController.setMngSyncPageSize(pageSize);

return ApiResponseDto.createOK("OK");
}
}
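The controller above only exposes the existing job services as manual-trigger endpoints. For reference, a minimal sketch of calling one of them from a plain JVM client; the host, port, and bearer token are assumptions for illustration and are not part of the diff:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ScheduleTriggerExample {
    public static void main(String[] args) throws Exception {
        // Manually re-run the "label complete -> review assignment" job.
        // An optional baseDate query parameter can be appended, subject to the
        // application's LocalDate converter configuration.
        HttpRequest request = HttpRequest.newBuilder(
                URI.create("http://localhost:8080/api/schedule/label-to-review"))
            .header("Authorization", "Bearer <token>") // placeholder token
            .GET()
            .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}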
@@ -60,10 +60,10 @@ select msldge1_0.geo_uid,
st_asgeojson(msldge1_0.geom),
case
when (msldge1_0.class_after_cd in ('building', 'container'))
then cast('M1' as varchar)
then cast('G1' as varchar)
when (msldge1_0.class_after_cd = 'waste')
then cast('M2' as varchar)
else 'M3'
then cast('G2' as varchar)
else 'G3'
end,
msldge1_0.class_before_cd,
msldge1_0.class_after_cd

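The CASE expression above renames the returned codes from M1/M2/M3 to G1/G2/G3. A plain-Java restatement of the same mapping, purely illustrative (the method name is hypothetical and not part of the project):

// Mirrors the SQL CASE: building/container -> G1, waste -> G2, everything else -> G3.
static String classCodeOf(String classAfterCd) {
    if ("building".equals(classAfterCd) || "container".equals(classAfterCd)) {
        return "G1";
    }
    if ("waste".equals(classAfterCd)) {
        return "G2";
    }
    return "G3";
}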
@@ -20,4 +20,15 @@ public class AsyncConfig {
ex.initialize();
return ex;
}

@Bean(name = "auditLogExecutor")
public Executor auditLogExecutor() {
ThreadPoolTaskExecutor exec = new ThreadPoolTaskExecutor();
exec.setCorePoolSize(2);
exec.setMaxPoolSize(8);
exec.setQueueCapacity(2000);
exec.setThreadNamePrefix("auditlog-");
exec.initialize();
return exec;
}
}

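The hunk only defines the auditLogExecutor bean; its consumer is not shown in this diff. A minimal sketch of how such an executor is typically used, assuming @EnableAsync is active and that a hypothetical audit-log writer exists:

import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

@Service
public class AuditLogWriter { // hypothetical consumer, not part of the diff

    // Runs on the "auditlog-" thread pool defined in AsyncConfig
    // (core 2, max 8, queue capacity 2000).
    @Async("auditLogExecutor")
    public void write(String message) {
        // persist or forward the audit entry here
    }
}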
@@ -40,6 +40,7 @@ public class MapSheetMngDto {
@NoArgsConstructor
@AllArgsConstructor
public static class MngDto {

private int rowNum;
private int mngYyyy;
private String mngState;
@@ -61,6 +62,7 @@ public class MapSheetMngDto {
@NoArgsConstructor
@AllArgsConstructor
public static class MngHstDto {

private long hstUid;
private int mngYyyy;
private String mapSheetNum;
@@ -86,6 +88,7 @@ public class MapSheetMngDto {
@NoArgsConstructor
@AllArgsConstructor
public static class MngFileAddReq {

private int mngYyyy;
private String mapSheetNum;
private String refMapSheetNum;
@@ -103,6 +106,7 @@ public class MapSheetMngDto {
@NoArgsConstructor
@AllArgsConstructor
public static class MngFilesDto {

private long fileUid;
private int mngYyyy;
private String mapSheetNum;
@@ -132,7 +136,19 @@ public class MapSheetMngDto {
@NoArgsConstructor
@AllArgsConstructor
public static class DmlReturn {

private String flag;
private String message;
}

@Schema(name = "MngYyyyDto", description = "년도 값")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class YearMinMax {

private Integer minYyyy;
private Integer maxYyyy;
}
}

@@ -4,11 +4,11 @@ import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectContDto;
import com.kamco.cd.kamcoback.gukyuin.dto.GukYuinDto.GeomUidDto;
import com.kamco.cd.kamcoback.gukyuin.service.GukYuinApiService;
import com.kamco.cd.kamcoback.postgres.core.GukYuinLabelJobCoreService;
import java.time.LocalDate;
import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

@Log4j2
@@ -31,21 +31,25 @@ public class GukYuinApiLabelJobService {
return "local".equalsIgnoreCase(profile);
}

/** Send items whose labeling review completed yesterday -> to GukYuin */
@Scheduled(cron = "0 0 2 * * *")
public void findLabelingCompleteSend() {
if (isLocalProfile()) {
return;
}
// @Scheduled(cron = "0 0 2 * * *")
public void runTask() {
findLabelingCompleteSend(null);
}

List<GeomUidDto> list = gukYuinLabelJobCoreService.findYesterdayLabelingCompleteList();
/** Send items whose labeling review completed yesterday -> to GukYuin */
public void findLabelingCompleteSend(LocalDate baseDate) {
// if (isLocalProfile()) {
// return;
// }

List<GeomUidDto> list = gukYuinLabelJobCoreService.findYesterdayLabelingCompleteList(baseDate);
if (list.isEmpty()) {
return;
}

for (GeomUidDto gto : list) {
ChngDetectContDto.ResultLabelDto dto =
gukYuinApiService.updateChnDtctObjtLabelingYn(gto.getResultUid(), "Y");
gukYuinApiService.updateChnDtctObjtLabelingYn(gto.getResultUid(), "Y", "Y");
if (dto.getSuccess()) {
// update label_send_dttm on inference_geom
gukYuinLabelJobCoreService.updateAnalDataInferenceGeomSendDttm(gto.getGeoUid());

@@ -12,7 +12,6 @@ import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

@Log4j2
@@ -36,11 +35,11 @@ public class GukYuinApiPnuJobService {
}

/** After GukYuin registration completes, look up detected objects and update their PNU; runs once a day at 1 AM */
@Scheduled(cron = "0 0 1 * * *")
// @Scheduled(cron = "0 0 1 * * *")
public void findGukYuinContListPnuUpdate() {
if (isLocalProfile()) {
return;
}
// if (isLocalProfile()) {
// return;
// }

List<LearnKeyDto> list =
gukYuinPnuJobCoreService.findGukyuinApplyStatusUidList(
@@ -63,7 +62,7 @@ public class GukYuinApiPnuJobService {
}

private void processUid(String chnDtctId, String uid) {
ResultDto result = gukYuinApiService.listChnDtctId(chnDtctId);
ResultDto result = gukYuinApiService.listChnDtctId(chnDtctId, "Y");
if (result == null || result.getResult() == null || result.getResult().isEmpty()) {
return;
}
@@ -85,7 +84,7 @@ public class GukYuinApiPnuJobService {
}

private void processPage(String uid, int page, int pageSize) {
ResultContDto resContList = gukYuinApiService.findChnContList(uid, page, pageSize);
ResultContDto resContList = gukYuinApiService.findChnContList(uid, page, pageSize, "Y");

if (resContList.getResult() == null || resContList.getResult().isEmpty()) {
return; // guard against external API anomalies

@@ -9,7 +9,6 @@ import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

@Log4j2
@@ -33,11 +32,11 @@ public class GukYuinApiStatusJobService {
}

/** After the GukYuin sync, check whether it has reached 100%; called every 10 minutes */
@Scheduled(cron = "0 0/10 * * * *")
// @Scheduled(cron = "0 0/10 * * * *")
public void findGukYuinMastCompleteYn() {
if (isLocalProfile()) {
return;
}
// if (isLocalProfile()) {
// return;
// }

List<LearnKeyDto> list =
gukYuinJobCoreService.findGukyuinApplyStatusUidList(
@@ -48,7 +47,7 @@ public class GukYuinApiStatusJobService {

for (LearnKeyDto dto : list) {
try {
ChngDetectMastDto.ResultDto result = gukYuinApiService.listChnDtctId(dto.getUid());
ChngDetectMastDto.ResultDto result = gukYuinApiService.listChnDtctId(dto.getUid(), "Y");

if (result == null || result.getResult() == null || result.getResult().isEmpty()) {
log.warn("[GUKYUIN] empty result chnDtctMstId={}", dto.getChnDtctMstId());

@@ -16,7 +16,6 @@ import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

@Log4j2
@@ -39,12 +38,16 @@ public class GukYuinApiStbltJobService {
return "local".equalsIgnoreCase(profile);
}

// @Scheduled(cron = "0 0 3 * * *")
public void runTask() {
findGukYuinEligibleForSurvey(null);
}

/** After the GukYuin sync, check field-survey eligibility and update */
@Scheduled(cron = "0 0 3 * * *")
public void findGukYuinEligibleForSurvey() {
if (isLocalProfile()) {
return;
}
public void findGukYuinEligibleForSurvey(LocalDate baseDate) {
// if (isLocalProfile()) {
// return;
// }

List<LearnKeyDto> list =
gukYuinStbltJobCoreService.findGukYuinEligibleForSurveyList(
@@ -55,11 +58,16 @@ public class GukYuinApiStbltJobService {

for (LearnKeyDto dto : list) {
try {
String yesterday =
String targetDate =
LocalDate.now(ZoneId.of("Asia/Seoul"))
.minusDays(1)
.format(DateTimeFormatter.ofPattern("yyyyMMdd"));
RlbDtctDto result = gukYuinApiService.findRlbDtctList(dto.getUid(), yesterday);

if (baseDate != null) { // if a parameter was supplied
targetDate = baseDate.format(DateTimeFormatter.ofPattern("yyyyMMdd"));
}

RlbDtctDto result = gukYuinApiService.findRlbDtctList(dto.getUid(), targetDate, "Y");

if (result == null || result.getResult() == null || result.getResult().isEmpty()) {
log.warn("[GUKYUIN] empty result chnDtctMstId={}", dto.getChnDtctMstId());

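The hunk above replaces the fixed "yesterday" value with a targetDate that the new baseDate parameter can override. The same resolution logic, pulled out as a standalone sketch for clarity (the helper name is hypothetical):

import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

static String resolveTargetDate(LocalDate baseDate) {
    DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyyMMdd");
    if (baseDate != null) {                          // manual call with an explicit base date
        return baseDate.format(fmt);
    }
    return LocalDate.now(ZoneId.of("Asia/Seoul"))    // scheduled run: yesterday, KST
        .minusDays(1)
        .format(fmt);
}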
@@ -11,6 +11,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto.ModelType;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.scheduler.dto.BatchStatusDto;
import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto;
@@ -122,7 +123,7 @@ public class MapSheetInferenceJobService {
* @return
*/
private Long resolveBatchId(InferenceBatchSheet sheet) {
// M3 > M2 > M1
// G3 > G2 > G1
if (sheet.getM3BatchId() != null) {
return sheet.getM3BatchId();
}
@@ -216,12 +217,12 @@ public class MapSheetInferenceJobService {
updateProcessingEndTimeByModel(job, sheet.getUuid(), now, currentType);

// if this is M3, end the whole run
if ("M3".equals(currentType)) {
if (ModelType.G3.getId().equals(currentType)) {
endAll(sheet, now);
return;
}

// run the next model (M1->M2, M2->M3)
// run the next model (G1->G2, G2->G3)
String nextType = nextModelType(currentType);
UUID nextModelUuid = resolveModelUuid(sheet, nextType);

@@ -240,7 +241,7 @@ public class MapSheetInferenceJobService {
save.setUuid(sheet.getUuid());
save.setStatus(Status.END.getId());
save.setInferEndDttm(now);
save.setType("M3"); // based on the last model
save.setType(ModelType.G3.getId()); // based on the last model
inferenceResultCoreService.update(save);

// save geom data when inference ends
@@ -266,11 +267,11 @@ public class MapSheetInferenceJobService {
* @return
*/
private String nextModelType(String currentType) {
if ("M1".equals(currentType)) {
return "M2";
if (ModelType.G1.getId().equals(currentType)) {
return ModelType.G2.getId();
}
if ("M2".equals(currentType)) {
return "M3";
if (ModelType.G2.getId().equals(currentType)) {
return ModelType.G3.getId();
}
throw new IllegalArgumentException("Unknown runningModelType: " + currentType);
}
@@ -283,13 +284,13 @@ public class MapSheetInferenceJobService {
* @return
*/
private UUID resolveModelUuid(InferenceBatchSheet sheet, String type) {
if ("M1".equals(type)) {
if (ModelType.G1.getId().equals(type)) {
return sheet.getM1ModelUuid();
}
if ("M2".equals(type)) {
if (ModelType.G2.getId().equals(type)) {
return sheet.getM2ModelUuid();
}
if ("M3".equals(type)) {
if (ModelType.G3.getId().equals(type)) {
return sheet.getM3ModelUuid();
}
throw new IllegalArgumentException("Unknown type: " + type);
@@ -332,9 +333,6 @@ public class MapSheetInferenceJobService {
InferenceProgressDto progressDto =
inferenceResultCoreService.getInferenceAiResultById(id, modelUuid);

// convert to the model name expected by the AI
String inferenceType = modelToInferenceType(type);

InferenceSendDto.pred_requests_areas predRequestsAreas =
new InferenceSendDto.pred_requests_areas();
predRequestsAreas.setInput1_year(progressDto.getPred_requests_areas().getInput1_year());
@@ -355,10 +353,9 @@ public class MapSheetInferenceJobService {
m.setCls_model_path(
Paths.get(progressDto.getCdModelClsPath(), progressDto.getCdModelClsFileName()).toString());
m.setCls_model_version(progressDto.getClsModelVersion());
m.setCd_model_type(inferenceType);
m.setPriority(progressDto.getPriority());

// log.info("InferenceSendDto={}", m);
m.setCd_model_type(type);
m.setPriority(5d);
log.info("[BEFORE INFERENCE] BEFORE SendDto={}", m);
// call the inference-execution API
Long batchId = ensureAccepted(m);

@@ -372,31 +369,13 @@ public class MapSheetInferenceJobService {
inferenceResultCoreService.update(saveInferenceAiDto);
}

/**
* Convert to the model name expected by the AI
*
* @param type model type
* @return String
*/
private String modelToInferenceType(String type) {
if ("M1".equals(type)) {
return "G1";
}
if ("M2".equals(type)) {
return "G2";
}
if ("M3".equals(type)) {
return "G3";
}
throw new IllegalArgumentException("Unknown type: " + type);
}

/**
* API call
*
* @param dto
* @return
*/
// why are there two identical functions?
private Long ensureAccepted(InferenceSendDto dto) {
if (dto == null) {
log.warn("not InferenceSendDto dto");
@@ -404,22 +383,28 @@ public class MapSheetInferenceJobService {
}

// 1) request log
log.info("Inference request dto={}", dto);

log.info("");
log.info("========================================================");
log.info("[SEND INFERENCE] Inference request dto= {}", dto);
log.info("========================================================");
log.info("");
// 2) temporary handling for the local environment
if ("local".equals(profile)) {
if (dto.getPred_requests_areas() == null) {
throw new IllegalStateException("pred_requests_areas is null");
}
dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson");
dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson");
}
// if ("local".equals(profile)) {
// if (dto.getPred_requests_areas() == null) {
// throw new IllegalStateException("pred_requests_areas is null");
// }
//
// dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson");
//
// dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson");
// }

// 3) HTTP call
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
headers.setAccept(List.of(MediaType.APPLICATION_JSON));

// TODO: log which URL and which parameters the request was made with
ExternalCallResult<String> result =
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);

@@ -434,6 +419,7 @@ public class MapSheetInferenceJobService {
objectMapper.readValue(result.body(), new TypeReference<>() {});

if (list.isEmpty()) {
// we would need the request URL and parameters to reproduce this
throw new IllegalStateException("Inference response is empty");
}

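These hunks replace the hard-coded "M1"/"M2"/"M3" strings with ModelType.G1/G2/G3 ids while keeping the if-chains in nextModelType and resolveModelUuid. As an alternative sketch only, the same G1 -> G2 -> G3 progression could live on an enum; the enum below is an illustrative stand-in and is not the project's ModelType, which the diff does not show in full:

enum ModelChain { // illustrative stand-in, not the project's ModelType
    G1, G2, G3;

    // Next model in the fixed inference chain; G3 has no successor.
    ModelChain next() {
        switch (this) {
            case G1: return G2;
            case G2: return G3;
            default: throw new IllegalStateException("G3 is the last model in the chain");
        }
    }
}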
@@ -11,6 +11,7 @@ import com.kamco.cd.kamcoback.scheduler.dto.FileDto.SrchFilesDepthDto;
import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto.DmlReturn;
import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto.MngFileAddReq;
import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto.MngHstDto;
import com.kamco.cd.kamcoback.scheduler.dto.MapSheetMngDto.YearMinMax;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
@@ -63,9 +64,18 @@ public class MapSheetMngFileJobService {

for (MngHstDto item : mapSheetFileNotYetList) {

// check whether this mng_yyyy is being registered for the very first time
Long mngCnt = mapSheetMngFileJobCoreService.findMngYyyyCnt(item.getMngYyyy());

// auto-exclude 5K map sheets from inference
Long exceptCheckCnt =
this.mapSheetAutoExceptionUpdate(item.getMngYyyy(), item.getMapSheetNum());
Long exceptCheckCnt = 0L;

if (mngCnt == 0) { // on first registration, check whether the map sheet is excluded from inference
exceptCheckCnt =
mapSheetMngFileJobCoreService.findMapSheetUseExceptCnt(item.getMapSheetNum());
} else { // if an imagery year is already registered, check auto-exclusion
exceptCheckCnt = this.mapSheetAutoExceptionUpdate(item.getMngYyyy(), item.getMapSheetNum());
}

// mark the per-sheet file check as in progress
item.setDataState("PROCESSING");
@@ -212,28 +222,26 @@ public class MapSheetMngFileJobService {
return notyetCnt;
}

public Long mapSheetAutoExceptionUpdate(int mngYyyy, String mapSheetNum) {
public Long mapSheetAutoExceptionUpdate(Integer mngYyyy, String mapSheetNum) {

// File syncs before 2025 never have the previous 3 years, so skip auto-exclusion (return as if the previous year's files always exist)
// if (syncAutoExceptionStartYear > mngYyyy) {
// return 1L;
// }
// get the min/max years from tb_year
YearMinMax yearInfo = mapSheetMngFileJobCoreService.findYearMinMaxInfo();
int strtYyyy = yearInfo.getMinYyyy();
int endYyyy = yearInfo.getMaxYyyy();

// int strtYyyy = mngYyyy - syncAutoExceptionBeforeYearCnt + 1;
int strtYyyy = 2020;
int endYyyy = mngYyyy;

// check for map sheets covering the current plus previous years (3 years) -> from 2020 to now
// count whether tb_map_sheet_mng_hst has at least one DONE entry for this map sheet
Long beforeCnt =
mapSheetMngFileJobCoreService.findByHstMapSheetBeforeYyyyListCount(
strtYyyy, endYyyy, mapSheetNum);
mngYyyy, strtYyyy, endYyyy, mapSheetNum);

if (beforeCnt == 0) {
System.out.println("mapSheetAutoExceptionUpdate inference == 자동추론제외");
System.out.println("beforeCnt: 0, mapSheetAutoExceptionUpdate inference == 자동추론제외");
mapSheetMngFileJobCoreService.updateException5kMapSheet(
mapSheetNum, CommonUseStatus.AUTO_EXCEPT);
} else {
// if there is at least one, mark USE
System.out.println(
"beforeCnt: " + beforeCnt + ", mapSheetAutoExceptionUpdate inference == 자동추론제외 해제");
mapSheetMngFileJobCoreService.updateException5kMapSheet(mapSheetNum, CommonUseStatus.USE);
}

@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.postgres.core.TrainingDataLabelJobCoreService;
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.InspectorPendingDto;
import com.kamco.cd.kamcoback.scheduler.dto.TrainingDataReviewJobDto.Tasks;
import jakarta.transaction.Transactional;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
@@ -13,7 +14,7 @@ import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Service;

@Log4j2
@@ -22,6 +23,7 @@ import org.springframework.stereotype.Service;
public class TrainingDataLabelJobService {

private final TrainingDataLabelJobCoreService trainingDataLabelJobCoreService;
private final ApplicationContext applicationContext;

@Value("${spring.profiles.active}")
private String profile;
@@ -30,16 +32,24 @@ public class TrainingDataLabelJobService {
return "local".equalsIgnoreCase(profile);
}

@Transactional
@Scheduled(cron = "0 0 0 * * *")
public void assignReviewerYesterdayLabelComplete() {
// @Scheduled(cron = "0 0 0 * * *")
public void runTask() {
// must be called through the proxy for @Transactional to take effect
applicationContext
.getBean(TrainingDataLabelJobService.class)
.assignReviewerYesterdayLabelComplete(null);
}

if (isLocalProfile()) {
return;
}
@Transactional
public void assignReviewerYesterdayLabelComplete(LocalDate baseDate) {

// if (isLocalProfile()) {
// return;
// }

try {
List<Tasks> tasks = trainingDataLabelJobCoreService.findCompletedYesterdayUnassigned();
List<Tasks> tasks =
trainingDataLabelJobCoreService.findCompletedYesterdayUnassigned(baseDate);

if (tasks.isEmpty()) {
return;
@@ -88,6 +98,7 @@ public class TrainingDataLabelJobService {
}
} catch (Exception e) {
log.error("배치 처리 중 예외", e);
throw e;
}
}

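runTask() deliberately fetches the bean from the ApplicationContext before calling assignReviewerYesterdayLabelComplete: a direct this-call would bypass the Spring proxy and @Transactional would not apply. A commonly used alternative is a lazy self-reference; the sketch below shows that pattern under that assumption and is not what this commit does:

import jakarta.transaction.Transactional;
import java.time.LocalDate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;

@Service
public class SelfProxyJobExample { // hypothetical class for illustration

    // Lazily injecting the bean's own proxy routes internal calls through it,
    // so transactional semantics still apply.
    @Lazy @Autowired
    private SelfProxyJobExample self;

    public void runTask() {
        self.assignReviewerYesterdayLabelComplete(null);
    }

    @Transactional
    public void assignReviewerYesterdayLabelComplete(LocalDate baseDate) {
        // batch logic would go here
    }
}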
@@ -13,12 +13,12 @@ import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDate;
import java.util.List;
import java.util.Objects;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

@Log4j2
@@ -31,7 +31,7 @@ public class TrainingDataReviewJobService {
@Value("${spring.profiles.active}")
private String profile;

@Value("${training-data.geojson-dir}")
@Value("${file.training-data.geojson-dir}")
private String trainingDataDir;

private boolean isLocalProfile() {
@@ -39,12 +39,16 @@ public class TrainingDataReviewJobService {
}

@Transactional
@Scheduled(cron = "0 0 2 * * *")
public void exportGeojsonLabelingGeom() {
// @Scheduled(cron = "0 0 2 * * *")
public void runTask() {
exportGeojsonLabelingGeom(null);
}

if (isLocalProfile()) {
return;
}
public void exportGeojsonLabelingGeom(LocalDate baseDate) {

// if (isLocalProfile()) {
// return;
// }

// 1) determine the path/file name
String targetDir =
@@ -62,7 +66,8 @@ public class TrainingDataReviewJobService {

// 3) get the per-map-sheet list of all data reviewed up to yesterday, by round
List<AnalMapSheetList> analMapList =
trainingDataReviewJobCoreService.findCompletedAnalMapSheetList(info.getAnalUid());
trainingDataReviewJobCoreService.findCompletedAnalMapSheetList(
info.getAnalUid(), baseDate);

if (analMapList.isEmpty()) {
continue;
@@ -72,7 +77,7 @@ public class TrainingDataReviewJobService {
// 4) fetch geom data per map sheet and build the geojson
List<CompleteLabelData> completeList =
trainingDataReviewJobCoreService.findCompletedYesterdayLabelingList(
info.getAnalUid(), mapSheet.getMapSheetNum());
info.getAnalUid(), mapSheet.getMapSheetNum(), baseDate);

if (!completeList.isEmpty()) {

@@ -3,8 +3,6 @@ package com.kamco.cd.kamcoback.trainingdata;
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto.ResponseObj;
import com.kamco.cd.kamcoback.scheduler.service.TrainingDataLabelJobService;
import com.kamco.cd.kamcoback.scheduler.service.TrainingDataReviewJobService;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewGeometryInfo;
import com.kamco.cd.kamcoback.trainingdata.dto.TrainingDataReviewDto.ReviewListDto;
@@ -33,8 +31,6 @@ import org.springframework.web.bind.annotation.RestController;
public class TrainingDataReviewApiController {

private final TrainingDataReviewService trainingDataReviewService;
private final TrainingDataLabelJobService trainingDataLabelJobService;
private final TrainingDataReviewJobService trainingDataReviewJobService;

@Operation(summary = "목록 조회", description = "검수 할당 목록 조회")
@ApiResponses(
@@ -558,20 +554,4 @@ public class TrainingDataReviewApiController {
return ApiResponseDto.ok(
trainingDataReviewService.getCogImageUrl(mapSheetNum, beforeYear, afterYear));
}

@Operation(
summary = "검수할당 스케줄링(수동 호출)",
description = "스케줄링이 실패한 경우 수동 호출하는 API, 어제 라벨링 완료된 것을 해당 검수자들에게 할당함")
@GetMapping("/run-schedule")
public ApiResponseDto<Void> runTrainingReviewSchedule() {
trainingDataLabelJobService.assignReviewerYesterdayLabelComplete();
return ApiResponseDto.ok(null);
}

@Operation(summary = "검수완료된 라벨링 geojson 생성(스케줄링 수동 호출)", description = "검수완료된 라벨링 geojson 생성")
@GetMapping("/run-label-geojson")
public ApiResponseDto<Long> runExportGeojsonLabelingGeom() {
trainingDataReviewJobService.exportGeojsonLabelingGeom();
return ApiResponseDto.ok(0L);
}
}

@@ -29,27 +29,12 @@ public class UploadApiController {

private final UploadService uploadService;

@Value("${file.sync-root-dir}")
private String syncRootDir;

@Value("${file.sync-tmp-dir}")
private String syncTmpDir;

@Value("${file.sync-file-extention}")
private String syncFileExtention;

@Value("${file.dataset-dir}")
private String datasetDir;

@Value("${file.dataset-tmp-dir}")
private String datasetTmpDir;

@Value("${file.model-dir}")
private String modelDir;

@Value("${file.model-tmp-dir}")
private String modelTmpDir;

/*
@Operation(summary = "데이터셋 대용량 업로드 세션 시작", description = "데이터셋 대용량 파일 업로드 세션을 시작합니다.")
@ApiResponses(

@@ -16,6 +16,10 @@ spring:
format_sql: true # ⚠️ optional - SQL formatting (readability)
jdbc:
batch_size: 1000 # ✅ added (JDBC batch)
open-in-view: false
mvc:
async:
request-timeout: 300s # 5 minutes (e.g. 30s, 120s, 10m also possible)

datasource:
url: jdbc:postgresql://192.168.2.127:15432/kamco_cds
@@ -83,37 +87,19 @@ mapsheet:
upload:
skipGdalValidation: true
shp:
baseurl: /app/tmp/detect/result

baseurl: /app/tmp/detect/result # not currently used

file:
#sync-root-dir: D:/kamco-nfs/images/
sync-root-dir: /kamco-nfs/images/
sync-tmp-dir: ${file.sync-root-dir}/tmp
sync-file-extention: tfw,tif
sync-auto-exception-start-year: 2024
sync-auto-exception-before-year-cnt: 3

#dataset-dir: D:/kamco-nfs/dataset/
dataset-dir: /kamco-nfs/dataset/export/
dataset-tmp-dir: ${file.dataset-dir}tmp/

#model-dir: D:/kamco-nfs/ckpt/model/
model-dir: /kamco-nfs/ckpt/model/
model-tmp-dir: ${file.model-dir}tmp/
model-file-extention: pth,json,py

pt-path: /kamco-nfs/ckpt/model/v6-cls-checkpoints/
root: kamco-nfs
nfs: /kamco-nfs
pt-path: ${file.nfs}/ckpt/model/v6-cls-checkpoints/
pt-FileName: yolov8_6th-6m.pt

dataset-response: /kamco-nfs/dataset/response/

inference:
nfs: /kamco-nfs
url: http://192.168.2.183:8000/jobs
batch-url: http://192.168.2.183:8000/batches
geojson-dir: /kamco-nfs/requests/
jar-path: /kamco-nfs/dataset/shp/shp-exporter.jar
jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
inference-server-name: server1,server2,server3,server4

gukyuin:
@@ -121,11 +107,10 @@ gukyuin:
url: http://192.168.2.129:5301
cdi: ${gukyuin.url}/api/kcd/cdi

training-data:
geojson-dir: /kamco-nfs/model_output/labeling/

layer:
geoserver-url: https://kamco.geo-dev.gs.dabeeo.com
wms-path: geoserver/cd
wmts-path: geoserver/cd/gwc/service
workspace: cd

@@ -67,33 +67,17 @@ mapsheet:
shp:
baseurl: /Users/bokmin/detect/result

file:
sync-root-dir: C:/Users/gypark/kamco-nfs/images/
#sync-root-dir: /kamco-nfs/images/
sync-tmp-dir: ${file.sync-root-dir}/tmp/
sync-file-extention: tfw,tif
sync-auto-exception-start-year: 2025
sync-auto-exception-before-year-cnt: 3

dataset-dir: C:/Users/gypark/kamco-nfs/dataset/
#dataset-dir: /kamco-nfs/dataset/export/
dataset-tmp-dir: ${file.dataset-dir}tmp/

model-dir: C:/Users/gypark/kamco-nfs/ckpt/model/
#model-dir: /kamco-nfs/ckpt/model/
model-tmp-dir: ${file.model-dir}tmp/
model-file-extention: pth,json,py

pt-path: /kamco-nfs/ckpt/classification/
root: kamco-nfs
nfs: C:/Users/gypark/kamco-nfs
pt-path: ${file.nfs}/ckpt/classification/
pt-FileName: v5-best.pt

inference:
nfs: C:/Users/gyparkkamco-nfs
url: http://10.100.0.11:8000/jobs
batch-url: http://10.100.0.11:8000/batches
geojson-dir: /kamco-nfs/requests/
jar-path: jar/makeshp-1.0.0.jar
jar-path: jar/shp-exporter.jar
inference-server-name: server1,server2,server3,server4

gukyuin:
@@ -101,10 +85,8 @@ gukyuin:
url: http://192.168.2.129:5301
cdi: ${gukyuin.url}/api/kcd/cdi

training-data:
geojson-dir: /kamco-nfs/model_output/labeling/

layer:
geoserver-url: https://kamco.geo-dev.gs.dabeeo.com
path: /geoserver/cd/gwc/service/
wms-path: geoserver/cd
wmts-path: geoserver/cd/gwc/service
workspace: cd

@@ -18,12 +18,38 @@ spring:
batch_size: 1000 # ✅ added (JDBC batch)

datasource:
url: jdbc:postgresql://10.100.0.10:25432/temp
username: temp
password: temp123!
url: jdbc:postgresql://kamco-cd-postgis:5432/kamco_cds
#url: jdbc:postgresql://localhost:15432/kamco_cds
username: kamco_cds
password: kamco_cds_Q!W@E#R$
hikari:
minimum-idle: 10
maximum-pool-size: 20
connection-timeout: 60000 # 60-second connection timeout
idle-timeout: 300000 # 5-minute idle timeout
max-lifetime: 1800000 # 30-minute max lifetime
leak-detection-threshold: 60000 # connection leak detection

transaction:
default-timeout: 300 # 5-minute transaction timeout

data:
redis:
host: 127.0.0.1
port: 16379
password: kamco

servlet:
multipart:
enabled: true
max-file-size: 4GB
max-request-size: 4GB
file-size-threshold: 10MB

server:
tomcat:
max-swallow-size: 4GB
max-http-form-post-size: 4GB

jwt:
secret: "kamco_token_9b71e778-19a3-4c1d-97bf-2d687de17d5b"
@@ -34,50 +60,47 @@ token:
refresh-cookie-name: kamco # cookie name for development
refresh-cookie-secure: true # false for local http testing

springdoc:
swagger-ui:
persist-authorization: true # keep the token across Swagger refreshes, stored in local storage

logging:
level:
root: INFO
org.springframework.web: INFO
org.springframework.security: INFO

# tone down health-check noise to the essentials
org.springframework.security.web.FilterChainProxy: INFO
org.springframework.security.web.authentication.AnonymousAuthenticationFilter: INFO
org.springframework.security.web.authentication.Http403ForbiddenEntryPoint: INFO
org.springframework.web.servlet.DispatcherServlet: INFO

mapsheet:
upload:
skipGdalValidation: true
shp:
baseurl: /app/detect/result

baseurl: /app/detect/result # not currently used

file:
#sync-root-dir: D:/kamco-nfs/images/
sync-root-dir: /kamco-nfs/images/
sync-tmp-dir: ${file.sync-root-dir}/tmp
sync-file-extention: tfw,tif
sync-auto-exception-start-year: 2025
sync-auto-exception-before-year-cnt: 3

#dataset-dir: D:/kamco-nfs/dataset/
dataset-dir: /kamco-nfs/dataset/export/
dataset-tmp-dir: ${file.dataset-dir}tmp/

#model-dir: D:/kamco-nfs/ckpt/model/
model-dir: /kamco-nfs/ckpt/model/
model-tmp-dir: ${file.model-dir}tmp/
model-file-extention: pth,json,py

pt-path: /kamco-nfs/ckpt/model/v6-cls-checkpoints/
root: data
nfs: /data
pt-path: ${file.nfs}/ckpt/model/v6-cls-checkpoints/
pt-FileName: yolov8_6th-6m.pt

dataset-response: /kamco-nfs/dataset/response/

inference:
url: http://192.168.2.183:8000/jobs
batch-url: http://192.168.2.183:8000/batches
geojson-dir: /kamco-nfs/requests/
jar-path: /kamco-nfs/dataset/shp/shp-exporter.jar
nfs: /data
url: http://172.16.4.56:8000/jobs
batch-url: http://172.16.4.56:8000/batches
jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
inference-server-name: server1,server2,server3,server4

gukyuin:
#url: http://localhost:8080
url: http://192.168.2.129:5301
url: http://127.0.0.1:5301
cdi: ${gukyuin.url}/api/kcd/cdi

training-data:
geojson-dir: /kamco-nfs/model_output/labeling/

layer:
geoserver-url: https://kamco.geo-dev.gs.dabeeo.com

120
src/main/resources/application-prod.yml_bak
Normal file
@@ -0,0 +1,120 @@
spring:
config:
activate:
on-profile: prod

jpa:
show-sql: true
hibernate:
ddl-auto: validate
properties:
hibernate:
default_batch_fetch_size: 100 # ✅ performance - prevents N+1 queries
order_updates: true # ✅ performance - ordered updates prevent deadlocks
order_inserts: true
use_sql_comments: true # ⚠️ optional - adds comments to SQL (for debugging)
format_sql: true # ⚠️ optional - SQL formatting (readability)
jdbc:
batch_size: 1000 # ✅ added (JDBC batch)

datasource:
url: jdbc:postgresql://kamco-cd-postgis:5432/kamco_cds
#url: jdbc:postgresql://localhost:15432/kamco_cds
username: kamco_cds
password: kamco_cds_Q!W@E#R$
hikari:
minimum-idle: 10
maximum-pool-size: 20
connection-timeout: 60000 # 60-second connection timeout
idle-timeout: 300000 # 5-minute idle timeout
max-lifetime: 1800000 # 30-minute max lifetime
leak-detection-threshold: 60000 # connection leak detection

transaction:
default-timeout: 300 # 5-minute transaction timeout

data:
redis:
host: 127.0.0.1
port: 16379
password: kamco

servlet:
multipart:
enabled: true
max-file-size: 4GB
max-request-size: 4GB
file-size-threshold: 10MB

server:
tomcat:
max-swallow-size: 4GB
max-http-form-post-size: 4GB

jwt:
secret: "kamco_token_9b71e778-19a3-4c1d-97bf-2d687de17d5b"
access-token-validity-in-ms: 86400000 # 1 day
refresh-token-validity-in-ms: 604800000 # 7 days

token:
refresh-cookie-name: kamco # cookie name for development
refresh-cookie-secure: true # false for local http testing

logging:
level:
root: INFO
org.springframework.web: INFO
org.springframework.security: INFO

# tone down health-check noise to the essentials
org.springframework.security.web.FilterChainProxy: INFO
org.springframework.security.web.authentication.AnonymousAuthenticationFilter: INFO
org.springframework.security.web.authentication.Http403ForbiddenEntryPoint: INFO
org.springframework.web.servlet.DispatcherServlet: INFO

mapsheet:
upload:
skipGdalValidation: true
shp:
baseurl: /app/detect/result # not currently used

file:
#sync-root-dir: D:/kamco-nfs/images/
sync-root-dir: /kamco-nfs/images/
sync-tmp-dir: ${file.sync-root-dir}/tmp # image upload temp dir
sync-file-extention: tfw,tif
sync-auto-exception-start-year: 2025
sync-auto-exception-before-year-cnt: 3

#dataset-dir: D:/kamco-nfs/model_output/ # changed to model_output
dataset-dir: /kamco-nfs/model_output/export/ # mount path for AI inference results
dataset-tmp-dir: ${file.dataset-dir}tmp/

#model-dir: D:/kamco-nfs/ckpt/model/
model-dir: /kamco-nfs/ckpt/model/ # upload path for models trained on the training server
model-tmp-dir: ${file.model-dir}tmp/
model-file-extention: pth,json,py

pt-path: /kamco-nfs/ckpt/model/v6-cls-checkpoints/
pt-FileName: yolov8_6th-6m.pt

inference:
url: http://127.0.0.1:8000/jobs
batch-url: http://127.0.0.1:8000/batches
geojson-dir: /kamco-nfs/requests/ # upload path for models trained on the training server
jar-path: /kamco-nfs/repo/jar/shp-exporter.jar # path where files are generated for inference runs
inference-server-name: server1,server2,server3,server4

gukyuin:
url: http://127.0.0.1:5301
cdi: ${gukyuin.url}/api/kcd/cdi

training-data:
geojson-dir: /kamco-nfs/dataset/request/

layer:
geoserver-url: https://kamco.geo-dev.gs.dabeeo.com
wms-path: geoserver/cd
wmts-path: geoserver/cd/gwc/service
workspace: cd
@@ -67,3 +67,22 @@ management:
include:
- "health"

file:
root: kamco-nfs
nfs: /kamco-nfs
sync-root-dir: ${file.nfs}/images/
sync-tmp-dir: ${file.nfs}/requests/temp # image upload temp dir
sync-file-extention: tfw,tif
dataset-dir: ${file.nfs}/model_output/export/ # mount path for AI inference results
dataset-tmp-dir: ${file.dataset-dir}tmp/
model-dir: ${file.nfs}/ckpt/model/ # upload path for models trained on the training server
model-tmp-dir: ${file.model-dir}tmp/
model-file-extention: pth,json,py
pt-path: ${file.nfs}/ckpt/model/v6-cls-checkpoints/
dataset-response: ${file.nfs}/dataset/response/
training-data:
geojson-dir: ${file.nfs}/dataset/request/
inference:
nfs: /kamco-nfs
geojson-dir: ${inference.nfs}/requests/ # path where files are generated for inference runs
jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar

83
src/main/resources/static/download_progress_test.html
Normal file
@@ -0,0 +1,83 @@
<!doctype html>
<html lang="ko">
<head>
<meta charset="utf-8" />
<title>라벨 ZIP 다운로드</title>
</head>
<body>
<h3>라벨 ZIP 다운로드</h3>

UUID:
<input id="uuid" value="6d8d49dc-0c9d-4124-adc7-b9ca610cc394" />
<br><br>

JWT Token:
<input id="token" style="width:600px;" placeholder="Bearer 토큰 붙여넣기" />
<br><br>

<button onclick="download()">다운로드</button>

<br><br>
<progress id="bar" value="0" max="100" style="width:400px;"></progress>
<div id="status"></div>

<script>
async function download() {
const uuid = document.getElementById("uuid").value.trim();
const token = document.getElementById("token").value.trim();

if (!uuid) {
alert("UUID 입력하세요");
return;
}

if (!token) {
alert("토큰 입력하세요");
return;
}

const url = `/api/training-data/stage/download/${uuid}`;

const res = await fetch(url, {
headers: {
"Authorization": token.startsWith("Bearer ")
? token
: `Bearer ${token}`,
"kamco-download-uuid": uuid
}
});

if (!res.ok) {
document.getElementById("status").innerText =
"실패: " + res.status;
return;
}

const total = parseInt(res.headers.get("Content-Length") || "0", 10);
const reader = res.body.getReader();
const chunks = [];
let received = 0;

while (true) {
const { done, value } = await reader.read();
if (done) break;
chunks.push(value);
received += value.length;

if (total) {
document.getElementById("bar").value =
(received / total) * 100;
}
}

const blob = new Blob(chunks);
const a = document.createElement("a");
a.href = URL.createObjectURL(blob);
a.download = uuid + ".zip";
a.click();

document.getElementById("status").innerText = "완료 ✅";
}
</script>
</body>
</html>