Compare commits
83 Commits
ba11e4c801...feat/dean/
| SHA1 |
|---|
| 32d56cf8fe |
| c3b7daebb7 |
| 2188d426d4 |
| 5c2ee0974b |
| 7980fe1d42 |
| c10141e915 |
| 97565c5369 |
| 30f0e1a885 |
| ba562261c3 |
| a084c80715 |
| a44e93c234 |
| a63b81008a |
| 2309357c0d |
| ee76389d6c |
| 7b15e5bb8c |
| 001ad73de7 |
| 2508f59a72 |
| f2307ff0f4 |
| 6f44319d33 |
| cefacb291b |
| 744cbb55a9 |
| 4a120ae5fd |
| 7c200b057a |
| 8ac0a00311 |
| 4863091406 |
| 70c28e0b54 |
| 9197819340 |
| f2500c33e6 |
| 18dc831b05 |
| 48b46035fd |
| 1b9c7faf22 |
| fcdba49430 |
| 7599c99025 |
| 8fd1948d7c |
| 2c1047a014 |
| 8c54e5c176 |
| d3faa87d4f |
| 8d8d9d7a9f |
| 9c3d6c01f7 |
| 02b9a97ee8 |
| 438fb3ec9b |
| 3105b60759 |
| 5dddafbe0c |
| c2872c7748 |
| 7128eb007e |
| 815ee57e06 |
| ab52256c05 |
| 3ee3cf8425 |
| 14248b29e7 |
| a4c3fc5185 |
| d36703fd84 |
| 496f9c562d |
| 72778d6996 |
| 85834f2221 |
| 74e6485930 |
| 8cb8632a51 |
| fad797eea4 |
| 670cedda59 |
| 78fe7f013b |
| 48fa13615e |
| 1f9d6861a0 |
| 4b04fb64ec |
| df0c689243 |
| 827f701186 |
| ea74203667 |
| 9421df2b9b |
| 2a3bf9852d |
| 21ac562fd5 |
| aac8c91cd0 |
| b8fc314bff |
| a2bb1b2442 |
| 4e2e5c0b1d |
| 6b65dbdc75 |
| 82f08c4240 |
| 8bdccfdce6 |
| 3aca011104 |
| 3f6737706a |
| 3724528ea9 |
| 079a899822 |
| 58a73de9ab |
| f4a890bec8 |
| 783609b015 |
| 92232e13f1 |
@@ -1,7 +1,12 @@
-# Use the local base image built in stage 1
-FROM 127.0.0.1:18082/kamco-cd/base-java21-gdal:1.0
+# Stage 1: Build stage (the gradle build is already done in Jenkins)
+FROM eclipse-temurin:21-jre-jammy
+
+# Install GDAL
+RUN apt-get update && apt-get install -y \
+    gdal-bin \
+    libgdal-dev \
+    && rm -rf /var/lib/apt/lists/*
 
 # User setup (kept here because it may differ per app)
 ARG UID=1000
 ARG GID=1000
 
@@ -18,6 +23,7 @@ COPY build/libs/ROOT.jar app.jar
 
 # Expose port
 EXPOSE 8080
 
 # Run the application
 # Run with the dev profile
 ENTRYPOINT ["java", "-jar", "-Dspring.profiles.active=prod", "app.jar"]
 
23  Dockerfile-prod_bak  Normal file
@@ -0,0 +1,23 @@
# Stage 1: Build stage (the gradle build is already done in Jenkins)
FROM kamco-java-gdal:21

ARG UID=1000
ARG GID=1000

RUN groupadd -g ${GID} kcomu \
    && useradd -u ${UID} -g ${GID} -m kcomu

USER kcomu

# Set the working directory
WORKDIR /app

# Copy the JAR file (ROOT.jar built by Jenkins)
COPY build/libs/ROOT.jar app.jar

# Expose port
EXPOSE 8080

# Run the application
# Run with the dev profile
ENTRYPOINT ["java", "-jar", "-Dspring.profiles.active=prod", "app.jar"]
@@ -15,7 +15,7 @@ services:
       - SPRING_PROFILES_ACTIVE=dev
       - TZ=Asia/Seoul
     volumes:
-      - /kamco-nfs:/kamco-nfs
+      - /data:/kamco-nfs
     networks:
       - kamco-cds
     restart: unless-stopped
@@ -1,18 +1,31 @@
 services:
-  kamco-changedetection-api:
-    build:
-      context: .
-      dockerfile: Dockerfile-prod
-      args:
-        UID: 1000 # manager01 UID
-        GID: 1000 # manager01 GID
-    image: kamco-changedetection-api:${IMAGE_TAG:-latest}
-    container_name: kamco-changedetection-api
+  nginx:
+    image: nginx:alpine
+    container_name: kamco-cd-api-nginx
+    ports:
+      - "7100:8080"
+      - "12013:443"
+    volumes:
+      - ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
+      - ./nginx/conf.d:/etc/nginx/conf.d:ro
+      - /etc/ssl/certs/globalsign:/etc/ssl/certs/globalsign:ro
+    networks:
+      - kamco-cds
+    restart: unless-stopped
+    depends_on:
+      - kamco-cd-api
+    healthcheck:
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost/health"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+      start_period: 10s
+
+  kamco-cd-api:
+    image: kamco-api-app:260219
+    container_name: kamco-cd-api
     user: "1000:1000"
     environment:
-      - SPRING_PROFILES_ACTIVE=dev
+      - SPRING_PROFILES_ACTIVE=prod
       - TZ=Asia/Seoul
     volumes:
       - /data:/kamco-nfs
 
122  nginx/README.md  Normal file
@@ -0,0 +1,122 @@
# Nginx HTTPS Configuration for KAMCO Change Detection API

## SSL Certificate Setup

### Required Files
Place the GlobalSign SSL certificate files in the server's `/etc/ssl/certs/globalsign/` directory:

```
/etc/ssl/certs/globalsign/
├── certificate.crt   # SSL certificate
├── private.key       # private key
└── ca-bundle.crt     # CA bundle (intermediate certificates)
```

### Certificate Installation Steps

1. **Create the directory**
   ```bash
   sudo mkdir -p /etc/ssl/certs/globalsign
   sudo chmod 755 /etc/ssl/certs/globalsign
   ```

2. **Copy the certificate files**
   ```bash
   sudo cp your-certificate.crt /etc/ssl/certs/globalsign/certificate.crt
   sudo cp your-private.key /etc/ssl/certs/globalsign/private.key
   sudo cp ca-bundle.crt /etc/ssl/certs/globalsign/ca-bundle.crt
   ```

3. **Set file permissions**
   ```bash
   sudo chmod 644 /etc/ssl/certs/globalsign/certificate.crt
   sudo chmod 600 /etc/ssl/certs/globalsign/private.key
   sudo chmod 644 /etc/ssl/certs/globalsign/ca-bundle.crt
   ```

## Configuration Overview

### Service Architecture
```
Internet (HTTPS:12013)
        ↓
nginx (443 in container)
        ↓
kamco-changedetection-api (8080 in container)
```

### Key Features
- **HTTPS/TLS**: supports TLSv1.2 and TLSv1.3
- **Port**: external 12013 → internal 443 (nginx)
- **Domain**: aicd-api.e-kamco.com:12013
- **Reverse Proxy**: proxies to kamco-changedetection-api:8080
- **Security Headers**: HSTS, X-Frame-Options, X-Content-Type-Options, etc.
- **Health Check**: /health endpoint

## Deployment

### Start Services
```bash
docker-compose -f docker-compose-prod.yml up -d
```

### Check Logs
```bash
# Nginx logs
docker logs kamco-cd-nginx

# API logs
docker logs kamco-changedetection-api
```

### Verify Configuration
```bash
# Test nginx configuration
docker exec kamco-cd-nginx nginx -t

# Check SSL certificate
docker exec kamco-cd-nginx openssl s_client -connect localhost:443 -servername aicd-api.e-kamco.com
```

### Access Service
```bash
# HTTPS Access
curl -k https://aicd-api.e-kamco.com:12013/monitor/health

# Health Check
curl -k https://aicd-api.e-kamco.com:12013/health
```

## Troubleshooting

### Certificate Issues
Check that the certificate files are mounted correctly:
```bash
docker exec kamco-cd-nginx ls -la /etc/ssl/certs/globalsign/
```

### Nginx Configuration Test
```bash
docker exec kamco-cd-nginx nginx -t
```

### Connection Test
```bash
# Check if nginx is listening
docker exec kamco-cd-nginx netstat -tlnp | grep 443

# Check backend connection
docker exec kamco-cd-nginx wget --spider http://kamco-changedetection-api:8080/monitor/health
```

## Configuration Files

- `nginx/nginx.conf`: Main nginx configuration
- `nginx/conf.d/default.conf`: Server block with SSL and proxy settings
- `docker-compose-prod.yml`: Docker compose with nginx service

## Notes

- If the certificate file names differ, adjust the paths in `nginx/conf.d/default.conf`
- Restart the nginx container after renewing certificates: `docker restart kamco-cd-nginx`
- Make sure port 12013 is allowed through the firewall
60  nginx/conf.d/default.conf  Normal file
@@ -0,0 +1,60 @@
upstream kamco_api {
    server kamco-cd-api:8080;
}

server {
    listen 443 ssl http2;
    server_name aicd-api.e-kamco.com;

    # GlobalSign SSL Certificate
    ssl_certificate     /etc/ssl/certs/globalsign/certificate.crt;
    ssl_certificate_key /etc/ssl/certs/globalsign/private.key;

    # SSL Configuration
    ssl_protocols TLSv1.2 TLSv1.3;
    ssl_ciphers HIGH:!aNULL:!MD5;
    ssl_prefer_server_ciphers on;
    ssl_session_cache shared:SSL:10m;
    ssl_session_timeout 10m;

    # Security Headers
    add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
    add_header X-Frame-Options "SAMEORIGIN" always;
    add_header X-Content-Type-Options "nosniff" always;
    add_header X-XSS-Protection "1; mode=block" always;

    # Client Body Size
    client_max_body_size 100M;

    # Proxy Settings
    location / {
        proxy_pass http://kamco_api;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-Host $host;
        proxy_set_header X-Forwarded-Port $server_port;

        # Timeouts
        proxy_connect_timeout 60s;
        proxy_send_timeout 60s;
        proxy_read_timeout 60s;

        # WebSocket Support (if needed)
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
    }

    # Health Check Endpoint
    location /health {
        access_log off;
        return 200 "OK";
        add_header Content-Type text/plain;
    }

    # Access and Error Logs
    access_log /var/log/nginx/kamco-api-access.log;
    error_log /var/log/nginx/kamco-api-error.log;
}
33  nginx/nginx.conf  Normal file
@@ -0,0 +1,33 @@
user nginx;
worker_processes auto;

error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;

events {
    worker_connections 1024;
}

http {
    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
                    '$status $body_bytes_sent "$http_referer" '
                    '"$http_user_agent" "$http_x_forwarded_for"';

    access_log /var/log/nginx/access.log main;

    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;

    gzip on;
    gzip_vary on;
    gzip_min_length 1024;
    gzip_types text/plain text/css text/xml text/javascript application/x-javascript application/xml+rss application/json;

    include /etc/nginx/conf.d/*.conf;
}
@@ -279,18 +279,28 @@ public class FIleChecker {
     return true;
   }
 
-  public static List<Folder> getFolderAll(String dirPath, String sortType, int maxDepth) {
+  // A parameter was added because some code checks for kamco-nfs; revisit once its usage is confirmed
+  public static List<Folder> getFolderAll(
+      String dirPath, String sortType, int maxDepth, String nfsRootDir) {
 
     Path startPath = Paths.get(dirPath);
 
     List<Folder> folderList = List.of();
 
-    try (Stream<Path> stream = Files.walk(startPath, maxDepth)) {
+    log.info("[FIND_FOLDER] DIR : {} {} {} {}", dirPath, sortType, maxDepth, startPath);
+
+    int childDirCount = getChildFolderCount(startPath.toFile());
+    log.info("[FIND_FOLDER] START_PATH_CHILD_DIR_COUNT : {}", childDirCount);
+
+    try (Stream<Path> stream = Files.walk(startPath, maxDepth)) {
       folderList =
           stream
               .filter(Files::isDirectory)
               .filter(p -> !p.toString().equals(dirPath))
               .filter(
                   p ->
                       !p.toAbsolutePath()
                           .normalize()
                           .equals(startPath.toAbsolutePath().normalize()))
               .map(
                   path -> {
                     int depth = path.getNameCount();
@@ -300,13 +310,12 @@ public class FIleChecker {
                     String parentPath = path.getParent().toString();
                     String fullPath = path.toAbsolutePath().toString();
 
+                    // Is this needed?
+                    // boolean isShowHide =
+                    //     !parentFolderNm.equals("kamco-nfs"); // show only entries
+                    //     under kamco-nfs in the folder list
                     boolean isShowHide =
-                        !parentFolderNm.equals("kamco-nfs"); // show only entries under kamco-nfs in the folder list
-                    // boolean isValid =
-                    //     !NameValidator.containsKorean(folderNm)
-                    //         && !NameValidator.containsWhitespaceRegex(folderNm)
-                    //         && !parentFolderNm.equals("kamco-nfs");
-
+                        !parentFolderNm.equals(nfsRootDir); // show only entries under nfsRootDir in the folder list
                     File file = new File(fullPath);
                     int childCnt = getChildFolderCount(file);
                     String lastModified = getLastModified(file);
@@ -354,24 +363,8 @@ public class FIleChecker {
     return folderList;
   }
 
-  public static List<Folder> getFolderAll(String dirPath) {
-    return getFolderAll(dirPath, "name", 1);
-  }
-
-  public static List<Folder> getFolderAll(String dirPath, String sortType) {
-    return getFolderAll(dirPath, sortType, 1);
-  }
-
-  public static int getChildFolderCount(String dirPath) {
-    File directory = new File(dirPath);
-    File[] childFolders = directory.listFiles(File::isDirectory);
-
-    int childCnt = 0;
-    if (childFolders != null) {
-      childCnt = childFolders.length;
-    }
-
-    return childCnt;
+  public static List<Folder> getFolderAll(String dirPath, String nfsRootDir) {
+    return getFolderAll(dirPath, "name", 1, nfsRootDir);
   }
 
   public static int getChildFolderCount(File directory) {
@@ -385,11 +378,6 @@ public class FIleChecker {
     return childCnt;
   }
 
-  public static String getLastModified(String dirPath) {
-    File file = new File(dirPath);
-    return dttmFormat.format(new Date(file.lastModified()));
-  }
-
   public static String getLastModified(File file) {
     return dttmFormat.format(new Date(file.lastModified()));
   }
 
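For context, a minimal usage sketch of the refactored API (the directory path below is illustrative, not taken from the diff): the NFS root directory name is now threaded through as a parameter instead of being hard-coded as "kamco-nfs".

```java
import java.util.List;

// Hypothetical caller; "/data/sync" is an illustrative path.
// The two-argument overload delegates to getFolderAll(dirPath, "name", 1, nfsRootDir).
List<FIleChecker.Folder> folders = FIleChecker.getFolderAll("/data/sync", "kamco-nfs");
```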
@@ -0,0 +1,33 @@
package com.kamco.cd.kamcoback.config;

import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

@Getter
@Setter
@Component
@ConfigurationProperties(prefix = "file")
public class FileProperties {

    private String root;
    private String nfs;
    private String syncRootDir;
    private String syncTmpDir;
    private String syncFileExtention;
    private String datasetDir;
    private String datasetTmpDir;
    private String modelDir;
    private String modelTmpDir;
    private String modelFileExtention;
    private String ptPath;
    private String datasetResponse;
    private TrainingData trainingData;

    @Getter
    @Setter
    public static class TrainingData {
        private String geojsonDir;
    }
}
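A hedged sketch of how these typed properties might be consumed (the class name and paths below are hypothetical, not from the diff): `@ConfigurationProperties(prefix = "file")` binds keys such as `file.root` and `file.sync-root-dir` to the fields via relaxed binding, so services can inject one object instead of many `@Value` fields.

```java
import java.nio.file.Path;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;

// Hypothetical consumer of FileProperties (constructor injection via Lombok).
@Service
@RequiredArgsConstructor
public class DatasetPathResolver {
    private final FileProperties fileProperties;

    // e.g. file.nfs=/kamco-nfs  ->  /kamco-nfs/dataset/export
    public Path exportRoot() {
        return Path.of(fileProperties.getNfs(), "dataset", "export");
    }
}
```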
@@ -0,0 +1,20 @@
package com.kamco.cd.kamcoback.config;

import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

@Getter
@Setter
@Component
@ConfigurationProperties(prefix = "inference")
public class InferenceProperties {

    private String nfs;
    private String url;
    private String batchUrl;
    private String geojsonDir;
    private String jarPath;
    private String inferenceServerName;
}
@@ -24,7 +24,7 @@ public class OpenApiConfig {
   @Value("${swagger.dev-url:https://kamco.dev-api.gs.dabeeo.com}")
   private String devUrl;
 
-  @Value("${swagger.prod-url:https://api.kamco.com}")
+  @Value("${swagger.prod-url:https://aicd-api.e-kamco.com:12013}")
   private String prodUrl;
 
   @Bean
@@ -51,9 +51,9 @@ public class OpenApiConfig {
       servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버"));
       // servers.add(new Server().url(prodUrl).description("운영 서버"));
     } else if ("prod".equals(profile)) {
-      // servers.add(new Server().url(prodUrl).description("운영 서버"));
+      servers.add(new Server().url(prodUrl).description("운영 서버"));
       servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버"));
       servers.add(new Server().url(devUrl).description("개발 서버"));
 
     } else {
       servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버"));
       servers.add(new Server().url(devUrl).description("개발 서버"));
@@ -105,7 +105,8 @@ public class SecurityConfig {
                 "/api/layer/map/**",
                 "/api/layer/tile-url",
                 "/api/layer/tile-url-year",
-                "/api/common-code/clazz")
+                "/api/common-code/clazz",
+                "/api/inference/download/**")
             .permitAll()
             // logged-in users only (IAM)
             .requestMatchers(
@@ -16,6 +16,8 @@ public class StartupLogger {
 
   private final Environment environment;
   private final DataSource dataSource;
+  private final FileProperties fileProperties;
+  private final InferenceProperties inferenceProperties;
 
   @EventListener(ApplicationReadyEvent.class)
   public void logStartupInfo() {
@@ -79,6 +81,25 @@ public class StartupLogger {
             │ DDL Auto         : %s
             │ JDBC Batch Size  : %s
             │ Fetch Batch Size : %s
+            ╠════════════════════════════════════════════════════════════════════════════════╣
+            ║ FILE CONFIGURATION                                                             ║
+            ╠────────────────────────────────────────────────────────────────────────────────╣
+            │ Root Directory   : %s
+            │ NFS Mount Path   : %s
+            │ Sync Root Dir    : %s
+            │ Sync Tmp Dir     : %s
+            │ Dataset Dir      : %s
+            │ Model Dir        : %s
+            │ PT Path          : %s
+            ╠════════════════════════════════════════════════════════════════════════════════╣
+            ║ INFERENCE CONFIGURATION                                                        ║
+            ╠────────────────────────────────────────────────────────────────────────────────╣
+            │ NFS Mount Path   : %s
+            │ Inference URL    : %s
+            │ Batch URL        : %s
+            │ GeoJSON Dir      : %s
+            │ JAR Path         : %s
+            │ Server Names     : %s
             ╚════════════════════════════════════════════════════════════════════════════════╝
             """,
         profileInfo,
@@ -89,7 +110,24 @@ public class StartupLogger {
         showSql,
         ddlAuto,
         batchSize,
-        batchFetchSize);
+        batchFetchSize,
+        fileProperties.getRoot() != null ? fileProperties.getRoot() : "N/A",
+        fileProperties.getNfs() != null ? fileProperties.getNfs() : "N/A",
+        fileProperties.getSyncRootDir() != null ? fileProperties.getSyncRootDir() : "N/A",
+        fileProperties.getSyncTmpDir() != null ? fileProperties.getSyncTmpDir() : "N/A",
+        fileProperties.getDatasetDir() != null ? fileProperties.getDatasetDir() : "N/A",
+        fileProperties.getModelDir() != null ? fileProperties.getModelDir() : "N/A",
+        fileProperties.getPtPath() != null ? fileProperties.getPtPath() : "N/A",
+        inferenceProperties.getNfs() != null ? inferenceProperties.getNfs() : "N/A",
+        inferenceProperties.getUrl() != null ? inferenceProperties.getUrl() : "N/A",
+        inferenceProperties.getBatchUrl() != null ? inferenceProperties.getBatchUrl() : "N/A",
+        inferenceProperties.getGeojsonDir() != null
+            ? inferenceProperties.getGeojsonDir()
+            : "N/A",
+        inferenceProperties.getJarPath() != null ? inferenceProperties.getJarPath() : "N/A",
+        inferenceProperties.getInferenceServerName() != null
+            ? inferenceProperties.getInferenceServerName()
+            : "N/A");
 
     log.info(startupMessage);
   }
@@ -83,7 +83,7 @@ public class ChngDetectMastDto {
 
   @Schema(
       description = "탐지결과 절대경로명 /kamco_nas/export/{chnDtctId}",
-      example = "/kamco-nfs/dataset/export/D5F192EC76D34F6592035BE63A84F591")
+      example = "{file.nfs}/dataset/export/D5F192EC76D34F6592035BE63A84F591")
   private String pathNm;
 
   @Schema(description = "사원번호", example = "123456")
@@ -66,8 +66,11 @@ public class GukYuinApiService {
   @Value("${gukyuin.cdi}")
   private String gukyuinCdiUrl;
 
-  @Value("${file.dataset-dir}")
-  private String datasetDir;
+  @Value("${file.nfs}")
+  private String nfs;
+
+  // @Value("${file.dataset-dir}")
+  // private String datasetDir;
 
   @Transactional
   public ChngDetectMastDto.RegistResDto regist(
@@ -453,7 +456,10 @@ public class GukYuinApiService {
       return new ResponseObj(ApiResponseCode.DUPLICATE_DATA, "이미 국유인 연동을 한 회차입니다.");
     }
 
-    if (!Files.isDirectory(Path.of("/kamco-nfs/dataset/export/" + info.getUid()))) {
+    // String kamconfsDatasetExportPathfsDatasetExportPath = "/kamco-nfs/dataset/export/";
+    String kamconfsDatasetExportPathfsDatasetExportPath =
+        String.format("%s%s", nfs, "/dataset/export/");
+    if (!Files.isDirectory(Path.of(kamconfsDatasetExportPathfsDatasetExportPath + info.getUid()))) {
       return new ResponseObj(
           ApiResponseCode.NOT_FOUND_DATA, "파일 경로에 회차 실행 파일이 생성되지 않았습니다. 확인 부탁드립니다.");
     }
@@ -468,7 +474,7 @@ public class GukYuinApiService {
     reqDto.setCrtrYr(String.valueOf(info.getTargetYyyy()));
     reqDto.setChnDtctSno(String.valueOf(maxStage + 1));
     reqDto.setChnDtctId(info.getUid());
-    reqDto.setPathNm("/kamco-nfs/dataset/export/" + info.getUid());
+    reqDto.setPathNm(kamconfsDatasetExportPathfsDatasetExportPath + info.getUid());
 
     // round 1 must be marked finished before sending round 2
     // mark both inference (learn) and training data (inference) as finished
@@ -151,7 +151,7 @@ public class InferenceResultApiController {
       @RequestBody
       @Valid
       InferenceResultDto.RegReq req) {
-    UUID uuid = inferenceResultService.saveInferenceInfo(req);
+    UUID uuid = inferenceResultService.run(req);
     return ApiResponseDto.ok(uuid);
   }
 
@@ -246,15 +246,15 @@ public class InferenceResultDto {
     @NotBlank
     private String title;
 
-    @Schema(description = "G1", example = "b40e0f68-c1d8-49fc-93f9-a36270093861")
+    @Schema(description = "G1", example = "643adead-f3d2-4f10-9037-862bee919399")
     @NotNull
     private UUID model1Uuid;
 
-    @Schema(description = "G2", example = "ec92b7d2-b5a3-4915-9bdf-35fb3ca8ad27")
+    @Schema(description = "G2", example = "dd86b4ef-28e3-4e3d-9ee4-f60d9cb54e13")
     @NotNull
     private UUID model2Uuid;
 
-    @Schema(description = "G3", example = "37f45782-8ccf-4cf6-911c-a055a1510d39")
+    @Schema(description = "G3", example = "58c1153e-dec6-4424-82a1-189083a9d9dc")
     @NotNull
     private UUID model3Uuid;
 
@@ -676,4 +676,13 @@ public class InferenceResultDto {
     private Long m2ModelBatchId;
     private Long m3ModelBatchId;
   }
 
+  @Getter
+  @Setter
+  @NoArgsConstructor
+  @AllArgsConstructor
+  public static class MapSheetFallbackYearDto {
+    private String mapSheetNum;
+    private Integer mngYyyy;
+  }
 }
@@ -1,10 +1,12 @@
 package com.kamco.cd.kamcoback.inference.dto;
 
 import com.kamco.cd.kamcoback.postgres.entity.InferenceResultsTestingEntity;
+import java.time.ZonedDateTime;
 import lombok.AllArgsConstructor;
 import lombok.Getter;
 import lombok.NoArgsConstructor;
 import lombok.Setter;
+import org.locationtech.jts.geom.Geometry;
 
 public class InferenceResultsTestingDto {
 
@@ -22,4 +24,31 @@ public class InferenceResultsTestingDto {
       return new ShpDto(e.getBatchId(), e.getUid(), e.getMapId());
     }
   }
 
+  @Getter
+  @Setter
+  @AllArgsConstructor
+  @NoArgsConstructor
+  public static class Basic {
+    private Double probability;
+    private Long beforeYear;
+    private Long afterYear;
+    private String mapId;
+    private String modelVersion;
+    private String clsModelPath;
+    private String clsModelVersion;
+    private String cdModelType;
+    private Long id;
+    private String modelName;
+    private Long batchId;
+    private Double area;
+    private String beforeC;
+    private Double beforeP;
+    private String afterC;
+    private Double afterP;
+    private Long seq;
+    private ZonedDateTime createdDate;
+    private String uid;
+    private Geometry geometry;
+  }
 }
@@ -5,8 +5,10 @@ import lombok.Getter;
 import lombok.NoArgsConstructor;
 import lombok.Setter;
 import lombok.ToString;
+import lombok.extern.slf4j.Slf4j;
 
 /** DTO for running inference via the AI API */
+@Slf4j
 @Getter
 @Setter
 @NoArgsConstructor
@@ -0,0 +1,10 @@
package com.kamco.cd.kamcoback.inference.service;

import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequiredArgsConstructor
@RequestMapping("/api/inference/manual")
public class InferenceManualApiController {}
@@ -0,0 +1,24 @@
package com.kamco.cd.kamcoback.inference.service;

import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;

@Service
@RequiredArgsConstructor
public class InferenceManualService {
  private final InferenceResultCoreService inferenceResultCoreService;

  public void getResultsTesting(List<Long> batchIds) {
    List<InferenceResultsTestingDto.Basic> resultList =
        inferenceResultCoreService.getInferenceResults(batchIds);

    if (resultList.isEmpty()) {}

    for (InferenceResultsTestingDto.Basic result : resultList) {
      System.out.println(result);
    }
  }
}
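As a hedged sketch (NOT part of the diff, where the controller is left empty): one way the manual service could eventually be exposed through the new `/api/inference/manual` mapping, shown purely for orientation.

```java
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

// Hypothetical wiring: inject the service and print results for the given batch ids.
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/inference/manual")
public class InferenceManualApiController {
    private final InferenceManualService inferenceManualService;

    @GetMapping("/results")
    public void results(@RequestParam List<Long> batchIds) {
        inferenceManualService.getResultsTesting(batchIds);
    }
}
```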
@@ -21,6 +21,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.DetectOption;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceLearnDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
+import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetFallbackYearDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetNumDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
@@ -45,6 +46,7 @@ import java.time.ZonedDateTime;
 import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -63,6 +65,7 @@ import org.springframework.http.MediaType;
 import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
 
+/** Inference management */
 @Service
 @Log4j2
 @RequiredArgsConstructor
@@ -116,6 +119,273 @@ public class InferenceResultService {
     return dto.getUuid();
   }
 
  /**
   * Run inference - branches between the "exclude from inference" and
   * "use previous-year map sheets" options
   *
   * @param req
   * @return
   */
  @Transactional
  public UUID run(InferenceResultDto.RegReq req) {
    if (req.getDetectOption().equals(DetectOption.EXCL.getId())) {
      return runExcl(req);
    }
    return runPrev(req);
  }

  /**
   * Run change detection with the "exclude from inference" option
   *
   * @param req
   * @return
   */
  public UUID runExcl(InferenceResultDto.RegReq req) {
    // fetch target map sheets
    List<MngListDto> targetDtoList = mapSheetMngCoreService.getHstMapSheetList(req);

    // extract the target list (drop nulls + dedupe)
    List<String> targetList =
        targetDtoList.stream()
            .map(MngListDto::getMapSheetNum)
            .filter(Objects::nonNull)
            .distinct()
            .toList();

    // fetch the compare sheet-number list (drop nulls + dedupe)
    List<String> compareList =
        mapSheetMngCoreService.getMapSheetNumByHst(req.getCompareYyyy()).stream()
            .filter(Objects::nonNull)
            .distinct()
            .toList();

    // compare baseline
    Set<String> compareSet = new HashSet<>(compareList);

    // keep only target sheets that also exist in compare
    List<String> filteredTargetList = targetList.stream().filter(compareSet::contains).toList();

    // log the sheet comparison
    logYearComparison(targetList, compareList, filteredTargetList);

    // generate the compare geojson file
    Scene compareScene =
        getSceneInference(
            req.getCompareYyyy().toString(), // base year
            filteredTargetList, // intersected sheets
            req.getMapSheetScope(), // ALL / partial
            req.getDetectOption()); // EXCL / PREV

    // generate the target geojson file
    Scene targetScene =
        getSceneInference(
            req.getTargetYyyy().toString(), // target year
            filteredTargetList, // intersected sheets
            req.getMapSheetScope(),
            req.getDetectOption());

    // run inference
    return executeInference(
        req,
        targetDtoList, // full target list
        filteredTargetList, // final inference targets
        compareScene, // compare geojson
        targetScene // target geojson
        );
  }

  /**
   * Run change detection with the "use previous-year map sheets" option
   *
   * @param req
   * @return
   */
  @Transactional
  public UUID runPrev(InferenceResultDto.RegReq req) {
    // fetch the target list
    List<MngListDto> targetDtoList = mapSheetMngCoreService.getHstMapSheetList(req);

    // extract target sheet numbers, deduped
    List<String> targetList =
        targetDtoList.stream()
            .map(MngListDto::getMapSheetNum)
            .filter(Objects::nonNull)
            .distinct()
            .toList();

    // fetch the compare list
    List<MapSheetFallbackYearDto> compareDtoList =
        new ArrayList<>(mapSheetMngCoreService.getMapSheetNumDtoByHst(req.getCompareYyyy()));

    // build the compare sheet-number set
    Set<String> compareSet =
        compareDtoList.stream()
            .map(MapSheetFallbackYearDto::getMapSheetNum)
            .filter(Objects::nonNull)
            .collect(Collectors.toSet());

    // sheets present in target but missing from compare
    List<String> remainingTargetList =
        targetList.stream().filter(s -> !compareSet.contains(s)).toList();

    // for sheets missing from compare, look up whether a previous year is usable
    List<MapSheetFallbackYearDto> fallbackYearDtoList =
        mapSheetMngCoreService.findFallbackCompareYearByMapSheets(
            req.getTargetYyyy(), // based on the target year
            remainingTargetList // sheets missing from compare
            );

    // merge the existing compare info with the usable previous-year info
    compareDtoList.addAll(fallbackYearDtoList);

    // deduped set of compare sheets to use
    Set<String> availableCompareSheets =
        compareDtoList.stream()
            .map(MapSheetFallbackYearDto::getMapSheetNum)
            .filter(Objects::nonNull)
            .collect(Collectors.toSet());

    // final inference target sheets:
    // keep only target sheets that exist in compare
    List<String> filteredTargetList =
        targetList.stream().filter(availableCompareSheets::contains).toList();

    // filter compareDtoList by the final set as well
    Set<String> filteredTargetSet = new HashSet<>(filteredTargetList);

    List<MapSheetFallbackYearDto> filteredCompareDtoList =
        compareDtoList.stream()
            .filter(d -> d.getMapSheetNum() != null)
            .filter(d -> filteredTargetSet.contains(d.getMapSheetNum()))
            .toList();

    // compare-only count (compare sheets absent from target), for logging
    long compareOnlyCount =
        compareDtoList.stream()
            .map(MapSheetFallbackYearDto::getMapSheetNum)
            .filter(s -> s != null && !targetList.contains(s))
            .count();

    // log the per-year sheet comparison
    log.info(
        """
        ===== MapSheet Year Comparison =====
        target Total: {}
        compare Total: {}
        Intersection: {}
        target Only (Excluded): {}
        compare Only: {}
        ====================================
        """,
        targetList.size(), // target count
        compareDtoList.size(), // compare count
        filteredTargetList.size(), // final count of sheets to infer after comparing against compare
        targetList.size() - filteredTargetList.size(), // target sheets missing from compare
        compareOnlyCount); // compare sheets missing from target

    // generate the compare geojson (with year fallback applied)
    Scene compareScene =
        getSceneInference(
            filteredCompareDtoList,
            req.getCompareYyyy().toString(),
            req.getMapSheetScope(),
            req.getDetectOption());

    // generate the target geojson
    Scene targetScene =
        getSceneInference(
            req.getTargetYyyy().toString(),
            filteredTargetList,
            req.getMapSheetScope(),
            req.getDetectOption());

    // run AI inference
    return executeInference(req, targetDtoList, filteredTargetList, compareScene, targetScene);
  }

  /**
   * Save to the learn table and call the AI inference API
   *
   * @param req
   * @param targetDtoList
   * @param filteredTargetList
   * @param modelComparePath
   * @param modelTargetPath
   * @return
   */
  private UUID executeInference(
      InferenceResultDto.RegReq req,
      List<MngListDto> targetDtoList,
      List<String> filteredTargetList,
      Scene modelComparePath,
      Scene modelTargetPath) {
    Set<String> filteredSet = new HashSet<>(filteredTargetList);

    List<MngListDto> newTargetList =
        targetDtoList.stream()
            .filter(m -> m.getMapSheetNum() != null)
            .filter(m -> filteredSet.contains(m.getMapSheetNum()))
            .toList();

    UUID uuid = inferenceResultCoreService.saveInferenceInfo(req, newTargetList);

    pred_requests_areas predRequestsAreas = new pred_requests_areas();
    predRequestsAreas.setInput1_year(req.getCompareYyyy());
    predRequestsAreas.setInput2_year(req.getTargetYyyy());
    predRequestsAreas.setInput1_scene_path(modelComparePath.getFilePath());
    predRequestsAreas.setInput2_scene_path(modelTargetPath.getFilePath());

    InferenceSendDto m1 = this.getModelInfo(req.getModel1Uuid());
    m1.setPred_requests_areas(predRequestsAreas);

    log.info("[INFERENCE] Start m1 = {}", m1);

    Long batchId = ensureAccepted(m1);

    SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
    saveInferenceAiDto.setUuid(uuid);
    saveInferenceAiDto.setBatchId(batchId);
    saveInferenceAiDto.setStatus(Status.IN_PROGRESS.getId());
    saveInferenceAiDto.setType(ModelType.G1.getId());
    saveInferenceAiDto.setInferStartDttm(ZonedDateTime.now());
    saveInferenceAiDto.setModelComparePath(modelComparePath.getFilePath());
    saveInferenceAiDto.setModelTargetPath(modelTargetPath.getFilePath());
    saveInferenceAiDto.setModelStartDttm(ZonedDateTime.now());

    inferenceResultCoreService.update(saveInferenceAiDto);

    return uuid;
  }

  /**
   * EXCL logging
   *
   * @param targetList
   * @param compareList
   * @param filteredTargetList
   */
  private void logYearComparison(
      List<String> targetList, List<String> compareList, List<String> filteredTargetList) {
    Set<String> targetSet = new HashSet<>(targetList);

    long compareOnlyCount = compareList.stream().filter(s -> !targetSet.contains(s)).count();

    log.info(
        """
        ===== MapSheet Year Comparison =====
        target Total: {}
        compare Total: {}
        Intersection: {}
        target Only (Excluded): {}
        compare Only: {}
        ====================================
        """,
        targetList.size(), // target count
        compareList.size(), // compare count
        filteredTargetList.size(), // final count of sheets to infer after comparing against compare
        targetList.size() - filteredTargetList.size(), // target sheets missing from compare
        compareOnlyCount); // compare sheets missing from target
  }

  /**
   * Create change-detection run info
   *
@@ -238,6 +508,8 @@ public class InferenceResultService {
     predRequestsAreas.setInput2_scene_path(modelTargetPath.getFilePath());
 
     InferenceSendDto m1 = this.getModelInfo(req.getModel1Uuid());
+
+    log.info("[INFERENCE] Start m1 = {}", m1);
     m1.setPred_requests_areas(predRequestsAreas);
 
     // call the AI inference-run API
@@ -321,10 +593,11 @@ public class InferenceResultService {
   }
 
   /**
-   * Call the inference AI API
+   * Call the inference AI API and return the batch id
    *
    * @param dto
    */
+  // why are there two identical functions?
  private Long ensureAccepted(InferenceSendDto dto) {
 
    if (dto == null) {
@@ -332,6 +605,14 @@ public class InferenceResultService {
       throw new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST);
     }
 
+    // [duplicate] adjust paths for the prod environment - dean 260226
+    if (profile != null && profile.equals("prod")) {
+      log.info("========================================================");
+      log.info("[CHANGE INFERENCE] profile = {} Inforence req", profile);
+      log.info("========================================================");
+      log.info("");
+    }
+
     // 1) request log
     try {
       log.debug("Inference request dto={}", objectMapper.writeValueAsString(dto));
@@ -340,19 +621,22 @@ public class InferenceResultService {
     }
 
     // 2) temporary handling for the local environment
-    if ("local".equals(profile)) {
-      if (dto.getPred_requests_areas() == null) {
-        throw new IllegalStateException("pred_requests_areas is null");
-      }
-      dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson");
-      dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson");
-    }
+    // if ("local".equals(profile)) {
+    //   if (dto.getPred_requests_areas() == null) {
+    //     throw new IllegalStateException("pred_requests_areas is null");
+    //   }
+    //
+    //   dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson");
+    //
+    //   dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson");
+    // }
 
     // 3) HTTP call
     HttpHeaders headers = new HttpHeaders();
     headers.setContentType(MediaType.APPLICATION_JSON);
     headers.setAccept(List.of(MediaType.APPLICATION_JSON));
 
     // call the inference-run API
     ExternalCallResult<String> result =
         externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
@@ -367,12 +651,14 @@ public class InferenceResultService {
             objectMapper.readValue(result.body(), new TypeReference<>() {});
 
     if (list.isEmpty()) {
-      throw new IllegalStateException("Inference response is empty");
+      throw new CustomApiException(
+          "NOT_FOUND", HttpStatus.NOT_FOUND, "Inference response is empty");
     }
 
     Object batchIdObj = list.get(0).get("batch_id");
     if (batchIdObj == null) {
-      throw new IllegalStateException("batch_id not found in response");
+      throw new CustomApiException(
+          "NOT_FOUND", HttpStatus.NOT_FOUND, "batch_id not found in response");
     }
 
     return Long.valueOf(batchIdObj.toString());
@@ -391,6 +677,7 @@ public class InferenceResultService {
    */
   private InferenceSendDto getModelInfo(UUID uuid) {
 
+    // look up model info
     Basic modelInfo = modelMngCoreService.findByModelUuid(uuid);
 
     String cdModelPath = "";
@@ -429,7 +716,8 @@ public class InferenceResultService {
     sendDto.setCls_model_path(cdClsModelPath);
     sendDto.setCls_model_version(modelInfo.getModelVer());
     sendDto.setCd_model_type(modelType);
-    sendDto.setPriority(modelInfo.getPriority());
+    sendDto.setPriority(5d);
     log.info("[Inference Send]SendDto={}", sendDto);
     return sendDto;
   }
@@ -447,6 +735,23 @@ public class InferenceResultService {
         yyyy, mapSheetNums, mapSheetScope, detectOption);
   }
 
+  /**
+   * Query per year and generate the geojson file
+   *
+   * @param yearDtos
+   * @param year
+   * @param mapSheetScope
+   * @param detectOption
+   * @return
+   */
+  private Scene getSceneInference(
+      List<MapSheetFallbackYearDto> yearDtos,
+      String year,
+      String mapSheetScope,
+      String detectOption) {
+    return mapSheetMngCoreService.getSceneInference(yearDtos, year, mapSheetScope, detectOption);
+  }
+
   /**
    * Analysis result summary
    *
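As a quick illustration of the intersection rule that `runExcl` applies (the sheet numbers below are invented, not from the diff): only target-year sheets that also exist in the compare year survive the filter and are sent to inference.

```java
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Hypothetical sheet numbers; real values come from the map-sheet history tables.
Set<String> compareSet = new HashSet<>(List.of("35812", "35813"));
List<String> targetList = List.of("35812", "35814");

// 35814 has no compare-year imagery, so it is excluded.
List<String> filteredTargetList =
    targetList.stream().filter(compareSet::contains).toList(); // -> ["35812"]
```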
@@ -47,6 +47,9 @@ public class MapSheetMngService {
   private final UploadService uploadService;
   private final UserUtil userUtil = new UserUtil();
 
+  @Value("${file.root}")
+  private String nfsRootDir;
+
   @Value("${file.sync-root-dir}")
   private String syncRootDir;
 
@@ -111,7 +114,6 @@ public class MapSheetMngService {
   public DmlReturn uploadPair(
       MultipartFile tfwFile, String tifFile, Long hstUid, Long tifFileSize) {
 
-    String rootPath = syncRootDir;
     String tmpPath = syncTmpDir;
 
     DmlReturn dmlReturn = new DmlReturn("success", "UPLOAD COMPLETE");
@@ -133,6 +135,7 @@ public class MapSheetMngService {
       return dmlReturn;
     }
 
+    // TODO delete?
     MngDto mngDto = mapSheetMngCoreService.findMapSheetMng(errDto.getMngYyyy());
 
     // duplicate check -> uploads go under sheet50k/uuid, so nothing is overwritten
@@ -336,12 +339,11 @@ public class MapSheetMngService {
 
   public FoldersDto getFolderAll(SrchFoldersDto srchDto) {
 
-    Path startPath = Paths.get(syncRootDir + srchDto.getDirPath());
     String dirPath = syncRootDir + srchDto.getDirPath();
     String sortType = "name desc";
 
     log.info("[FIND_FOLDER] DIR : {}", dirPath);
     List<FIleChecker.Folder> folderList =
-        FIleChecker.getFolderAll(dirPath).stream()
+        FIleChecker.getFolderAll(dirPath, nfsRootDir).stream()
             .filter(dir -> dir.getIsValid().equals(true))
             .toList();
 
@@ -369,6 +369,12 @@ public class InferenceResultCoreService {
     return mapSheetLearnRepository.getInferenceServerStatusList();
   }
 
+  /**
+   * Look up an in-progress batch
+   *
+   * @param status
+   * @return
+   */
   public InferenceBatchSheet getInferenceResultByStatus(String status) {
     MapSheetLearnEntity entity =
         mapSheetLearnRepository.getInferenceResultByStatus(status).orElse(null);
@@ -403,6 +409,12 @@ public class InferenceResultCoreService {
     return mapSheetLearnRepository.getInferenceAiResultById(id, modelUuid);
   }
 
+  /**
+   * Inference progress detail
+   *
+   * @param uuid
+   * @return
+   */
   public InferenceStatusDetailDto getInferenceStatus(UUID uuid) {
     return mapSheetLearnRepository.getInferenceStatus(uuid);
   }
@@ -498,15 +510,16 @@ public class InferenceResultCoreService {
   }
 
   /**
-   * Query inference results for generating the shp file
+   * Query inference results
    *
    * @param batchIds
    * @return
    */
-  public List<InferenceResultsTestingDto.ShpDto> getInferenceResults(List<Long> batchIds) {
+  public List<InferenceResultsTestingDto.Basic> getInferenceResults(List<Long> batchIds) {
     List<InferenceResultsTestingEntity> list =
         inferenceResultsTestingRepository.getInferenceResultList(batchIds);
-    return list.stream().map(InferenceResultsTestingDto.ShpDto::fromEntity).toList();
+
+    return list.stream().map(InferenceResultsTestingEntity::toDto).toList();
   }
 
   public Long getInferenceResultCnt(List<Long> batchIds) {
@@ -7,6 +7,7 @@ import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
 import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.Scene;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.DetectOption;
+import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetFallbackYearDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
 import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
 import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListCompareDto;
@@ -24,13 +25,14 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.time.ZonedDateTime;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.UUID;
 import java.util.stream.Collectors;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Value;
@@ -233,27 +235,52 @@ public class MapSheetMngCoreService {
   }
 
   /**
-   * Generate the geojson file needed to run inference
+   * Generate geojson
    *
-   * @param yyyy per-file year from imagery management
-   * @param scenes list of 5k map sheet numbers
-   * @param mapSheetScope EXCL: exclude from inference, PREV: use previous-year sheets
+   * @param yyyy
+   * @param scenes
+   * @param mapSheetScope
+   * @param detectOption
    * @return
    */
   public Scene getSceneInference(
       String yyyy, List<String> scenes, String mapSheetScope, String detectOption) {
-
-    Map<String, Object> result = new HashMap<>();
-    boolean isAll = MapSheetScope.ALL.getId().equals(mapSheetScope);
-
-    String optionSuffix = "";
-    if (DetectOption.EXCL.getId().equals(detectOption)) {
-      optionSuffix = "_EXCL";
-    } else if (DetectOption.PREV.getId().equals(detectOption)) {
-      optionSuffix = "_PREV";
-    }
-
-    // 1) decide path/filename
+    List<ImageFeature> features = loadSceneInferenceBySheets(yyyy, scenes);
+    return writeSceneGeoJson(yyyy, mapSheetScope, detectOption, features);
+  }
+
+  /**
+   * Generate geojson
+   *
+   * @param yearDtos
+   * @param yyyy
+   * @param mapSheetScope
+   * @param detectOption
+   * @return
+   */
+  public Scene getSceneInference(
+      List<MapSheetFallbackYearDto> yearDtos,
+      String yyyy,
+      String mapSheetScope,
+      String detectOption) {
+    List<ImageFeature> features = loadSceneInferenceByFallbackYears(yearDtos);
+    return writeSceneGeoJson(yyyy, mapSheetScope, detectOption, features);
+  }
+
+  /**
+   * Resolve the file path/name, create the file, and return the sheet numbers
+   *
+   * @param yyyy
+   * @param mapSheetScope
+   * @param detectOption
+   * @param sceneInference
+   * @return Scene
+   */
+  private Scene writeSceneGeoJson(
+      String yyyy, String mapSheetScope, String detectOption, List<ImageFeature> sceneInference) {
+    boolean isAll = MapSheetScope.ALL.getId().equals(mapSheetScope);
+    String optionSuffix = buildOptionSuffix(detectOption);
 
     String targetDir =
         "local".equals(activeEnv) ? System.getProperty("user.home") + "/geojson" : inferenceDir;
 
@@ -264,24 +291,11 @@ public class MapSheetMngCoreService {
 
     Path outputPath = Paths.get(targetDir, filename);
 
-    // 2) reuse only when ALL
-    // if (isAll && Files.exists(outputPath)) {
-    //   return outputPath.toString();
-    // }
-
-    // 3) fetch data
-    List<ImageFeature> sceneInference = mapSheetMngRepository.getSceneInference(yyyy, scenes);
-
     if (sceneInference == null || sceneInference.isEmpty()) {
-      log.warn(
-          "NOT_FOUND_TARGET_YEAR: yyyy={}, isAll={}, scenesSize={}",
-          yyyy,
-          isAll,
-          scenes == null ? 0 : scenes.size());
+      log.warn("NOT_FOUND_TARGET_YEAR: yyyy={}, isAll={}, featuresSize={}", yyyy, isAll, 0);
       throw new CustomApiException("NOT_FOUND_TARGET_YEAR", HttpStatus.NOT_FOUND);
     }
 
-    // 4) create the file
     try {
       log.info("create Directories outputPath: {}", outputPath);
       log.info(
@@ -291,15 +305,15 @@ public class MapSheetMngCoreService {
           targetDir,
           filename);
       log.info("outputPath={}, parent={}", outputPath.toAbsolutePath(), outputPath.getParent());
 
       Files.createDirectories(outputPath.getParent());
 
       new GeoJsonFileWriter()
           .exportToFile(sceneInference, "scene_inference_" + yyyy, 5186, outputPath.toString());
       log.info("GeoJsonFileWriter: {}", "scene_inference_" + yyyy);
 
       Scene scene = new Scene();
       scene.setFeatures(sceneInference);
       scene.setFilePath(outputPath.toString());
 
       return scene;
 
     } catch (IOException e) {
@@ -309,6 +323,80 @@ public class MapSheetMngCoreService {
     }
   }
 
+  /**
+   * Suffix for the geojson filename
+   *
+   * @param detectOption
+   * @return
+   */
+  private String buildOptionSuffix(String detectOption) {
+    if (DetectOption.EXCL.getId().equals(detectOption)) return "_EXCL";
+    if (DetectOption.PREV.getId().equals(detectOption)) return "_PREV";
+    return "";
+  }
+
+  /**
+   * Query by year and sheet numbers
+   *
+   * @param yyyy
+   * @param scenes
+   * @return ImageFeature
+   */
+  private List<ImageFeature> loadSceneInferenceBySheets(String yyyy, List<String> scenes) {
+    List<ImageFeature> sceneInference = mapSheetMngRepository.getSceneInference(yyyy, scenes);
+
+    if (sceneInference == null || sceneInference.isEmpty()) {
+      log.warn(
+          "NOT_FOUND_TARGET_YEAR: yyyy={}, scenesSize={}",
+          yyyy,
+          scenes == null ? 0 : scenes.size());
+      throw new CustomApiException("NOT_FOUND_TARGET_YEAR", HttpStatus.NOT_FOUND);
+    }
+    return sceneInference;
+  }
+
+  /**
+   * Query split by year
+   *
+   * @param yearDtos
+   * @return ImageFeature
+   */
+  private List<ImageFeature> loadSceneInferenceByFallbackYears(
+      List<MapSheetFallbackYearDto> yearDtos) {
+    if (yearDtos == null || yearDtos.isEmpty()) {
+      return List.of();
+    }
+
+    // group by year so we can loop per year
+    Map<Integer, List<MapSheetFallbackYearDto>> groupedByYear =
+        yearDtos.stream()
+            .filter(d -> d.getMngYyyy() != null && d.getMapSheetNum() != null)
+            .collect(Collectors.groupingBy(MapSheetFallbackYearDto::getMngYyyy));
+
+    List<ImageFeature> sceneInference = new ArrayList<>();
+
+    for (Map.Entry<Integer, List<MapSheetFallbackYearDto>> entry : groupedByYear.entrySet()) {
+      Integer year = entry.getKey();
+
+      // build the per-year mapSheetNum list
+      List<String> sheetNums =
+          entry.getValue().stream()
+              .map(MapSheetFallbackYearDto::getMapSheetNum)
+              .filter(Objects::nonNull)
+              .distinct()
+              .toList();
+
+      // fetch tif file info
+      List<ImageFeature> temp = mapSheetMngRepository.getSceneInference(year.toString(), sheetNums);
+
+      if (temp != null && !temp.isEmpty()) {
+        sceneInference.addAll(temp);
+      }
+    }
+
+    return sceneInference;
+  }
+
   /**
    * Query base years eligible for change detection
    *
@@ -342,4 +430,31 @@ public class MapSheetMngCoreService {
   public List<MngListCompareDto> getByHstMapSheetCompareList(int mngYyyy, List<String> mapId) {
     return mapSheetMngYearRepository.findByHstMapSheetCompareList(mngYyyy, mapId);
   }
 
+  public List<String> getMapSheetNumByHst(Integer year) {
+    List<MapSheetMngHstEntity> entity = mapSheetMngRepository.getMapSheetMngHst(year);
+    return entity.stream().map(MapSheetMngHstEntity::getMapSheetNum).toList();
+  }
+
+  /**
+   * Query map-sheet history data for a given year
+   *
+   * @param year
+   * @return
+   */
+  public List<MapSheetFallbackYearDto> getMapSheetNumDtoByHst(Integer year) {
+    List<MapSheetMngHstEntity> entity = mapSheetMngRepository.getMapSheetMngHst(year);
+    return entity.stream()
+        .map(
+            e ->
+                new MapSheetFallbackYearDto(
+                    e.getMapSheetNum(), e.getMngYyyy() // query base year
+                    ))
+        .toList();
+  }
+
+  public List<MapSheetFallbackYearDto> findFallbackCompareYearByMapSheets(
+      Integer year, List<String> mapIds) {
+    return mapSheetMngRepository.findFallbackCompareYearByMapSheets(year, mapIds);
+  }
 }
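For orientation, a small sketch of the fallback path used by `runPrev` and `loadSceneInferenceByFallbackYears` (years and sheet numbers are invented): sheets missing from the compare year are paired with an earlier usable year, and features are then loaded one year group at a time.

```java
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

// Hypothetical data: sheet 35814 has no 2023 imagery, so it falls back to 2021.
List<MapSheetFallbackYearDto> compareDtos = List.of(
    new MapSheetFallbackYearDto("35812", 2023),
    new MapSheetFallbackYearDto("35813", 2023),
    new MapSheetFallbackYearDto("35814", 2021));

Map<Integer, List<MapSheetFallbackYearDto>> byYear =
    compareDtos.stream()
        .collect(Collectors.groupingBy(MapSheetFallbackYearDto::getMngYyyy));
// -> {2021=[35814], 2023=[35812, 35813]}; each group is queried separately.
```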
@@ -1,5 +1,6 @@
 package com.kamco.cd.kamcoback.postgres.entity;
 
+import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
 import jakarta.persistence.Column;
 import jakarta.persistence.Entity;
 import jakarta.persistence.Id;
@@ -84,4 +85,28 @@ public class InferenceResultsTestingEntity {
 
   @Column(name = "geometry", columnDefinition = "geometry")
   private Geometry geometry;
 
+  public InferenceResultsTestingDto.Basic toDto() {
+    return new InferenceResultsTestingDto.Basic(
+        this.probability,
+        this.beforeYear,
+        this.afterYear,
+        this.mapId,
+        this.modelVersion,
+        this.clsModelPath,
+        this.clsModelVersion,
+        this.cdModelType,
+        this.id,
+        this.modelName,
+        this.batchId,
+        this.area,
+        this.beforeC,
+        this.beforeP,
+        this.afterC,
+        this.afterP,
+        this.seq,
+        this.createdDate,
+        this.uid,
+        this.geometry);
+  }
 }
@@ -18,29 +18,99 @@ import org.springframework.data.domain.Page;

public interface MapSheetLearnRepositoryCustom {

  /**
   * Retrieves the inference management list.
   *
   * @param req search conditions from the inference management list screen
   * @return page of inference management results
   */
  Page<MapSheetLearnEntity> getInferenceMgnResultList(InferenceResultDto.SearchListReq req);

  /**
   * Looks up inference run information by uuid.
   *
   * @param uuid uuid
   * @return inference run information
   */
  Optional<MapSheetLearnEntity> getInferenceResultByUuid(UUID uuid);

  /**
   * Retrieves server information (CPU, GPU) while inference is running.
   *
   * @return CPU and GPU information
   */
  List<InferenceServerStatusDto> getInferenceServerStatusList();

  /**
   * Looks up inference runs by progress status.
   *
   * @param status inference progress status
   * @return inference run information
   */
  Optional<MapSheetLearnEntity> getInferenceResultByStatus(String status);

  /**
   * Retrieves a registered inference run together with its registered model information.
   *
   * @param id id in the inference run table
   * @param modelUuid model uuid
   * @return model information
   */
  InferenceProgressDto getInferenceAiResultById(Long id, UUID modelUuid);

  /**
   * Retrieves details of an in-progress inference.
   *
   * @param uuid uuid of the inference in progress
   * @return detailed information on the in-progress inference
   */
  InferenceStatusDetailDto getInferenceStatus(UUID uuid);

  /**
   * Checks whether any inference is currently in progress.
   *
   * @return the inference currently in progress
   */
  MapSheetLearnEntity getProcessing();

  /** Returns the next stage number for the given compare/target year pair (1 for a first run). */
  Integer getLearnStage(Integer compareYear, Integer targetYear);

  /**
   * Retrieves inference result information.
   *
   * @param uuid inference uuid
   * @return inference result and the model used
   */
  AnalResultInfo getInferenceResultInfo(UUID uuid);

  /**
   * Retrieves the bbox and point of an inference result.
   *
   * @param uuid inference uuid
   * @return bbox and point information
   */
  BboxPointDto getBboxPoint(UUID uuid);

  /**
   * Retrieves detection counts per class.
   *
   * @param uuid inference uuid
   * @return detection counts per class
   */
  List<Dashboard> getInferenceClassCountList(UUID uuid);

  /**
   * Retrieves the geometry list for the inference result detail view.
   *
   * @param uuid inference uuid
   * @param searchGeoReq geometry search conditions from the result detail screen
   * @return page of geometry entries
   */
  Page<Geom> getInferenceGeomList(UUID uuid, SearchGeoReq searchGeoReq);

  /**
   * Checks whether GukYuin (국유in) linkage is possible.
   *
   * @param uuid inference uuid
   * @return whether the inference exists, whether it ran on partial map sheets, whether an
   *     inference is in progress, and whether a GukYuin job is in progress
   */
  GukYuinLinkFacts findLinkFacts(UUID uuid);
}
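Note: for readers unfamiliar with the pattern, this custom interface is picked up through the standard Spring Data JPA naming convention. A minimal wiring sketch (the combined repository name below is assumed, not taken from this diff):

    import org.springframework.data.jpa.repository.JpaRepository;

    // Spring Data JPA binds MapSheetLearnRepositoryImpl to the custom methods
    // purely by the <InterfaceName>Impl naming convention; the main repository
    // just extends both interfaces.
    public interface MapSheetLearnRepository
        extends JpaRepository<MapSheetLearnEntity, Long>, MapSheetLearnRepositoryCustom {}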
@@ -291,24 +291,6 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
        .fetchOne();
  }

  @Override
  public Integer getLearnStage(Integer compareYear, Integer targetYear) {
    // Latest stage recorded for this (compareYear, targetYear) pair.
    Integer stage =
        queryFactory
            .select(mapSheetLearnEntity.stage)
            .from(mapSheetLearnEntity)
            .where(
                mapSheetLearnEntity
                    .compareYyyy
                    .eq(compareYear)
                    .and(mapSheetLearnEntity.targetYyyy.eq(targetYear)))
            .orderBy(mapSheetLearnEntity.id.desc())
            .limit(1)
            .fetchOne();

    // Next stage number; a first run starts at stage 1.
    return stage == null ? 1 : stage + 1;
  }

  @Override
  public AnalResultInfo getInferenceResultInfo(UUID uuid) {
    QModelMngEntity m1 = new QModelMngEntity("m1");
@@ -528,6 +510,7 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
  @Override
  public GukYuinLinkFacts findLinkFacts(UUID uuid) {

    // Check that the inference exists
    MapSheetLearnEntity learn =
        queryFactory
            .selectFrom(QMapSheetLearnEntity.mapSheetLearnEntity)
@@ -538,12 +521,14 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
      return new GukYuinLinkFacts(false, false, false, false);
    }

    // Check whether the run covered only part of the map sheets
    boolean isPartScope = MapSheetScope.PART.getId().equals(learn.getMapSheetScope());

    QMapSheetAnalInferenceEntity inf = QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
    QMapSheetLearnEntity learn2 = new QMapSheetLearnEntity("learn2");
    QMapSheetLearnEntity learnQ = QMapSheetLearnEntity.mapSheetLearnEntity;

    // Check whether any inference is currently running
    boolean hasRunningInference =
        queryFactory
            .selectOne()
@@ -557,6 +542,7 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
            .fetchFirst()
        != null;

    // Check whether another GukYuin job is still unfinished
    boolean hasOtherUnfinishedGukYuin =
        queryFactory
            .selectOne()
@@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.postgres.repository.mapsheet;

import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetFallbackYearDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.AddReq;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListDto;
@@ -63,6 +64,12 @@ public interface MapSheetMngRepositoryCustom {

  List<MapSheetMngDto.MngFilesDto> findByHstUidMapSheetFileList(Long hstUid);

  /**
   * Retrieves the base years for which change detection can be run.
   *
   * @param req query year and list of map-sheet numbers
   * @return list of matching map-sheet management entries
   */
  List<MngListDto> findByHstMapSheetTargetList(InferenceResultDto.RegReq req);

  List<MngListDto> findByHstMapSheetTargetList(int mngYyyy, List<String> mapIds);
@@ -80,4 +87,9 @@ public interface MapSheetMngRepositoryCustom {
  void updateMapSheetMngHstUploadId(Long hstUid, UUID uuid, String uploadId);

  void insertMapSheetMngTile(@Valid AddReq addReq);

  List<MapSheetMngHstEntity> getMapSheetMngHst(Integer year);

  List<MapSheetFallbackYearDto> findFallbackCompareYearByMapSheets(
      Integer year, List<String> mapIds);
}
@@ -5,12 +5,14 @@ import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngEntity.mapSheet
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngFileEntity.mapSheetMngFileEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngHstEntity.mapSheetMngHstEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngTileEntity.mapSheetMngTileEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngYearYnEntity.mapSheetMngYearYnEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QYearEntity.yearEntity;
import static com.querydsl.core.types.dsl.Expressions.nullExpression;

import com.kamco.cd.kamcoback.common.enums.CommonUseStatus;
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetFallbackYearDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.AddReq;
@@ -1099,4 +1101,44 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
            "{0} like '%" + searchReq.getSearchValue() + "%'",
            mapSheetMngHstEntity.mapSheetNum));
  }

  @Override
  public List<MapSheetMngHstEntity> getMapSheetMngHst(Integer year) {
    // History rows for the year whose sync (or sync check) finished and that have a .tif file.
    return queryFactory
        .select(mapSheetMngHstEntity)
        .from(mapSheetMngHstEntity)
        .innerJoin(mapSheetMngFileEntity)
        .on(mapSheetMngFileEntity.hstUid.eq(mapSheetMngHstEntity.hstUid))
        .where(
            mapSheetMngHstEntity
                .mngYyyy
                .eq(year)
                .and(
                    mapSheetMngHstEntity
                        .syncState
                        .eq("DONE")
                        .or(mapSheetMngHstEntity.syncCheckState.eq("DONE")))
                .and(mapSheetMngFileEntity.fileExt.eq("tif")))
        .fetch();
  }

  @Override
  public List<MapSheetFallbackYearDto> findFallbackCompareYearByMapSheets(
      Integer year, List<String> mapIds) {
    // For each requested map sheet: the most recent managed year before `year`
    // that is flagged usable (yn = 'Y').
    BooleanBuilder builder = new BooleanBuilder();
    builder.and(mapSheetMngYearYnEntity.id.mapSheetNum.in(mapIds));
    builder.and(mapSheetMngYearYnEntity.id.mngYyyy.lt(year));
    builder.and(mapSheetMngYearYnEntity.yn.eq("Y"));

    return queryFactory
        .select(
            Projections.constructor(
                MapSheetFallbackYearDto.class,
                mapSheetMngYearYnEntity.id.mapSheetNum,
                mapSheetMngYearYnEntity.id.mngYyyy.max()))
        .from(mapSheetMngYearYnEntity)
        .where(builder)
        .groupBy(mapSheetMngYearYnEntity.id.mapSheetNum)
        .fetch();
  }
}
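Note: to make the grouped max() above concrete, here is the same fallback rule restated over plain collections. This is illustrative only; the row shape is assumed, not code from the diff:

    import java.util.*;
    import java.util.stream.Collectors;

    // Illustrative semantics of findFallbackCompareYearByMapSheets:
    // latest usable year strictly before `year`, per map sheet.
    class FallbackYearSketch {
      record YearYnRow(String mapSheetNum, int mngYyyy, String yn) {}

      static Map<String, Integer> fallbackYears(List<YearYnRow> rows, int year, Set<String> mapIds) {
        return rows.stream()
            .filter(r -> mapIds.contains(r.mapSheetNum()))
            .filter(r -> r.mngYyyy() < year)
            .filter(r -> "Y".equals(r.yn()))
            .collect(Collectors.toMap(
                YearYnRow::mapSheetNum,
                YearYnRow::mngYyyy,
                Math::max)); // keep the most recent qualifying year per sheet
      }
    }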
@@ -307,6 +307,10 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
        queryFactory
            .select(labelingAssignmentEntity.count())
            .from(labelingAssignmentEntity)
            .innerJoin(mapSheetAnalInferenceEntity)
            .on(
                labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id),
                mapSheetAnalInferenceEntity.analState.ne(LabelMngState.FINISH.getId()))
            .where(labelingAssignmentEntity.workerUid.eq(userId))
            .fetchOne();

@@ -327,6 +331,10 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
        queryFactory
            .select(labelingAssignmentEntity.count())
            .from(labelingAssignmentEntity)
            .innerJoin(mapSheetAnalInferenceEntity)
            .on(
                labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id),
                mapSheetAnalInferenceEntity.analState.ne(LabelMngState.FINISH.getId()))
            .where(
                labelingAssignmentEntity.workerUid.eq(userId),
                labelingAssignmentEntity.workState.eq("ASSIGNED"))
@@ -355,6 +363,10 @@ public class TrainingDataLabelRepositoryImpl extends QuerydslRepositorySupport
        queryFactory
            .select(labelingAssignmentEntity.count())
            .from(labelingAssignmentEntity)
            .innerJoin(mapSheetAnalInferenceEntity)
            .on(
                labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id),
                mapSheetAnalInferenceEntity.analState.ne(LabelMngState.FINISH.getId()))
            .where(
                labelingAssignmentEntity.workerUid.eq(userId),
                labelingAssignmentEntity.workState.in(

@@ -314,6 +314,10 @@ public class TrainingDataReviewRepositoryImpl extends QuerydslRepositorySupport
        queryFactory
            .select(labelingAssignmentEntity.count())
            .from(labelingAssignmentEntity)
            .innerJoin(mapSheetAnalInferenceEntity)
            .on(
                labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id),
                mapSheetAnalInferenceEntity.analState.ne(LabelMngState.FINISH.getId()))
            .where(labelingAssignmentEntity.inspectorUid.eq(userId))
            .fetchOne();

@@ -334,6 +338,10 @@ public class TrainingDataReviewRepositoryImpl extends QuerydslRepositorySupport
        queryFactory
            .select(labelingAssignmentEntity.count())
            .from(labelingAssignmentEntity)
            .innerJoin(mapSheetAnalInferenceEntity)
            .on(
                labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id),
                mapSheetAnalInferenceEntity.analState.ne(LabelMngState.FINISH.getId()))
            .where(
                labelingAssignmentEntity.inspectorUid.eq(userId),
                labelingAssignmentEntity.inspectState.eq("UNCONFIRM"))
@@ -362,6 +370,10 @@ public class TrainingDataReviewRepositoryImpl extends QuerydslRepositorySupport
        queryFactory
            .select(labelingAssignmentEntity.count())
            .from(labelingAssignmentEntity)
            .innerJoin(mapSheetAnalInferenceEntity)
            .on(
                labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id),
                mapSheetAnalInferenceEntity.analState.ne(LabelMngState.FINISH.getId()))
            .where(
                labelingAssignmentEntity.inspectorUid.eq(userId),
                labelingAssignmentEntity.inspectState.in("COMPLETE", "EXCEPT"),
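Note: all six counters above repeat the same join and "not finished" guard. A small helper could centralize it; this is a refactoring sketch under the assumption that the QueryDSL types used above are in scope, not code from the diff:

    import com.querydsl.jpa.impl.JPAQuery;

    // Hypothetical helper for these repository classes: shared base query for
    // assignment counters that only counts rows whose parent inference is not FINISH.
    private JPAQuery<Long> unfinishedAssignments() {
      return queryFactory
          .select(labelingAssignmentEntity.count())
          .from(labelingAssignmentEntity)
          .innerJoin(mapSheetAnalInferenceEntity)
          .on(
              labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id),
              mapSheetAnalInferenceEntity.analState.ne(LabelMngState.FINISH.getId()));
    }

    // Example use:
    // Long assigned = unfinishedAssignments()
    //     .where(labelingAssignmentEntity.workerUid.eq(userId),
    //            labelingAssignmentEntity.workState.eq("ASSIGNED"))
    //     .fetchOne();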
@@ -354,9 +354,8 @@ public class MapSheetInferenceJobService {
        Paths.get(progressDto.getCdModelClsPath(), progressDto.getCdModelClsFileName()).toString());
    m.setCls_model_version(progressDto.getClsModelVersion());
    m.setCd_model_type(type);
    m.setPriority(progressDto.getPriority());

    // log.info("InferenceSendDto={}", m);
    m.setPriority(5d); // overrides the priority set above with a fixed value
    log.info("[BEFORE INFERENCE] BEFORE SendDto={}", m);
    // Call the inference execution API
    Long batchId = ensureAccepted(m);

@@ -376,6 +375,7 @@ public class MapSheetInferenceJobService {
   * @param dto inference request payload
   * @return batch id returned by the inference server
   */
  // Why are there two identical functions?
  private Long ensureAccepted(InferenceSendDto dto) {
    if (dto == null) {
      log.warn("InferenceSendDto is null");
@@ -383,22 +383,28 @@ public class MapSheetInferenceJobService {
    }

    // 1) Log the request
    log.info("Inference request dto={}", dto);

    log.info("");
    log.info("========================================================");
    log.info("[SEND INFERENCE] Inference request dto= {}", dto);
    log.info("========================================================");
    log.info("");
    // 2) Temporary handling for the local environment
    if ("local".equals(profile)) {
      if (dto.getPred_requests_areas() == null) {
        throw new IllegalStateException("pred_requests_areas is null");
      }
      dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson");
      dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson");
    }
    // if ("local".equals(profile)) {
    //   if (dto.getPred_requests_areas() == null) {
    //     throw new IllegalStateException("pred_requests_areas is null");
    //   }
    //
    //   dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson");
    //
    //   dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson");
    // }

    // 3) HTTP call
    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);
    headers.setAccept(List.of(MediaType.APPLICATION_JSON));

    // TODO: log which URL and which parameters the request was sent with
    ExternalCallResult<String> result =
        externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);

@@ -413,6 +419,7 @@ public class MapSheetInferenceJobService {
            objectMapper.readValue(result.body(), new TypeReference<>() {});

    if (list.isEmpty()) {
      // We would need the request URL and parameters to reproduce this case
      throw new IllegalStateException("Inference response is empty");
    }
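Note: ExternalCallResult and externalHttpClient are project-internal types. The HTTP step above is roughly equivalent to the following plain Spring sketch; the RestTemplate usage is an assumed stand-in for illustration, not the project's actual client:

    import java.util.List;
    import org.springframework.http.*;
    import org.springframework.web.client.RestTemplate;

    // Sketch only: POST the InferenceSendDto as JSON and return the raw body,
    // which the caller then parses with ObjectMapper.
    static String postInference(String inferenceUrl, Object dto) {
      RestTemplate rest = new RestTemplate();
      HttpHeaders headers = new HttpHeaders();
      headers.setContentType(MediaType.APPLICATION_JSON);
      headers.setAccept(List.of(MediaType.APPLICATION_JSON));
      ResponseEntity<String> resp =
          rest.exchange(inferenceUrl, HttpMethod.POST, new HttpEntity<>(dto, headers), String.class);
      return resp.getBody();
    }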
@@ -31,7 +31,7 @@ public class TrainingDataReviewJobService {
  @Value("${spring.profiles.active}")
  private String profile;

  @Value("${training-data.geojson-dir}")
  @Value("${file.training-data.geojson-dir}")
  private String trainingDataDir;

  private boolean isLocalProfile() {
@@ -89,36 +89,17 @@ mapsheet:
  shp:
    baseurl: /app/tmp/detect/result # not currently used



file:
  #sync-root-dir: D:/kamco-nfs/images/
  sync-root-dir: /kamco-nfs/images/
  sync-tmp-dir: /kamco-nfs/requests/temp # image upload temp dir
  #sync-tmp-dir: ${file.sync-root-dir}/tmp
  sync-file-extention: tfw,tif
  sync-auto-exception-start-year: 2024
  sync-auto-exception-before-year-cnt: 3

  #dataset-dir: D:/kamco-nfs/model_output/
  dataset-dir: /kamco-nfs/model_output/export/ # mount path for AI inference results
  dataset-tmp-dir: ${file.dataset-dir}tmp/

  #model-dir: D:/kamco-nfs/ckpt/model/
  model-dir: /kamco-nfs/ckpt/model/ # upload path for models trained on the training server
  model-tmp-dir: ${file.model-dir}tmp/
  model-file-extention: pth,json,py

  pt-path: /kamco-nfs/ckpt/model/v6-cls-checkpoints/
  root: kamco-nfs
  nfs: /kamco-nfs
  pt-path: ${file.nfs}/ckpt/model/v6-cls-checkpoints/
  pt-FileName: yolov8_6th-6m.pt

  dataset-response: /kamco-nfs/dataset/response/

inference:
  nfs: /kamco-nfs
  url: http://192.168.2.183:8000/jobs
  batch-url: http://192.168.2.183:8000/batches
  geojson-dir: /kamco-nfs/requests/ # path where request files for inference runs are generated
  jar-path: /kamco-nfs/repo/jar/shp-exporter.jar
  jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
  inference-server-name: server1,server2,server3,server4

gukyuin:
@@ -126,9 +107,6 @@ gukyuin:
  url: http://192.168.2.129:5301
  cdi: ${gukyuin.url}/api/kcd/cdi

training-data:
  geojson-dir: /kamco-nfs/dataset/request/

layer:
  geoserver-url: https://kamco.geo-dev.gs.dabeeo.com
  wms-path: geoserver/cd
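Note on the ${file.nfs} indirection introduced above: Spring resolves nested property placeholders at bind time, so code keeps injecting the leaf keys unchanged while only the root path moves per environment. A minimal sketch, assuming standard @Value injection (the class and field names are illustrative):

    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.stereotype.Component;

    @Component
    class InferencePathProps {
      // With file.nfs=/kamco-nfs, resolves to /kamco-nfs/ckpt/model/v6-cls-checkpoints/
      @Value("${file.pt-path}")
      private String ptPath;

      // With inference.nfs=/kamco-nfs, resolves to /kamco-nfs/repo/jar/shp-exporter.jar
      @Value("${inference.jar-path}")
      private String jarPath;
    }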
@@ -67,33 +67,17 @@ mapsheet:
  shp:
    baseurl: /Users/bokmin/detect/result



file:
  sync-root-dir: C:/Users/gypark/kamco-nfs/images/
  #sync-root-dir: /kamco-nfs/images/
  sync-tmp-dir: ${file.sync-root-dir}/tmp/
  sync-file-extention: tfw,tif
  sync-auto-exception-start-year: 2025
  sync-auto-exception-before-year-cnt: 3

  dataset-dir: C:/Users/gypark/kamco-nfs/dataset/
  #dataset-dir: /kamco-nfs/dataset/export/
  dataset-tmp-dir: ${file.dataset-dir}tmp/

  model-dir: C:/Users/gypark/kamco-nfs/ckpt/model/
  #model-dir: /kamco-nfs/ckpt/model/
  model-tmp-dir: ${file.model-dir}tmp/
  model-file-extention: pth,json,py

  pt-path: /kamco-nfs/ckpt/classification/
  root: kamco-nfs
  nfs: C:/Users/gypark/kamco-nfs
  pt-path: ${file.nfs}/ckpt/classification/
  pt-FileName: v5-best.pt

inference:
  nfs: C:/Users/gypark/kamco-nfs
  url: http://10.100.0.11:8000/jobs
  batch-url: http://10.100.0.11:8000/batches
  geojson-dir: /kamco-nfs/requests/
  jar-path: jar/makeshp-1.0.0.jar
  jar-path: jar/shp-exporter.jar
  inference-server-name: server1,server2,server3,server4

gukyuin:
@@ -101,10 +85,8 @@ gukyuin:
  url: http://192.168.2.129:5301
  cdi: ${gukyuin.url}/api/kcd/cdi

training-data:
  geojson-dir: /kamco-nfs/model_output/labeling/

layer:
  geoserver-url: https://kamco.geo-dev.gs.dabeeo.com
  path: /geoserver/cd/gwc/service/
  wms-path: geoserver/cd
  wmts-path: geoserver/cd/gwc/service
  workspace: cd
@@ -16,13 +16,9 @@ spring:
        format_sql: true # ⚠️ optional - SQL formatting (readability)
      jdbc:
        batch_size: 1000 # ✅ added (JDBC batching)
  open-in-view: false
  mvc:
    async:
      request-timeout: 300s # 5 minutes (values like 30s, 120s, 10m also work)

  datasource:
    url: jdbc:postgresql://127.0.0.1:15432/kamco_cds
    url: jdbc:postgresql://kamco-cd-postgis:5432/kamco_cds
    #url: jdbc:postgresql://localhost:15432/kamco_cds
    username: kamco_cds
    password: kamco_cds_Q!W@E#R$
@@ -64,11 +60,15 @@ token:
  refresh-cookie-name: kamco # cookie name for development
  refresh-cookie-secure: true # set to false for local http testing

springdoc:
  swagger-ui:
    persist-authorization: true # keep the token across Swagger UI reloads (stored in local storage)

logging:
  level:
    root: INFO
    org.springframework.web: DEBUG
    org.springframework.security: DEBUG
    org.springframework.web: INFO
    org.springframework.security: INFO

    # tone down health-check noise to the essentials
    org.springframework.security.web.FilterChainProxy: INFO
@@ -84,40 +84,23 @@ mapsheet:
    baseurl: /app/detect/result # not currently used

file:
  #sync-root-dir: D:/kamco-nfs/images/
  sync-root-dir: /kamco-nfs/images/
  sync-tmp-dir: ${file.sync-root-dir}/tmp # image upload temp dir
  sync-file-extention: tfw,tif
  sync-auto-exception-start-year: 2025
  sync-auto-exception-before-year-cnt: 3

  #dataset-dir: D:/kamco-nfs/model_output/ # changed to model_output
  dataset-dir: /kamco-nfs/model_output/export/ # mount path for AI inference results
  dataset-tmp-dir: ${file.dataset-dir}tmp/

  #model-dir: D:/kamco-nfs/ckpt/model/
  model-dir: /kamco-nfs/ckpt/model/ # upload path for models trained on the training server
  model-tmp-dir: ${file.model-dir}tmp/
  model-file-extention: pth,json,py

  pt-path: /kamco-nfs/ckpt/model/v6-cls-checkpoints/
  root: data
  nfs: /data
  pt-path: ${file.nfs}/ckpt/model/v6-cls-checkpoints/
  pt-FileName: yolov8_6th-6m.pt

  dataset-response: /kamco-nfs/dataset/response/

inference:
  url: http://127.0.0.1:8000/jobs
  batch-url: http://127.0.0.1:8000/batches
  geojson-dir: /kamco-nfs/requests/ # path where request files for inference runs are generated
  jar-path: /kamco-nfs/repo/jar/shp-exporter.jar
  nfs: /data
  url: http://172.16.4.56:8000/jobs
  batch-url: http://172.16.4.56:8000/batches
  jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar
  inference-server-name: server1,server2,server3,server4

gukyuin:
  url: http://127.0.0.1:5301
  cdi: ${gukyuin.url}/api/kcd/cdi

training-data:
  geojson-dir: /kamco-nfs/dataset/request/


layer:
  geoserver-url: https://kamco.geo-dev.gs.dabeeo.com
120
src/main/resources/application-prod.yml_bak
Normal file
@@ -0,0 +1,120 @@
spring:
  config:
    activate:
      on-profile: prod

  jpa:
    show-sql: true
    hibernate:
      ddl-auto: validate
    properties:
      hibernate:
        default_batch_fetch_size: 100 # ✅ performance - avoids N+1 queries
        order_updates: true # ✅ performance - orders updates to avoid deadlocks
        order_inserts: true
        use_sql_comments: true # ⚠️ optional - adds comments to SQL (for debugging)
        format_sql: true # ⚠️ optional - SQL formatting (readability)
        jdbc:
          batch_size: 1000 # ✅ added (JDBC batching)

  datasource:
    url: jdbc:postgresql://kamco-cd-postgis:5432/kamco_cds
    #url: jdbc:postgresql://localhost:15432/kamco_cds
    username: kamco_cds
    password: kamco_cds_Q!W@E#R$
    hikari:
      minimum-idle: 10
      maximum-pool-size: 20
      connection-timeout: 60000 # 60-second connection timeout
      idle-timeout: 300000 # 5-minute idle timeout
      max-lifetime: 1800000 # 30-minute maximum lifetime
      leak-detection-threshold: 60000 # connection leak detection

  transaction:
    default-timeout: 300 # 5-minute transaction timeout
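Note: Spring Boot binds the spring.datasource.hikari.* keys above onto HikariCP automatically; the following programmatic equivalent is illustrative only, to make the units explicit:

    import com.zaxxer.hikari.HikariConfig;

    // Same pool settings as the YAML above, expressed directly on HikariConfig.
    static HikariConfig poolConfig() {
      HikariConfig cfg = new HikariConfig();
      cfg.setMinimumIdle(10);
      cfg.setMaximumPoolSize(20);
      cfg.setConnectionTimeout(60_000);      // 60 s
      cfg.setIdleTimeout(300_000);           // 5 min
      cfg.setMaxLifetime(1_800_000);         // 30 min
      cfg.setLeakDetectionThreshold(60_000); // warn on connections held > 60 s
      return cfg;
    }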
  data:
    redis:
      host: 127.0.0.1
      port: 16379
      password: kamco

  servlet:
    multipart:
      enabled: true
      max-file-size: 4GB
      max-request-size: 4GB
      file-size-threshold: 10MB

server:
  tomcat:
    max-swallow-size: 4GB
    max-http-form-post-size: 4GB

jwt:
  secret: "kamco_token_9b71e778-19a3-4c1d-97bf-2d687de17d5b"
  access-token-validity-in-ms: 86400000 # 1 day
  refresh-token-validity-in-ms: 604800000 # 7 days

token:
  refresh-cookie-name: kamco # cookie name for development
  refresh-cookie-secure: true # set to false for local http testing

logging:
  level:
    root: INFO
    org.springframework.web: INFO
    org.springframework.security: INFO

    # tone down health-check noise to the essentials
    org.springframework.security.web.FilterChainProxy: INFO
    org.springframework.security.web.authentication.AnonymousAuthenticationFilter: INFO
    org.springframework.security.web.authentication.Http403ForbiddenEntryPoint: INFO
    org.springframework.web.servlet.DispatcherServlet: INFO


mapsheet:
  upload:
    skipGdalValidation: true
  shp:
    baseurl: /app/detect/result # not currently used

file:
  #sync-root-dir: D:/kamco-nfs/images/
  sync-root-dir: /kamco-nfs/images/
  sync-tmp-dir: ${file.sync-root-dir}/tmp # image upload temp dir
  sync-file-extention: tfw,tif
  sync-auto-exception-start-year: 2025
  sync-auto-exception-before-year-cnt: 3

  #dataset-dir: D:/kamco-nfs/model_output/ # changed to model_output
  dataset-dir: /kamco-nfs/model_output/export/ # mount path for AI inference results
  dataset-tmp-dir: ${file.dataset-dir}tmp/

  #model-dir: D:/kamco-nfs/ckpt/model/
  model-dir: /kamco-nfs/ckpt/model/ # upload path for models trained on the training server
  model-tmp-dir: ${file.model-dir}tmp/
  model-file-extention: pth,json,py

  pt-path: /kamco-nfs/ckpt/model/v6-cls-checkpoints/
  pt-FileName: yolov8_6th-6m.pt

inference:
  url: http://127.0.0.1:8000/jobs
  batch-url: http://127.0.0.1:8000/batches
  geojson-dir: /kamco-nfs/requests/ # path where request files for inference runs are generated
  jar-path: /kamco-nfs/repo/jar/shp-exporter.jar
  inference-server-name: server1,server2,server3,server4

gukyuin:
  url: http://127.0.0.1:5301
  cdi: ${gukyuin.url}/api/kcd/cdi

training-data:
  geojson-dir: /kamco-nfs/dataset/request/

layer:
  geoserver-url: https://kamco.geo-dev.gs.dabeeo.com
  wms-path: geoserver/cd
  wmts-path: geoserver/cd/gwc/service
  workspace: cd
@@ -67,3 +67,22 @@ management:
      include:
        - "health"

file:
  root: kamco-nfs
  nfs: /kamco-nfs
  sync-root-dir: ${file.nfs}/images/
  sync-tmp-dir: ${file.nfs}/requests/temp # image upload temp dir
  sync-file-extention: tfw,tif
  dataset-dir: ${file.nfs}/model_output/export/ # mount path for AI inference results
  dataset-tmp-dir: ${file.dataset-dir}tmp/
  model-dir: ${file.nfs}/ckpt/model/ # upload path for models trained on the training server
  model-tmp-dir: ${file.model-dir}tmp/
  model-file-extention: pth,json,py
  pt-path: ${file.nfs}/ckpt/model/v6-cls-checkpoints/
  dataset-response: ${file.nfs}/dataset/response/
training-data:
  geojson-dir: ${file.nfs}/dataset/request/
inference:
  nfs: /kamco-nfs
  geojson-dir: ${inference.nfs}/requests/ # path where request files for inference runs are generated
  jar-path: ${inference.nfs}/repo/jar/shp-exporter.jar