Merge pull request 'feat/infer_dev_260107' (#311) from feat/infer_dev_260107 into develop

Reviewed-on: https://kamco.gitea.gs.dabeeo.com/dabeeo/kamco-dabeeo-backoffice/pulls/311
2026-01-22 19:37:19 +09:00
35 changed files with 1134 additions and 758 deletions

View File

@@ -0,0 +1,571 @@
#!/bin/bash
# pack_offline_bundle_airgap_macos.sh
# ============================================================================
# Gradle Offline Bundle Packer (macOS)
# ============================================================================
# Version: 4.0
#
# WORKFLOW:
# 1. [ONLINE] Build project (./gradlew bootJar) - downloads all deps
# 2. [ONLINE] Test run (./gradlew bootRun) - verify app works
# 3. [OFFLINE TEST] Verify offline build works
# 4. Create bundle with all cached dependencies
#
# REQUIREMENTS:
# - Internet connection (for initial build)
# - Project with gradlew
# - macOS 10.13+ (High Sierra or later)
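#
# USAGE (no arguments; run from the project root that contains gradlew):
#   ./pack_offline_bundle_airgap_macos.sh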
# ============================================================================
set -e
# ============================================================================
# Configuration
# ============================================================================
WRAPPER_SEED_PATH="wrapper_jar_seed"
OFFLINE_HOME_NAME="_offline_gradle_home"
BOOTRUN_TIMEOUT_SECONDS=60
# Color codes
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
GRAY='\033[0;90m'
WHITE='\033[1;37m'
NC='\033[0m' # No Color
echo ""
echo -e "${CYAN}============================================================${NC}"
echo -e "${CYAN} Gradle Offline Bundle Packer v4.0 (macOS)${NC}"
echo -e "${CYAN}============================================================${NC}"
echo ""
echo -e "${WHITE} This script will:${NC}"
echo -e "${GRAY} 1. Build project with internet (download dependencies)${NC}"
echo -e "${GRAY} 2. Test run application (verify it works)${NC}"
echo -e "${GRAY} 3. Test offline build (verify cache is complete)${NC}"
echo -e "${GRAY} 4. Create offline bundle for air-gapped environment${NC}"
echo ""
echo -e "${CYAN}============================================================${NC}"
echo ""
# ============================================================================
# [1/20] Check Current Directory
# ============================================================================
echo -e "${YELLOW}==[1/20] Check Current Directory ==${NC}"
ROOT="$(pwd)"
echo "ROOT_DIR: $ROOT"
echo ""
# ============================================================================
# [2/20] Check Required Files
# ============================================================================
echo -e "${YELLOW}==[2/20] Check Required Files ==${NC}"
if [ ! -f "./gradlew" ]; then
echo -e "${RED}ERROR: gradlew not found. Run from project root.${NC}"
exit 1
fi
chmod +x ./gradlew
echo -e "${GREEN}[OK] gradlew${NC}"
BUILD_FILE=""
if [ -f "./build.gradle" ]; then
BUILD_FILE="build.gradle"
elif [ -f "./build.gradle.kts" ]; then
BUILD_FILE="build.gradle.kts"
else
echo -e "${RED}ERROR: build.gradle(.kts) not found.${NC}"
exit 1
fi
echo -e "${GREEN}[OK] $BUILD_FILE${NC}"
SETTINGS_FILE=""
if [ -f "./settings.gradle" ]; then
SETTINGS_FILE="settings.gradle"
echo -e "${GREEN}[OK] $SETTINGS_FILE${NC}"
elif [ -f "./settings.gradle.kts" ]; then
SETTINGS_FILE="settings.gradle.kts"
echo -e "${GREEN}[OK] $SETTINGS_FILE${NC}"
fi
echo ""
# ============================================================================
# [3/20] Check Gradle Wrapper
# ============================================================================
echo -e "${YELLOW}==[3/20] Check Gradle Wrapper ==${NC}"
WRAPPER_DIR="$ROOT/gradle/wrapper"
WRAPPER_JAR="$WRAPPER_DIR/gradle-wrapper.jar"
WRAPPER_PROP="$WRAPPER_DIR/gradle-wrapper.properties"
mkdir -p "$WRAPPER_DIR"
if [ ! -f "$WRAPPER_PROP" ]; then
echo -e "${RED}ERROR: gradle-wrapper.properties not found.${NC}"
exit 1
fi
if [ ! -f "$WRAPPER_JAR" ]; then
SEED_JAR="$ROOT/$WRAPPER_SEED_PATH/gradle-wrapper.jar"
if [ -f "$SEED_JAR" ]; then
cp "$SEED_JAR" "$WRAPPER_JAR"
echo -e "${GREEN}[OK] Wrapper jar injected from seed${NC}"
else
echo -e "${RED}ERROR: gradle-wrapper.jar missing${NC}"
exit 1
fi
else
echo -e "${GREEN}[OK] gradle-wrapper.jar exists${NC}"
fi
# Create seed backup
SEED_DIR="$ROOT/$WRAPPER_SEED_PATH"
if [ ! -d "$SEED_DIR" ]; then
mkdir -p "$SEED_DIR"
cp "$WRAPPER_JAR" "$SEED_DIR/gradle-wrapper.jar"
fi
echo ""
# ============================================================================
# [4/20] Set GRADLE_USER_HOME (Project Local)
# ============================================================================
echo -e "${YELLOW}==[4/20] Set GRADLE_USER_HOME ==${NC}"
OFFLINE_HOME="$ROOT/$OFFLINE_HOME_NAME"
mkdir -p "$OFFLINE_HOME"
export GRADLE_USER_HOME="$OFFLINE_HOME"
echo -e "${CYAN}GRADLE_USER_HOME = $GRADLE_USER_HOME${NC}"
echo -e "${GRAY}[INFO] All dependencies will be cached in project folder${NC}"
echo ""
# ============================================================================
# [5/20] Check Internet Connection
# ============================================================================
echo -e "${YELLOW}==[5/20] Check Internet Connection ==${NC}"
HAS_INTERNET=false
TEST_HOSTS=("plugins.gradle.org" "repo.maven.apache.org" "repo1.maven.org")
for TEST_HOST in "${TEST_HOSTS[@]}"; do
# macOS (BSD) ping's -W takes milliseconds, so use -t (overall timeout in seconds) instead
if ping -c 1 -t 3 "$TEST_HOST" &>/dev/null; then
HAS_INTERNET=true
echo -e "${GREEN}[OK] Connected to $TEST_HOST${NC}"
break
fi
done
if [ "$HAS_INTERNET" = false ]; then
# Try DNS resolution as fallback
if nslookup google.com &>/dev/null || host google.com &>/dev/null; then
HAS_INTERNET=true
echo -e "${GREEN}[OK] Internet available (DNS)${NC}"
fi
fi
if [ "$HAS_INTERNET" = false ]; then
echo ""
echo -e "${RED}============================================================${NC}"
echo -e "${RED} ERROR: No Internet Connection!${NC}"
echo -e "${RED}============================================================${NC}"
echo ""
echo -e "${YELLOW}This script requires internet for initial build.${NC}"
echo -e "${YELLOW}Please connect to internet and run again.${NC}"
echo ""
exit 1
fi
echo ""
# ============================================================================
# [6/20] Initial Gradle Setup
# ============================================================================
echo -e "${YELLOW}==[6/20] Initial Gradle Setup ==${NC}"
echo -e "${GRAY}[INFO] Downloading Gradle distribution...${NC}"
if ./gradlew --version &>/dev/null; then
GRADLE_VERSION=$(./gradlew --version 2>&1 | grep "^Gradle" | awk '{print $2}')
echo -e "${GREEN}[OK] Gradle $GRADLE_VERSION${NC}"
else
echo -e "${RED}[ERROR] Gradle setup failed${NC}"
exit 1
fi
echo ""
# ============================================================================
# [7/20] ONLINE BUILD - bootJar (Download All Dependencies)
# ============================================================================
echo -e "${YELLOW}==[7/20] ONLINE BUILD - bootJar ==${NC}"
echo ""
echo -e "${CYAN}============================================================${NC}"
echo -e "${CYAN} ONLINE BUILD (with Internet)${NC}"
echo -e "${CYAN} Downloading all dependencies to local cache${NC}"
echo -e "${CYAN}============================================================${NC}"
echo ""
BUILD_SUCCESS=false
# Run the build as the if-condition so the failure branch stays reachable under set -e
if ./gradlew clean bootJar --no-daemon; then
BUILD_SUCCESS=true
echo ""
echo -e "${GREEN}============================================================${NC}"
echo -e "${GREEN} ONLINE BUILD SUCCESS!${NC}"
echo -e "${GREEN}============================================================${NC}"
echo ""
if [ -d "./build/libs" ]; then
echo -e "${CYAN}JAR files:${NC}"
ls -lh ./build/libs/*.jar 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
fi
else
echo ""
echo -e "${RED}============================================================${NC}"
echo -e "${RED} BUILD FAILED!${NC}"
echo -e "${RED}============================================================${NC}"
echo ""
echo -e "${YELLOW}Build failed. Cannot continue.${NC}"
exit 1
fi
echo ""
# ============================================================================
# [8/20] Stop Daemons
# ============================================================================
echo -e "${YELLOW}==[8/20] Stop Daemons ==${NC}"
./gradlew --stop &>/dev/null || true
sleep 2
echo -e "${GREEN}[OK] Daemons stopped${NC}"
echo ""
# ============================================================================
# [9/20] ONLINE TEST - bootRun (Verify Application Works)
# ============================================================================
echo -e "${YELLOW}==[9/20] ONLINE TEST - bootRun ==${NC}"
echo ""
echo -e "${CYAN}============================================================${NC}"
echo -e "${CYAN} Testing application startup (timeout: ${BOOTRUN_TIMEOUT_SECONDS}s)${NC}"
echo -e "${CYAN} Will automatically stop after successful startup${NC}"
echo -e "${CYAN}============================================================${NC}"
echo ""
BOOTRUN_SUCCESS=false
# Use gtimeout (GNU coreutils) if available; otherwise run in the background and stop it manually below
if command -v gtimeout &>/dev/null; then
gtimeout ${BOOTRUN_TIMEOUT_SECONDS}s ./gradlew bootRun --no-daemon &
else
# Fallback: start in the background; the process is checked and killed below
./gradlew bootRun --no-daemon &
fi
BOOTRUN_PID=$!
sleep 10
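# Heuristic: if the bootRun process is still alive after the 10s wait, assume the
# application started; an immediate startup failure would already have exited.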
if ps -p $BOOTRUN_PID &>/dev/null; then
BOOTRUN_SUCCESS=true
echo ""
echo -e "${GREEN}[OK] Application started successfully${NC}"
kill $BOOTRUN_PID &>/dev/null || true
sleep 2
else
echo ""
echo -e "${YELLOW}[WARN] Application may not have started properly${NC}"
fi
# Cleanup - macOS process cleanup
pkill -f "gradle.*bootRun" &>/dev/null || true
sleep 2
echo ""
# ============================================================================
# [10/20] Stop Daemons Again
# ============================================================================
echo -e "${YELLOW}==[10/20] Stop Daemons Again ==${NC}"
./gradlew --stop &>/dev/null || true
sleep 2
echo -e "${GREEN}[OK] Daemons stopped${NC}"
echo ""
# ============================================================================
# [11/20] OFFLINE BUILD TEST (Verify Cache Completeness)
# ============================================================================
echo -e "${YELLOW}==[11/20] OFFLINE BUILD TEST ==${NC}"
echo ""
echo -e "${CYAN}============================================================${NC}"
echo -e "${CYAN} OFFLINE BUILD TEST (--offline flag)${NC}"
echo -e "${CYAN} Verifying all dependencies are cached${NC}"
echo -e "${CYAN}============================================================${NC}"
echo ""
OFFLINE_SUCCESS=false
if ./gradlew clean bootJar --offline --no-daemon; then
OFFLINE_SUCCESS=true
echo ""
echo -e "${GREEN}============================================================${NC}"
echo -e "${GREEN} OFFLINE BUILD TEST PASSED!${NC}"
echo -e "${GREEN}============================================================${NC}"
echo ""
echo -e "${GREEN}[OK] All dependencies are cached${NC}"
else
echo ""
echo -e "${RED}============================================================${NC}"
echo -e "${RED} OFFLINE BUILD TEST FAILED!${NC}"
echo -e "${RED}============================================================${NC}"
echo ""
echo -e "${YELLOW}Some dependencies may be missing from cache.${NC}"
echo -e "${YELLOW}The bundle may not work in air-gapped environment.${NC}"
echo ""
read -p "Continue anyway? (y/N): " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
exit 1
fi
fi
echo ""
# ============================================================================
# [12/20] Stop Daemons Before Archive
# ============================================================================
echo -e "${YELLOW}==[12/20] Stop Daemons Before Archive ==${NC}"
./gradlew --stop &>/dev/null || true
sleep 2
echo -e "${GREEN}[OK] Daemons stopped${NC}"
echo ""
# ============================================================================
# [13/20] Verify settings.gradle for Offline
# ============================================================================
echo -e "${YELLOW}==[13/20] Verify settings.gradle ==${NC}"
if [ -n "$SETTINGS_FILE" ]; then
if grep -q "mavenLocal()" "$SETTINGS_FILE" && grep -q "pluginManagement" "$SETTINGS_FILE"; then
echo -e "${GREEN}[OK] settings.gradle configured for offline${NC}"
else
echo -e "${YELLOW}[WARN] settings.gradle may need offline configuration${NC}"
echo -e "${GRAY}[INFO] Consider adding mavenLocal() to pluginManagement and repositories${NC}"
fi
else
echo -e "${GRAY}[INFO] No settings.gradle found${NC}"
fi
echo ""
# ============================================================================
# [14/20] Create Helper Scripts
# ============================================================================
echo -e "${YELLOW}==[14/20] Create Helper Scripts ==${NC}"
# run_offline_build.sh
cat > "$ROOT/run_offline_build.sh" << 'EOF'
#!/bin/bash
# run_offline_build.sh - Build JAR offline
export GRADLE_USER_HOME="$(pwd)/_offline_gradle_home"
echo "GRADLE_USER_HOME = $GRADLE_USER_HOME"
echo ""
./gradlew --offline bootJar --no-daemon
if [ $? -eq 0 ]; then
echo ""
echo "BUILD SUCCESS!"
echo ""
echo "JAR files:"
ls -lh ./build/libs/*.jar 2>/dev/null | awk '{print " " $9}'
else
echo "BUILD FAILED"
fi
EOF
chmod +x "$ROOT/run_offline_build.sh"
echo -e "${GREEN}[OK] run_offline_build.sh${NC}"
# run_offline_bootrun.sh
cat > "$ROOT/run_offline_bootrun.sh" << 'EOF'
#!/bin/bash
# run_offline_bootrun.sh - Run application offline
export GRADLE_USER_HOME="$(pwd)/_offline_gradle_home"
echo "GRADLE_USER_HOME = $GRADLE_USER_HOME"
echo ""
echo "Starting application (Ctrl+C to stop)..."
echo ""
./gradlew --offline bootRun --no-daemon
EOF
chmod +x "$ROOT/run_offline_bootrun.sh"
echo -e "${GREEN}[OK] run_offline_bootrun.sh${NC}"
echo ""
# ============================================================================
# [15/20] Final Daemon Cleanup
# ============================================================================
echo -e "${YELLOW}==[15/20] Final Daemon Cleanup ==${NC}"
./gradlew --stop &>/dev/null || true
sleep 2
echo -e "${GREEN}[OK] Daemons stopped${NC}"
echo ""
# ============================================================================
# [16/20] Clean Lock Files
# ============================================================================
echo -e "${YELLOW}==[16/20] Clean Lock Files ==${NC}"
DAEMON_DIR="$OFFLINE_HOME/daemon"
if [ -d "$DAEMON_DIR" ]; then
rm -rf "$DAEMON_DIR" 2>/dev/null || true
fi
find "$OFFLINE_HOME" -type f \( -name "*.lock" -o -name "*.log" -o -name "*.tmp" \) -delete 2>/dev/null || true
echo -e "${GREEN}[OK] Lock files cleaned${NC}"
echo ""
# ============================================================================
# [17/20] Calculate Cache Size
# ============================================================================
echo -e "${YELLOW}==[17/20] Cache Summary ==${NC}"
CACHES_DIR="$OFFLINE_HOME/caches"
WRAPPER_DISTS="$OFFLINE_HOME/wrapper/dists"
TOTAL_SIZE=0
if [ -d "$CACHES_DIR" ]; then
# macOS uses different options for du
if du -k "$CACHES_DIR" &>/dev/null; then
SIZE=$(du -sk "$CACHES_DIR" 2>/dev/null | cut -f1)
SIZE=$((SIZE * 1024)) # Convert to bytes
else
SIZE=0
fi
TOTAL_SIZE=$((TOTAL_SIZE + SIZE))
SIZE_MB=$(awk "BEGIN {printf \"%.2f\", $SIZE / 1048576}")
echo -e "${CYAN}[INFO] Dependencies: ${SIZE_MB} MB${NC}"
fi
if [ -d "$WRAPPER_DISTS" ]; then
if du -k "$WRAPPER_DISTS" &>/dev/null; then
SIZE=$(du -sk "$WRAPPER_DISTS" 2>/dev/null | cut -f1)
SIZE=$((SIZE * 1024))
else
SIZE=0
fi
TOTAL_SIZE=$((TOTAL_SIZE + SIZE))
SIZE_MB=$(awk "BEGIN {printf \"%.2f\", $SIZE / 1048576}")
echo -e "${CYAN}[INFO] Gradle dist: ${SIZE_MB} MB${NC}"
fi
TOTAL_MB=$(awk "BEGIN {printf \"%.2f\", $TOTAL_SIZE / 1048576}")
echo -e "${CYAN}[INFO] Total cache: ${TOTAL_MB} MB${NC}"
echo ""
# ============================================================================
# [18/20] Create Archive
# ============================================================================
echo -e "${YELLOW}==[18/20] Create Archive ==${NC}"
BASE_NAME=$(basename "$ROOT")
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")
PARENT=$(dirname "$ROOT")
ARCHIVE_PATH="${PARENT}/${BASE_NAME}_offline_bundle_${TIMESTAMP}.tar.gz"
echo "Archive: $ARCHIVE_PATH"
echo -e "${GRAY}[INFO] Creating archive (this may take several minutes)...${NC}"
# macOS tar with BSD options
if ! tar -czf "$ARCHIVE_PATH" \
--exclude=".git" \
--exclude=".idea" \
--exclude=".DS_Store" \
--exclude="*.log" \
--exclude="*.lock" \
--exclude="_offline_gradle_home/daemon" \
--exclude="_offline_gradle_home/native" \
--exclude="_offline_gradle_home/jdks" \
--exclude="build" \
--exclude="out" \
--exclude=".gradle" \
-C "$ROOT" .
if [ $? -ne 0 ]; then
echo -e "${RED}ERROR: tar failed${NC}"
exit 1
fi
# macOS stat command
ARCHIVE_SIZE=$(stat -f%z "$ARCHIVE_PATH" 2>/dev/null)
ARCHIVE_SIZE_MB=$(awk "BEGIN {printf \"%.2f\", $ARCHIVE_SIZE / 1048576}")
echo -e "${GREEN}[OK] Archive created: ${ARCHIVE_SIZE_MB} MB${NC}"
echo ""
# ============================================================================
# [19/20] Verify Archive
# ============================================================================
echo -e "${YELLOW}==[19/20] Verify Archive ==${NC}"
CHECKS=(
"gradle/wrapper/gradle-wrapper.jar"
"gradlew"
"_offline_gradle_home/caches"
"run_offline_build.sh"
)
for CHECK in "${CHECKS[@]}"; do
if tar -tzf "$ARCHIVE_PATH" | grep -q "$CHECK"; then
echo -e " ${GREEN}[OK] $CHECK${NC}"
else
echo -e " ${YELLOW}[WARN] $CHECK${NC}"
fi
done
echo ""
# ============================================================================
# [20/20] Complete
# ============================================================================
echo -e "${GREEN}============================================================${NC}"
echo -e "${GREEN} BUNDLE CREATION COMPLETE!${NC}"
echo -e "${GREEN}============================================================${NC}"
echo ""
echo -e "${CYAN}Archive: $ARCHIVE_PATH${NC}"
echo -e "${CYAN}Size: ${ARCHIVE_SIZE_MB} MB${NC}"
echo ""
echo -e "${CYAN}============================================================${NC}"
echo -e "${CYAN} Test Results${NC}"
echo -e "${CYAN}============================================================${NC}"
if [ "$BUILD_SUCCESS" = true ]; then
echo -e " Online build (bootJar): ${GREEN}PASSED${NC}"
else
echo -e " Online build (bootJar): ${RED}FAILED${NC}"
fi
if [ "$BOOTRUN_SUCCESS" = true ]; then
echo -e " Online test (bootRun): ${GREEN}PASSED${NC}"
else
echo -e " Online test (bootRun): ${YELLOW}SKIPPED${NC}"
fi
if [ "$OFFLINE_SUCCESS" = true ]; then
echo -e " Offline build test: ${GREEN}PASSED${NC}"
else
echo -e " Offline build test: ${RED}FAILED${NC}"
fi
echo ""
echo -e "${YELLOW}============================================================${NC}"
echo -e "${YELLOW} Usage in Air-gapped Environment${NC}"
echo -e "${YELLOW}============================================================${NC}"
echo ""
echo -e "${WHITE}Option 1: Use unpack script${NC}"
echo -e "${GRAY} ./unpack_and_offline_build_airgap.sh${NC}"
echo ""
echo -e "${WHITE}Option 2: Manual extraction${NC}"
echo -e "${GRAY} tar -xzf <archive>.tar.gz${NC}"
echo -e "${GRAY} cd <project>${NC}"
echo -e "${GRAY} ./run_offline_build.sh${NC}"
echo ""
echo -e "${WHITE}Option 3: Direct commands${NC}"
echo -e "${GRAY} export GRADLE_USER_HOME=\"./_offline_gradle_home\"${NC}"
echo -e "${GRAY} ./gradlew --offline bootJar --no-daemon${NC}"
echo ""

View File

@@ -0,0 +1,359 @@
#!/bin/bash
# unpack_and_offline_build_airgap_macos.sh
# ============================================================================
# Execution Environment: OFFLINE (Air-gapped, No Internet)
# Purpose: Extract bundle and run offline build
# ============================================================================
# macOS Bash Script
# Version: 3.1
#
# IMPORTANT: This script automatically:
# 1. Extracts the archive
# 2. Sets GRADLE_USER_HOME to project local cache
# 3. Configures settings.gradle for offline resolution
# 4. Runs build with --offline flag
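#
# USAGE (archive path optional; the newest *.tar.gz in the current directory is
# used when omitted):
#   ./unpack_and_offline_build_airgap_macos.sh [bundle.tar.gz]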
# ============================================================================
set -e
# ============================================================================
# Configuration
# ============================================================================
WRAPPER_SEED_PATH="wrapper_jar_seed"
OFFLINE_HOME_NAME="_offline_gradle_home"
# Color codes
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
GRAY='\033[0;90m'
WHITE='\033[1;37m'
NC='\033[0m' # No Color
echo ""
echo -e "${CYAN}============================================================${NC}"
echo -e "${CYAN} Gradle Offline Build Runner (macOS)${NC}"
echo -e "${CYAN} Environment: AIR-GAPPED (No Internet)${NC}"
echo -e "${CYAN} Mode: Fully Offline (--offline enforced)${NC}"
echo -e "${CYAN}============================================================${NC}"
echo ""
# ============================================================================
# [1/16] Check Current Directory
# ============================================================================
echo -e "${YELLOW}==[1/16] Check Current Directory ==${NC}"
START_DIR="$(pwd)"
echo "PWD: $START_DIR"
echo ""
# ============================================================================
# [2/16] Select Archive
# ============================================================================
echo -e "${YELLOW}==[2/16] Select Archive ==${NC}"
ARCHIVE=""
if [ $# -ge 1 ]; then
ARCHIVE="$1"
fi
if [ -z "$ARCHIVE" ]; then
# Auto-detect most recent .tar.gz file (macOS compatible)
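# stat -f "%m %N" prints "<mtime> <path>"; reverse numeric sort puts the newest archive first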
ARCHIVE=$(find "$START_DIR" -maxdepth 1 -type f \( -name "*.tar.gz" -o -name "*.tgz" \) -exec stat -f "%m %N" {} \; 2>/dev/null | sort -rn | head -1 | cut -d' ' -f2-)
if [ -z "$ARCHIVE" ]; then
echo -e "${RED}[ERROR] No archive found${NC}"
ls -lh "$START_DIR"
exit 1
fi
echo -e "${CYAN}[AUTO] $(basename "$ARCHIVE")${NC}"
else
if [ ! -f "$ARCHIVE" ]; then
ARCHIVE="$START_DIR/$ARCHIVE"
fi
echo -e "${CYAN}[USER] $(basename "$ARCHIVE")${NC}"
fi
if [ ! -f "$ARCHIVE" ]; then
echo -e "${RED}ERROR: Archive not found: $ARCHIVE${NC}"
exit 1
fi
# macOS stat command
ARCHIVE_SIZE=$(stat -f%z "$ARCHIVE" 2>/dev/null)
ARCHIVE_SIZE_MB=$(awk "BEGIN {printf \"%.2f\", $ARCHIVE_SIZE / 1048576}")
echo "Size: ${ARCHIVE_SIZE_MB} MB"
echo ""
# ============================================================================
# [3/16] Check tar
# ============================================================================
echo -e "${YELLOW}==[3/16] Check tar ==${NC}"
if ! command -v tar &>/dev/null; then
echo -e "${RED}ERROR: tar not found${NC}"
exit 1
fi
echo -e "${GREEN}[OK] tar found${NC}"
echo ""
# ============================================================================
# [4/16] Extract Archive
# ============================================================================
echo -e "${YELLOW}==[4/16] Extract Archive ==${NC}"
echo -e "${GRAY}[INFO] Extracting...${NC}"
if ! tar -xzf "$ARCHIVE" -C "$START_DIR"; then
echo -e "${RED}ERROR: Extraction failed${NC}"
exit 1
fi
echo -e "${GREEN}[OK] Extracted${NC}"
echo ""
# ============================================================================
# [5/16] Set Permissions
# ============================================================================
echo -e "${YELLOW}==[5/16] Set Permissions ==${NC}"
chmod -R u+rw "$START_DIR" 2>/dev/null || true
# Remove extended attributes that macOS may add
xattr -cr "$START_DIR" 2>/dev/null || true
echo -e "${GREEN}[OK] Permissions set${NC}"
echo ""
# ============================================================================
# [6/16] Find Project Root
# ============================================================================
echo -e "${YELLOW}==[6/16] Find Project Root ==${NC}"
GRADLEW=$(find "$START_DIR" -name "gradlew" -type f 2>/dev/null | sort | head -1)
if [ -z "$GRADLEW" ]; then
echo -e "${RED}ERROR: gradlew not found${NC}"
exit 1
fi
PROJECT_DIR=$(dirname "$GRADLEW")
echo -e "${CYAN}Project: $PROJECT_DIR${NC}"
cd "$PROJECT_DIR"
echo ""
# ============================================================================
# [7/16] Fix Permissions
# ============================================================================
echo -e "${YELLOW}==[7/16] Fix Permissions ==${NC}"
chmod +x ./gradlew
find . -name "*.sh" -type f -exec chmod +x {} \; 2>/dev/null || true
# Remove quarantine attributes that macOS adds to downloaded files
xattr -d com.apple.quarantine ./gradlew 2>/dev/null || true
find . -name "*.jar" -exec xattr -d com.apple.quarantine {} \; 2>/dev/null || true
echo -e "${GREEN}[OK] Permissions fixed${NC}"
echo ""
# ============================================================================
# [8/16] Verify Wrapper
# ============================================================================
echo -e "${YELLOW}==[8/16] Verify Wrapper ==${NC}"
WRAPPER_DIR="$PROJECT_DIR/gradle/wrapper"
WRAPPER_JAR="$WRAPPER_DIR/gradle-wrapper.jar"
WRAPPER_PROP="$WRAPPER_DIR/gradle-wrapper.properties"
if [ ! -f "$WRAPPER_PROP" ]; then
echo -e "${RED}ERROR: gradle-wrapper.properties missing${NC}"
exit 1
fi
if [ ! -f "$WRAPPER_JAR" ]; then
SEED_JAR="$PROJECT_DIR/$WRAPPER_SEED_PATH/gradle-wrapper.jar"
if [ -f "$SEED_JAR" ]; then
mkdir -p "$WRAPPER_DIR"
cp "$SEED_JAR" "$WRAPPER_JAR"
echo -e "${GREEN}[OK] Injected from seed${NC}"
else
echo -e "${RED}ERROR: wrapper jar missing${NC}"
exit 1
fi
else
echo -e "${GREEN}[OK] Wrapper verified${NC}"
fi
echo ""
# ============================================================================
# [9/16] Set GRADLE_USER_HOME
# ============================================================================
echo -e "${YELLOW}==[9/16] Set GRADLE_USER_HOME ==${NC}"
OFFLINE_HOME="$PROJECT_DIR/$OFFLINE_HOME_NAME"
if [ ! -d "$OFFLINE_HOME" ]; then
echo -e "${RED}ERROR: _offline_gradle_home not found in archive${NC}"
exit 1
fi
export GRADLE_USER_HOME="$OFFLINE_HOME"
echo -e "${CYAN}GRADLE_USER_HOME = $GRADLE_USER_HOME${NC}"
# Check cache
CACHES_DIR="$OFFLINE_HOME/caches"
if [ -d "$CACHES_DIR" ]; then
# macOS du command
if du -k "$CACHES_DIR" &>/dev/null; then
CACHE_SIZE=$(du -sk "$CACHES_DIR" 2>/dev/null | cut -f1)
CACHE_SIZE=$((CACHE_SIZE * 1024))
else
CACHE_SIZE=0
fi
CACHE_SIZE_MB=$(awk "BEGIN {printf \"%.2f\", $CACHE_SIZE / 1048576}")
echo -e "${CYAN}[INFO] Cache size: ${CACHE_SIZE_MB} MB${NC}"
else
echo -e "${YELLOW}[WARN] No cache folder found${NC}"
fi
echo ""
# ============================================================================
# [10/16] Verify settings.gradle
# ============================================================================
echo -e "${YELLOW}==[10/16] Verify settings.gradle ==${NC}"
SETTINGS_FILE=""
if [ -f "./settings.gradle" ]; then
SETTINGS_FILE="settings.gradle"
elif [ -f "./settings.gradle.kts" ]; then
SETTINGS_FILE="settings.gradle.kts"
fi
if [ -n "$SETTINGS_FILE" ]; then
if grep -q "mavenLocal()" "$SETTINGS_FILE" && grep -q "pluginManagement" "$SETTINGS_FILE"; then
echo -e "${GREEN}[OK] settings.gradle configured for offline${NC}"
else
echo -e "${YELLOW}[WARN] settings.gradle may not be configured for offline${NC}"
echo -e "${GRAY}[INFO] Build may fail if plugins not cached${NC}"
fi
fi
echo ""
# ============================================================================
# [11/16] Test Gradle
# ============================================================================
echo -e "${YELLOW}==[11/16] Test Gradle ==${NC}"
GRADLE_WORKS=false
if ./gradlew --offline --version &>/dev/null; then
GRADLE_WORKS=true
echo -e "${GREEN}[OK] Gradle working in offline mode${NC}"
else
echo -e "${YELLOW}[WARN] Gradle --version failed${NC}"
fi
echo ""
# ============================================================================
# [12/16] Stop Daemon
# ============================================================================
echo -e "${YELLOW}==[12/16] Stop Daemon ==${NC}"
./gradlew --stop &>/dev/null || true
sleep 2
echo -e "${GREEN}[OK] Daemon stopped${NC}"
echo ""
# ============================================================================
# [13/16] Run Offline Build
# ============================================================================
echo -e "${YELLOW}==[13/16] Run Offline Build ==${NC}"
echo ""
echo -e "${CYAN}============================================================${NC}"
echo -e "${CYAN} Building with --offline flag${NC}"
echo -e "${CYAN} All dependencies from local cache${NC}"
echo -e "${CYAN}============================================================${NC}"
echo ""
BUILD_SUCCESS=false
BUILD_TASK=""
# Try bootJar
echo -e "${GRAY}[TRY] --offline bootJar...${NC}"
if ./gradlew --offline clean bootJar --no-daemon; then
BUILD_SUCCESS=true
BUILD_TASK="bootJar"
fi
# Try jar
if [ "$BUILD_SUCCESS" = false ]; then
echo -e "${GRAY}[TRY] --offline jar...${NC}"
if ./gradlew --offline clean jar --no-daemon; then
BUILD_SUCCESS=true
BUILD_TASK="jar"
fi
fi
# Try build
if [ "$BUILD_SUCCESS" = false ]; then
echo -e "${GRAY}[TRY] --offline build...${NC}"
if ./gradlew --offline build --no-daemon; then
BUILD_SUCCESS=true
BUILD_TASK="build"
fi
fi
echo ""
if [ "$BUILD_SUCCESS" = true ]; then
echo -e "${GREEN}============================================================${NC}"
echo -e "${GREEN} BUILD SUCCESS! (task: $BUILD_TASK)${NC}"
echo -e "${GREEN}============================================================${NC}"
else
echo -e "${RED}============================================================${NC}"
echo -e "${RED} BUILD FAILED!${NC}"
echo -e "${RED}============================================================${NC}"
echo ""
echo -e "${YELLOW}Possible causes:${NC}"
echo -e "${WHITE} - Dependencies not in cache${NC}"
echo -e "${WHITE} - Plugin resolution failed${NC}"
echo -e "${WHITE} - Need complete build in online env first${NC}"
exit 1
fi
echo ""
# ============================================================================
# [14/16] Show Build Output
# ============================================================================
echo -e "${YELLOW}==[14/16] Build Output ==${NC}"
LIBS_DIR="$PROJECT_DIR/build/libs"
if [ -d "$LIBS_DIR" ]; then
echo -e "${CYAN}build/libs contents:${NC}"
ls -lh "$LIBS_DIR"/*.jar 2>/dev/null | awk '{printf " %-40s %10s\n", $9, $5}'
MAIN_JAR=$(find "$LIBS_DIR" -name "*.jar" -type f ! -name "*-plain.jar" ! -name "*-sources.jar" ! -name "*-javadoc.jar" 2>/dev/null | head -1)
else
echo -e "${YELLOW}[WARN] build/libs not found${NC}"
fi
echo ""
# ============================================================================
# [15/16] Run Instructions
# ============================================================================
echo -e "${YELLOW}==[15/16] Run Instructions ==${NC}"
echo ""
if [ -n "$MAIN_JAR" ]; then
echo -e "${CYAN}To run the application:${NC}"
echo -e "${WHITE} java -jar $(basename "$MAIN_JAR")${NC}"
echo ""
fi
echo -e "${CYAN}To rebuild:${NC}"
echo -e "${WHITE} export GRADLE_USER_HOME=\"./_offline_gradle_home\"${NC}"
echo -e "${WHITE} ./gradlew --offline bootJar --no-daemon${NC}"
echo ""
# ============================================================================
# [16/16] Complete
# ============================================================================
echo -e "${GREEN}============================================================${NC}"
echo -e "${GREEN} Offline Build Complete!${NC}"
echo -e "${GREEN}============================================================${NC}"
echo ""
echo -e "${CYAN}Project: $PROJECT_DIR${NC}"
echo ""

View File

@@ -1,67 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.postgres.core;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.Basic;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.DetectMastReq;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.DetectMastSearch;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.FeaturePnuDto;
import com.kamco.cd.kamcoback.Innopam.postgres.entity.DetectMastEntity;
import com.kamco.cd.kamcoback.Innopam.postgres.repository.DetectMastRepository;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
public class DetectMastCoreService {
private final DetectMastRepository detectMastRepository;
public void saveDetectMast(DetectMastReq detectMast) {
DetectMastEntity detectMastEntity = new DetectMastEntity();
detectMastEntity.setCprsBfYr(detectMast.getCprsBfYr());
detectMastEntity.setCprsAdYr(detectMast.getCprsAdYr());
detectMastEntity.setDtctSno(detectMast.getDtctSno());
detectMastEntity.setPathNm(detectMast.getPathNm());
detectMastEntity.setCrtEpno(detectMast.getCrtEpno());
detectMastEntity.setCrtIp(detectMast.getCrtIp());
detectMastRepository.save(detectMastEntity);
}
public List<Basic> selectDetectMast(DetectMastSearch detectMast) {
return detectMastRepository.findDetectMastList(detectMast).stream()
.map(
e ->
new DetectMastDto.Basic(
e.getId(),
e.getCprsBfYr(),
e.getCprsAdYr(),
e.getDtctSno(),
e.getPathNm(),
e.getCrtEpno(),
e.getCrtIp()))
.toList();
}
public Basic selectDetectMast(Long id) {
DetectMastEntity e =
detectMastRepository.findById(id).orElseThrow(() -> new RuntimeException("No registered data found."));
return new DetectMastDto.Basic(
e.getId(),
e.getCprsBfYr(),
e.getCprsAdYr(),
e.getDtctSno(),
e.getPathNm(),
e.getCrtEpno(),
e.getCrtIp());
}
public String findPnuData(DetectMastSearch detectMast) {
DetectMastEntity detectMastEntity = detectMastRepository.findPnuData(detectMast);
return detectMastEntity.getPathNm();
}
public Integer updatePnu(List<FeaturePnuDto> list) {
return detectMastRepository.updateGeomPnu(list);
}
}

View File

@@ -1,85 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
@Getter
@Setter
@Entity
@Table(name = "detect_mast")
public class DetectMastEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "detect_mast_id_gen")
@SequenceGenerator(
name = "detect_mast_id_gen",
sequenceName = "seq_detect_mast_id",
allocationSize = 1)
@Column(name = "dtct_mst_id", nullable = false)
private Long id;
@NotNull
@ColumnDefault("gen_random_uuid()")
@Column(name = "dtct_mst_uuid", nullable = false)
private UUID dtctMstUuid = UUID.randomUUID();
@Size(max = 4)
@NotNull
@Column(name = "cprs_bf_yr", nullable = false, length = 4)
private String cprsBfYr;
@Size(max = 4)
@NotNull
@Column(name = "cprs_ad_yr", nullable = false, length = 4)
private String cprsAdYr;
@NotNull
@Column(name = "dtct_sno", nullable = false)
private Integer dtctSno;
@NotNull
@Column(name = "path_nm", nullable = false, length = Integer.MAX_VALUE)
private String pathNm;
@Size(max = 50)
@Column(name = "feature_id", length = 50)
private String featureId;
@Size(max = 30)
@NotNull
@Column(name = "crt_epno", nullable = false, length = 30)
private String crtEpno;
@Size(max = 45)
@NotNull
@Column(name = "crt_ip", nullable = false, length = 45)
private String crtIp;
@NotNull
@ColumnDefault("now()")
@Column(name = "crt_dttm", nullable = false)
private ZonedDateTime crtDttm = ZonedDateTime.now();
@Size(max = 30)
@Column(name = "chg_epno", length = 30)
private String chgEpno;
@Size(max = 45)
@Column(name = "chg_ip", length = 45)
private String chgIp;
@Column(name = "chg_dttm")
private ZonedDateTime chgDttm;
}

View File

@@ -1,48 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
@Getter
@Setter
@Entity
@Table(name = "detect_mast_pnu")
public class DetectMastPnuEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "detect_mast_pnu_id_gen")
@SequenceGenerator(
name = "detect_mast_pnu_id_gen",
sequenceName = "seq_detect_mast_pnu_id",
allocationSize = 1)
@Column(name = "dtct_mst_pnu_id", nullable = false)
private Long id;
@NotNull
@ColumnDefault("gen_random_uuid()")
@Column(name = "detect_mast_pnu_uuid", nullable = false)
private UUID detectMastPnuUuid;
@NotNull
@Column(name = "dtct_mst_id", nullable = false)
private Long dtctMstId;
@Size(max = 4)
@NotNull
@Column(name = "pnu", nullable = false, length = 4)
private String pnu;
@Column(name = "polygon", length = Integer.MAX_VALUE)
private String polygon;
}

View File

@@ -1,7 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.postgres.repository;
import com.kamco.cd.kamcoback.Innopam.postgres.entity.DetectMastPnuEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface DetectMastPnuRepository
extends JpaRepository<DetectMastPnuEntity, Long>, DetectMastPnuRepositoryCustom {}

View File

@@ -1,3 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.postgres.repository;
public interface DetectMastPnuRepositoryCustom {}

View File

@@ -1,7 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.postgres.repository;
import com.kamco.cd.kamcoback.Innopam.postgres.entity.DetectMastEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface DetectMastRepository
extends JpaRepository<DetectMastEntity, Long>, DetectMastRepositoryCustom {}

View File

@@ -1,15 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.postgres.repository;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.DetectMastSearch;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.FeaturePnuDto;
import com.kamco.cd.kamcoback.Innopam.postgres.entity.DetectMastEntity;
import java.util.List;
public interface DetectMastRepositoryCustom {
public List<DetectMastEntity> findDetectMastList(DetectMastSearch detectMast);
public DetectMastEntity findPnuData(DetectMastSearch detectMast);
Integer updateGeomPnu(List<FeaturePnuDto> list);
}

View File

@@ -1,94 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.postgres.repository;
import static com.kamco.cd.kamcoback.Innopam.postgres.entity.QDetectMastEntity.detectMastEntity;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.DetectMastSearch;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.FeaturePnuDto;
import com.kamco.cd.kamcoback.Innopam.postgres.entity.DetectMastEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class DetectMastRepositoryImpl implements DetectMastRepositoryCustom {
private final EntityManager em;
private final JPAQueryFactory queryFactory;
private final ObjectMapper objectMapper;
@Override
public List<DetectMastEntity> findDetectMastList(DetectMastSearch detectMast) {
BooleanBuilder whereBuilder = new BooleanBuilder();
if (StringUtils.isNotBlank(detectMast.getCprsAdYr())) {
whereBuilder.and(detectMastEntity.cprsAdYr.eq(detectMast.getCprsAdYr()));
}
if (StringUtils.isNotBlank(detectMast.getCprsBfYr())) {
whereBuilder.and(detectMastEntity.cprsBfYr.eq(detectMast.getCprsBfYr()));
}
if (detectMast.getDtctSno() != null) {
whereBuilder.and(detectMastEntity.dtctSno.eq(detectMast.getDtctSno()));
}
return queryFactory.select(detectMastEntity).from(detectMastEntity).where(whereBuilder).fetch();
}
@Override
public DetectMastEntity findPnuData(DetectMastSearch detectMast) {
BooleanBuilder whereBuilder = new BooleanBuilder();
whereBuilder.and(detectMastEntity.cprsAdYr.eq(detectMast.getCprsAdYr()));
whereBuilder.and(detectMastEntity.cprsBfYr.eq(detectMast.getCprsBfYr()));
whereBuilder.and(detectMastEntity.dtctSno.eq(detectMast.getDtctSno()));
if (detectMast.getFeatureId() != null) {
whereBuilder.and(detectMastEntity.featureId.eq(detectMast.getFeatureId()));
}
return queryFactory
.select(detectMastEntity)
.from(detectMastEntity)
.where(whereBuilder)
.fetchOne();
}
@Override
public Integer updateGeomPnu(List<FeaturePnuDto> list) {
if (list == null || list.isEmpty()) {
return 0;
}
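// Expected :json payload (serialized from FeaturePnuDto):
//   [{"featureId":"<uuid>","pnu":"<numeric string>"}, ...]
// featureId is cast to uuid and matched against the geometry table's uuid column.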
String sql =
"""
UPDATE tb_map_sheet_anal_data_inference_geom g
SET pnu = j.pnu
FROM (
SELECT
(elem->>'featureId')::uuid AS feature_uuid,
(elem->>'pnu')::bigint AS pnu
FROM jsonb_array_elements(CAST(:json AS jsonb)) AS elem
) j
WHERE g.uuid = j.feature_uuid;
""";
String json = "";
try {
json = objectMapper.writeValueAsString(list);
} catch (JsonProcessingException e) {
throw new RuntimeException("PNU update failed", e);
}
return em.createNativeQuery(sql).setParameter("json", json).executeUpdate();
}
}

View File

@@ -1,153 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.service;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.Basic;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.DetectMastReq;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.DetectMastSearch;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.FeaturePnuDto;
import com.kamco.cd.kamcoback.Innopam.postgres.core.DetectMastCoreService;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.Stream;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@Transactional(readOnly = true)
@RequiredArgsConstructor
public class DetectMastService {
@Value("${spring.profiles.active:local}")
private String profile;
private final DetectMastCoreService detectMastCoreService;
private final JsonFactory jsonFactory = new JsonFactory();
@Transactional
public void saveDetectMast(DetectMastReq detectMast) {
detectMastCoreService.saveDetectMast(detectMast);
//
// String dirPath =
// "local".equals(profile)
// ? "/Users/bokmin/detect/result/2023_2024/4"
// : detectMast.getPathNm();
//
// List<FeaturePnuDto> list = this.extractFeaturePnusRandom(dirPath);
}
public List<Basic> selectDetectMast(DetectMastSearch detectMast) {
return detectMastCoreService.selectDetectMast(detectMast);
}
public Basic selectDetectMast(Long id) {
return detectMastCoreService.selectDetectMast(id);
}
/** GeoJSON → polygon_id + random PNU */
public List<FeaturePnuDto> findPnuData(DetectMastSearch detectMast) {
String dirPath =
"local".equals(profile)
? "/Users/bokmin/detect/result/"
+ detectMast.getCprsBfYr()
+ "_"
+ detectMast.getCprsAdYr()
+ "/"
+ detectMast.getDtctSno()
: detectMastCoreService.findPnuData(detectMast);
return extractFeaturePnusRandom(dirPath);
}
public FeaturePnuDto selectPnuDetail(UUID uuid) {
FeaturePnuDto dto = new FeaturePnuDto();
dto.setPnu(randomPnu());
dto.setFeatureId(uuid.toString());
return dto;
}
@Transactional
public Integer updatePnuData(DetectMastSearch detectMast) {
String dirPath =
"local".equals(profile)
? "/Users/bokmin/detect/result/"
+ detectMast.getCprsBfYr()
+ "_"
+ detectMast.getCprsAdYr()
+ "/"
+ detectMast.getDtctSno()
: detectMastCoreService.findPnuData(detectMast);
List<FeaturePnuDto> list = extractFeaturePnusRandom(dirPath);
return detectMastCoreService.updatePnu(list);
}
/** Extracts only polygon_id values from .geojson files, including subdirectories. Parallel streams removed: IO-bound work with parallel is almost always slower. */
private List<FeaturePnuDto> extractFeaturePnusRandom(String dirPath) {
Path basePath = Paths.get(dirPath);
if (!Files.isDirectory(basePath)) {
System.err.println("유효하지 않은 디렉터리: " + dirPath);
return List.of();
}
List<FeaturePnuDto> out = new ArrayList<>(4096);
try (Stream<Path> stream = Files.walk(basePath)) {
stream
.filter(Files::isRegularFile)
.filter(p -> p.toString().toLowerCase().endsWith(".geojson"))
.forEach(
p -> {
try (InputStream in = Files.newInputStream(p);
JsonParser parser = jsonFactory.createParser(in)) {
while (parser.nextToken() != null) {
if (parser.currentToken() == JsonToken.FIELD_NAME
&& "polygon_id".equals(parser.getCurrentName())) {
JsonToken next = parser.nextToken(); // advance to the value token
if (next == JsonToken.VALUE_STRING) {
String polygonId = parser.getValueAsString();
out.add(new FeaturePnuDto(polygonId, randomPnu()));
}
}
}
} catch (Exception e) {
// log per-file failures minimally
System.err.println("GeoJSON parsing failed: " + p.getFileName() + " / " + e.getMessage());
}
});
} catch (Exception e) {
System.err.println("디렉터리 탐색 실패: " + e.getMessage());
return List.of();
}
return out;
}
/** Generates a random PNU (temporary): legal-dong code (5) + mountain flag (1) + main lot number (4) + sub lot number (4) = 14 digits */
private String randomPnu() {
ThreadLocalRandom r = ThreadLocalRandom.current();
String dongCode = String.format("%05d", r.nextInt(10000, 99999));
String san = r.nextBoolean() ? "1" : "2";
String bon = String.format("%04d", r.nextInt(1, 10000));
String bu = String.format("%04d", r.nextInt(0, 10000));
return dongCode + san + bon + bu;
}
}

View File

@@ -1,48 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.utils;
import com.fasterxml.jackson.databind.JsonNode;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.MultiPolygon;
import org.locationtech.jts.geom.Polygon;
public class GeoJsonGeometryConverter {
private static final GeometryFactory GF = new GeometryFactory();
public static Geometry toGeometry(JsonNode geomNode) {
String type = geomNode.path("type").asText();
if ("Polygon".equals(type)) {
return toPolygon(geomNode.path("coordinates"));
}
if ("MultiPolygon".equals(type)) {
return toMultiPolygon(geomNode.path("coordinates"));
}
return null;
}
private static Polygon toPolygon(JsonNode coords) {
LinearRing shell = GF.createLinearRing(toCoords(coords.get(0)));
return GF.createPolygon(shell);
}
private static MultiPolygon toMultiPolygon(JsonNode coords) {
Polygon[] polys = new Polygon[coords.size()];
for (int i = 0; i < coords.size(); i++) {
polys[i] = toPolygon(coords.get(i));
}
return GF.createMultiPolygon(polys);
}
private static Coordinate[] toCoords(JsonNode ring) {
Coordinate[] c = new Coordinate[ring.size() + 1];
for (int i = 0; i < ring.size(); i++) {
c[i] = new Coordinate(ring.get(i).get(0).asDouble(), ring.get(i).get(1).asDouble());
}
c[c.length - 1] = c[0];
return c;
}
}

View File

@@ -1,44 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.utils;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
public class GeoJsonLoader {
private final ObjectMapper om = new ObjectMapper();
public GeoJsonFile load(File geoJsonFile) throws Exception {
JsonNode root = om.readTree(geoJsonFile);
long mapId = root.path("properties").path("map_id").asLong(-1);
if (mapId <= 0) {
throw new IllegalStateException(
"GeoJSON top-level properties.map_id 없음: " + geoJsonFile.getName());
}
List<JsonNode> features = new ArrayList<>();
root.path("features").forEach(features::add);
return new GeoJsonFile(mapId, features);
}
/** Extracts polygon_id from a feature */
public static String polygonId(JsonNode feature) {
return feature.path("properties").path("polygon_id").asText(null);
}
public static class GeoJsonFile {
public final long mapId;
public final List<JsonNode> features;
public GeoJsonFile(long mapId, List<JsonNode> features) {
this.mapId = mapId;
this.features = features;
}
}
}

View File

@@ -1,17 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.utils;
public class MapIdUtils {
private MapIdUtils() {
// util class
}
/** map_id → sido (province) code, e.g. 34602060 → "34" */
public static String sidoCodeFromMapId(long mapId) {
String s = String.valueOf(mapId);
if (s.length() < 2) {
throw new IllegalArgumentException("Invalid map_id: " + mapId);
}
return s.substring(0, 2);
}
}

View File

@@ -1,76 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.utils;
import java.io.File;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import org.geotools.api.data.DataStore;
import org.geotools.api.data.DataStoreFinder;
import org.geotools.api.data.SimpleFeatureSource;
import org.geotools.api.feature.simple.SimpleFeature;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.data.simple.SimpleFeatureIterator;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.index.strtree.STRtree;
public class ShpIndexManager {
private static final String SHP_ROOT = "/shp";
private static final String SHP_YYYYMM = "202512";
private static final String PNU_FIELD = "PNU";
private final Map<String, STRtree> cache = new ConcurrentHashMap<>();
public STRtree getIndex(String sidoCode) {
return cache.computeIfAbsent(sidoCode, this::loadIndex);
}
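/**
 * Loads LSMD_CONT_LDREG_<sidoCode>_<SHP_YYYYMM>.shp from SHP_ROOT and builds an STRtree
 * of (geometry, PNU) rows; returns null when the shapefile is missing or loading fails.
 */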
private STRtree loadIndex(String sidoCode) {
try {
String path = SHP_ROOT + "/LSMD_CONT_LDREG_" + sidoCode + "_" + SHP_YYYYMM + ".shp";
File shp = new File(path);
if (!shp.exists()) {
return null;
}
STRtree index = new STRtree(10);
DataStore store = DataStoreFinder.getDataStore(Map.of("url", shp.toURI().toURL()));
String typeName = store.getTypeNames()[0];
SimpleFeatureSource source = store.getFeatureSource(typeName);
SimpleFeatureCollection col = source.getFeatures();
try (SimpleFeatureIterator it = col.features()) {
while (it.hasNext()) {
SimpleFeature f = it.next();
Geometry geom = (Geometry) f.getDefaultGeometry();
String pnu = Objects.toString(f.getAttribute(PNU_FIELD), null);
if (geom != null && pnu != null) {
index.insert(geom.getEnvelopeInternal(), new ShpRow(geom, pnu));
}
}
}
index.build();
store.dispose();
return index;
} catch (Exception e) {
return null;
}
}
/** A single SHP row */
public static class ShpRow {
public final Geometry geom;
public final String pnu;
public ShpRow(Geometry geom, String pnu) {
this.geom = geom;
this.pnu = pnu;
}
}
}

View File

@@ -1,42 +0,0 @@
package com.kamco.cd.kamcoback.Innopam.utils;
import java.util.List;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.prep.PreparedGeometry;
import org.locationtech.jts.geom.prep.PreparedGeometryFactory;
import org.locationtech.jts.index.strtree.STRtree;
public class ShpPnuMatcher {
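/**
 * Returns the PNU of the first indexed parcel that contains or covers the target geometry;
 * otherwise the PNU of the candidate with the largest intersection area, or null if none intersect.
 */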
public static String pickByIntersectionMax(STRtree index, Geometry target) {
Envelope env = target.getEnvelopeInternal();
@SuppressWarnings("unchecked")
List<ShpIndexManager.ShpRow> rows = index.query(env);
double best = 0;
String bestPnu = null;
for (ShpIndexManager.ShpRow row : rows) {
PreparedGeometry prep = PreparedGeometryFactory.prepare(row.geom);
if (prep.contains(target) || prep.covers(target)) {
return row.pnu;
}
if (!prep.intersects(target)) {
continue;
}
double area = row.geom.intersection(target).getArea();
if (area > best) {
best = area;
bestPnu = row.pnu;
}
}
return bestPnu;
}
}

View File

@@ -3,8 +3,10 @@ package com.kamco.cd.kamcoback.auth;
import com.kamco.cd.kamcoback.common.enums.StatusType;
import com.kamco.cd.kamcoback.common.enums.error.AuthErrorCode;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.common.utils.HeaderUtil;
import com.kamco.cd.kamcoback.postgres.entity.MemberEntity;
import com.kamco.cd.kamcoback.postgres.repository.members.MembersRepository;
import jakarta.servlet.http.HttpServletRequest;
import lombok.RequiredArgsConstructor;
import org.mindrot.jbcrypt.BCrypt;
import org.springframework.security.authentication.AuthenticationProvider;
@@ -12,11 +14,16 @@ import org.springframework.security.authentication.UsernamePasswordAuthenticatio
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
@Component
@RequiredArgsConstructor
public class CustomAuthenticationProvider implements AuthenticationProvider {
ServletRequestAttributes attr =
(ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
private final MembersRepository membersRepository;
@Override
@@ -52,7 +59,15 @@ public class CustomAuthenticationProvider implements AuthenticationProvider {
// Authentication succeeded → create UserDetails
CustomUserDetails userDetails = new CustomUserDetails(member);
return new UsernamePasswordAuthenticationToken(userDetails, null, userDetails.getAuthorities());
// Register the user IP passed from the front end
HttpServletRequest req = (attr != null) ? attr.getRequest() : null;
String ip = (req != null) ? HeaderUtil.get(req, "kamco-userIp") : null;
UsernamePasswordAuthenticationToken token =
new UsernamePasswordAuthenticationToken(userDetails, null, userDetails.getAuthorities());
token.setDetails(ip);
return token;
}
@Override

View File

@@ -10,7 +10,7 @@ import lombok.Getter;
@AllArgsConstructor
public enum StatusType implements EnumType {
ACTIVE("사용"),
INACTIVE("사용"),
INACTIVE("사용중지"),
PENDING("계정등록");
private final String desc;

View File

@@ -0,0 +1,23 @@
package com.kamco.cd.kamcoback.common.utils;
import jakarta.servlet.http.HttpServletRequest;
public final class HeaderUtil {
private HeaderUtil() {}
/** Returns the value of the given header */
public static String get(HttpServletRequest request, String headerName) {
if (request == null || headerName == null) {
return null;
}
String value = request.getHeader(headerName);
return (value != null && !value.isBlank()) ? value : null;
}
/** Returns a required header (null if absent) */
public static String getRequired(HttpServletRequest request, String headerName) {
return get(request, headerName);
}
}

View File

@@ -44,4 +44,11 @@ public class UserUtil {
MembersDto.Member user = getCurrentUser();
return user != null ? user.getRole() : null;
}
public String getIp() {
return Optional.ofNullable(SecurityContextHolder.getContext().getAuthentication())
.map(auth -> auth.getDetails())
.map(Object::toString)
.orElse(null);
}
}

View File

@@ -1,12 +1,13 @@
package com.kamco.cd.kamcoback.Innopam;
package com.kamco.cd.kamcoback.gukyuin;
import com.kamco.cd.kamcoback.Innopam.dto.ChngDetectMastDto;
import com.kamco.cd.kamcoback.Innopam.dto.ChngDetectMastDto.ChnDetectMastReqDto;
import com.kamco.cd.kamcoback.Innopam.dto.ChngDetectMastDto.ChngDetectMastSearchDto;
import com.kamco.cd.kamcoback.Innopam.dto.ChngDetectMastDto.ResReturn;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.Basic;
import com.kamco.cd.kamcoback.Innopam.dto.DetectMastDto.DetectMastReq;
import com.kamco.cd.kamcoback.Innopam.service.InnopamApiService;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto;
import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto.ChnDetectMastReqDto;
import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto.ChngDetectMastSearchDto;
import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto.ResReturn;
import com.kamco.cd.kamcoback.gukyuin.dto.DetectMastDto.Basic;
import com.kamco.cd.kamcoback.gukyuin.dto.DetectMastDto.DetectMastReq;
import com.kamco.cd.kamcoback.gukyuin.service.GukYuinApiService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
@@ -15,6 +16,7 @@ import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.validation.Valid;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
@@ -23,13 +25,13 @@ import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@Tag(name = "이노펨 연동 API", description = "이노펨 연동 API")
@Tag(name = "국유인 연동 API", description = "국유인 연동 API")
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/innopam/")
public class InnopamApiController {
@RequestMapping("/api/gukyuin/")
public class GukYuinApiController {
private final InnopamApiService innopamApiService;
private final GukYuinApiService gukYuinApiService;
/** Register detection result */
@Operation(summary = "탐지결과 등록", description = "탐지결과 등록")
@@ -48,8 +50,7 @@ public class InnopamApiController {
@PostMapping("/mast/regist")
public ChngDetectMastDto.Basic regist(
@RequestBody @Valid ChngDetectMastDto.ChnDetectMastReqDto chnDetectMastReq) {
// innopamApiService.saveDetectMast(chnDetectMastReq);
return innopamApiService.regist(chnDetectMastReq);
return gukYuinApiService.regist(chnDetectMastReq);
}
@Operation(summary = "탐지결과 삭제", description = "탐지결과 삭제")
@@ -68,7 +69,7 @@ public class InnopamApiController {
@PostMapping("/mast/remove")
public ResReturn remove(
@RequestBody @Valid ChngDetectMastDto.ChnDetectMastReqDto chnDetectMastReq) {
return innopamApiService.remove(chnDetectMastReq);
return gukYuinApiService.remove(chnDetectMastReq);
}
@Operation(summary = "탐지결과 등록목록 조회", description = "탐지결과 등록목록 조회")
@@ -95,6 +96,11 @@ public class InnopamApiController {
searchDto.setCprsYr(cprsYr);
searchDto.setCrtrYr(crtrYr);
searchDto.setChnDtctSno(chnDtctSno);
return innopamApiService.list(searchDto);
return gukYuinApiService.list(searchDto);
}
public ApiResponseDto<Boolean> getIsLinkGukYuin(UUID uuid) {
// Return the service result instead of discarding it and hard-coding false.
return ApiResponseDto.ok(gukYuinApiService.getIsLinkGukYuin(uuid));
}
}

View File

@@ -1,4 +1,4 @@
package com.kamco.cd.kamcoback.Innopam.dto;
package com.kamco.cd.kamcoback.gukyuin.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.List;
@@ -65,6 +65,7 @@ public class ChngDetectMastDto {
@NoArgsConstructor
@AllArgsConstructor
public static class ChnDetectMastReqDto {
private String cprsYr; // comparison year, e.g. 2023
private String crtrYr; // base year, e.g. 2024
private String chnDtctSno; // detection round (1 | 2 | ...)
@@ -127,6 +128,7 @@ public class ChngDetectMastDto {
@NoArgsConstructor
@AllArgsConstructor
public static class ChngDetectMastSearchDto {
private String chnDtctId;
private String cprsYr;
private String crtrYr;

View File

@@ -1,4 +1,4 @@
package com.kamco.cd.kamcoback.Innopam.dto;
package com.kamco.cd.kamcoback.gukyuin.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotBlank;

View File

@@ -1,15 +1,13 @@
package com.kamco.cd.kamcoback.Innopam.service;
package com.kamco.cd.kamcoback.gukyuin.service;
import com.fasterxml.jackson.core.JsonFactory;
import com.kamco.cd.kamcoback.Innopam.dto.ChngDetectMastDto;
import com.kamco.cd.kamcoback.Innopam.dto.ChngDetectMastDto.ResReturn;
import com.kamco.cd.kamcoback.Innopam.postgres.core.DetectMastCoreService;
import com.kamco.cd.kamcoback.common.utils.NetUtils;
import com.kamco.cd.kamcoback.common.utils.UserUtil;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient.ExternalCallResult;
import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto;
import com.kamco.cd.kamcoback.gukyuin.dto.ChngDetectMastDto.ResReturn;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpMethod;
@@ -19,31 +17,26 @@ import org.springframework.transaction.annotation.Transactional;
@Service
@Transactional(readOnly = true)
@RequiredArgsConstructor
public class InnopamApiService {
public class GukYuinApiService {
@Value("${spring.profiles.active:local}")
private String profile;
@Value("${innopam.url}")
private String innopamUrl;
@Value("${gukyuin.url}")
private String gukyuinUrl;
@Value("${innopam.mast}")
private String innopamMastUrl;
private final DetectMastCoreService detectMastCoreService;
@Value("${gukyuin.mast}")
private String gukyuinMastUrl;
private final ExternalHttpClient externalHttpClient;
private final UserUtil userUtil = new UserUtil();
private final NetUtils netUtils = new NetUtils();
private final JsonFactory jsonFactory = new JsonFactory();
@Transactional
public ChngDetectMastDto.Basic regist(ChngDetectMastDto.ChnDetectMastReqDto chnDetectMastReq) {
ChngDetectMastDto.Basic basic = new ChngDetectMastDto.Basic();
String url = innopamMastUrl + "/regist";
String url = gukyuinMastUrl + "/regist";
// url = "http://localhost:8080/api/kcd/cdi/detect/mast/regist";
String myip = netUtils.getLocalIP();
@@ -65,7 +58,7 @@ public class InnopamApiService {
public ResReturn remove(ChngDetectMastDto.ChnDetectMastReqDto chnDetectMastReq) {
ChngDetectMastDto.Basic basic = new ChngDetectMastDto.Basic();
String url = innopamMastUrl + "/remove";
String url = gukyuinMastUrl + "/remove";
// url = "http://localhost:8080/api/kcd/cdi/detect/mast/remove";
String myip = netUtils.getLocalIP();
@@ -88,7 +81,7 @@ public class InnopamApiService {
List<ChngDetectMastDto.Basic> masterList = new ArrayList<>();
String queryString = netUtils.dtoToQueryString(searchDto, null);
String url = innopamMastUrl + queryString;
String url = gukyuinMastUrl + queryString;
ExternalCallResult<String> result =
externalHttpClient.call(url, HttpMethod.GET, null, netUtils.jsonHeaders(), String.class);
@@ -97,4 +90,8 @@ public class InnopamApiService {
return masterList;
}
public Boolean getIsLinkGukYuin(UUID uuid) {
return false;
}
}

View File

@@ -262,7 +262,7 @@ public class InferenceResultApiController {
})
@GetMapping("/infer-result-info")
public ApiResponseDto<InferenceDetailDto.AnalResultInfo> getInferenceResultInfo(
@Parameter(description = "회차 uuid", example = "f30e8817-9625-4fff-ba43-c1e6ed2067c4")
@Parameter(description = "회차 uuid", example = "5799eb21-4780-48b0-a82e-e58dcbb8806b")
@RequestParam
UUID uuid) {
return ApiResponseDto.ok(inferenceResultService.getInferenceResultInfo(uuid));

View File

@@ -3,6 +3,9 @@ package com.kamco.cd.kamcoback.inference.dto;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.common.enums.DetectionClassification;
import com.kamco.cd.kamcoback.common.enums.ImageryFitStatus;
import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
@@ -431,6 +434,7 @@ public class InferenceDetailDto {
@Schema(name = "AnalResultInfo", description = "추론결과 기본정보")
@Getter
@Setter
@AllArgsConstructor
@NoArgsConstructor
public static class AnalResultInfo {
@@ -448,6 +452,31 @@ public class InferenceDetailDto {
private Integer stage;
private String elapsedDuration;
private String subUid;
private Boolean applyYn;
@JsonFormatDttm private ZonedDateTime applyDttm;
private String bboxGeom;
private String bboxCenterPoint;
@JsonProperty("bboxGeom")
public JsonNode getBboxGeom() {
ObjectMapper mapper = new ObjectMapper();
try {
return mapper.readTree(this.bboxGeom);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
@JsonProperty("bboxCenterPoint")
public JsonNode getBboxCenterPoint() {
ObjectMapper mapper = new ObjectMapper();
try {
return mapper.readTree(this.bboxCenterPoint);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
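Because the getters above re-parse the stored GeoJSON strings, bboxGeom and bboxCenterPoint serialize as nested GeoJSON objects rather than escaped strings. A minimal sketch of that behaviour, with illustrative geometry values (not taken from real data):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;

class BboxSerializationExample { // hypothetical snippet, not part of this commit
    public static void main(String[] args) throws Exception {
        InferenceDetailDto.AnalResultInfo info = new InferenceDetailDto.AnalResultInfo();
        info.setBboxGeom("{\"type\":\"Polygon\",\"coordinates\":[[[0,0],[0,1],[1,1],[1,0],[0,0]]]}");
        info.setBboxCenterPoint("{\"type\":\"Point\",\"coordinates\":[0.5,0.5]}");
        // Both bbox fields appear as nested GeoJSON objects in the output.
        System.out.println(new ObjectMapper().writeValueAsString(info));
    }
}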
public AnalResultInfo(
String analTitle,
@@ -461,7 +490,9 @@ public class InferenceDetailDto {
ZonedDateTime inferStartDttm,
ZonedDateTime inferEndDttm,
Integer stage,
String subUid) {
String subUid,
Boolean applyYn,
ZonedDateTime applyDttm) {
this.analTitle = analTitle;
this.modelVer1 = modelVer1;
this.modelVer2 = modelVer2;
@@ -474,6 +505,8 @@ public class InferenceDetailDto {
this.inferEndDttm = inferEndDttm;
this.stage = stage;
this.subUid = subUid;
this.applyYn = applyYn;
this.applyDttm = applyDttm;
Duration elapsed =
(inferStartDttm != null && inferEndDttm != null)
? Duration.between(inferStartDttm, inferEndDttm)
@@ -492,6 +525,16 @@ public class InferenceDetailDto {
}
}
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class BboxPointDto {
private String bboxGeom;
private String bboxCenterPoint;
}
@Getter
@Setter
@NoArgsConstructor

View File

@@ -574,6 +574,12 @@ public class InferenceResultService {
return dto;
}
/**
* Basic inference result info.
*
* @param uuid round uuid
* @return basic info for the inference round
*/
public AnalResultInfo getInferenceResultInfo(UUID uuid) {
return inferenceResultCoreService.getInferenceResultInfo(uuid);
}

View File

@@ -0,0 +1,8 @@
package com.kamco.cd.kamcoback.postgres.core;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
public class GukYuinCoreService {}

View File

@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.common.utils.UserUtil;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResultInfo;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.BboxPointDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Geom;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.InferenceBatchSheet;
@@ -409,8 +410,18 @@ public class InferenceResultCoreService {
return dto;
}
/**
* Basic inference result info, enriched with the bbox geometry and its center point.
*
* @param uuid round uuid
* @return basic info for the inference round
*/
public AnalResultInfo getInferenceResultInfo(UUID uuid) {
return mapSheetLearnRepository.getInferenceResultInfo(uuid);
AnalResultInfo resultInfo = mapSheetLearnRepository.getInferenceResultInfo(uuid);
BboxPointDto bboxPointDto = mapSheetLearnRepository.getBboxPoint(uuid);
resultInfo.setBboxGeom(bboxPointDto.getBboxGeom());
resultInfo.setBboxCenterPoint(bboxPointDto.getBboxCenterPoint());
return resultInfo;
}
public List<Dashboard> getInferenceClassCountList(UUID uuid) {

View File

@@ -1,6 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResultInfo;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.BboxPointDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Geom;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.SearchGeoReq;
@@ -34,6 +35,8 @@ public interface MapSheetLearnRepositoryCustom {
AnalResultInfo getInferenceResultInfo(UUID uuid);
BboxPointDto getBboxPoint(UUID uuid);
List<Dashboard> getInferenceClassCountList(UUID uuid);
Page<Geom> getInferenceGeomList(UUID uuid, SearchGeoReq searchGeoReq);

View File

@@ -13,6 +13,7 @@ import static com.kamco.cd.kamcoback.postgres.entity.QSystemMetricEntity.systemM
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
import com.kamco.cd.kamcoback.common.utils.DateRange;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.AnalResultInfo;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.BboxPointDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Geom;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.SearchGeoReq;
@@ -24,6 +25,7 @@ import com.kamco.cd.kamcoback.model.service.ModelMngService;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Expression;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.CaseBuilder;
@@ -319,7 +321,9 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
mapSheetLearnEntity.inferStartDttm,
mapSheetLearnEntity.inferEndDttm,
mapSheetLearnEntity.stage,
Expressions.stringTemplate("substring({0} from 1 for 8)", mapSheetLearnEntity.uid)))
Expressions.stringTemplate("substring({0} from 1 for 8)", mapSheetLearnEntity.uid),
mapSheetLearnEntity.applyYn,
mapSheetLearnEntity.applyDttm))
.from(mapSheetLearnEntity)
.leftJoin(m1)
.on(mapSheetLearnEntity.m1ModelUuid.eq(m1.uuid))
@@ -331,6 +335,30 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
.fetchOne();
}
@Override
public BboxPointDto getBboxPoint(UUID uuid) {
Expression<String> bboxGeom =
Expressions.stringTemplate(
"ST_AsGeoJSON(ST_Envelope(ST_Collect({0})))", mapSheetAnalDataInferenceGeomEntity.geom);
Expression<String> bboxCenterPoint =
Expressions.stringTemplate(
"ST_AsGeoJSON(ST_Centroid(ST_Envelope(ST_Collect({0}))))",
mapSheetAnalDataInferenceGeomEntity.geom);
return queryFactory
.select(Projections.constructor(BboxPointDto.class, bboxGeom, bboxCenterPoint))
.from(mapSheetLearnEntity)
.join(mapSheetAnalInferenceEntity)
.on(mapSheetAnalInferenceEntity.learnId.eq(mapSheetLearnEntity.id))
.join(mapSheetAnalDataInferenceEntity)
.on(mapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
.join(mapSheetAnalDataInferenceGeomEntity)
.on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
.where(mapSheetLearnEntity.uuid.eq(uuid))
.fetchOne();
}
@Override
public List<Dashboard> getInferenceClassCountList(UUID uuid) {
@@ -420,7 +448,7 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
StringExpression pnu =
Expressions.stringTemplate(
"coalesce(({0}), '')",
"nullif(({0}), '')",
JPAExpressions.select(Expressions.stringTemplate("string_agg({0}, ',')", pnuEntity.pnu))
.from(pnuEntity)
.where(pnuEntity.geo.geoUid.eq(mapSheetAnalDataInferenceGeomEntity.geoUid)));
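For context on the getBboxPoint query above: ST_Collect aggregates every inference geometry for the round, ST_Envelope reduces that collection to its bounding box, ST_Centroid takes the box center, and ST_AsGeoJSON serializes both as GeoJSON text. A rough native-SQL sketch of the same query follows; the table and column names are assumptions inferred from the entity names, not taken from the actual schema:

// Rough native-SQL equivalent of getBboxPoint(); names below are assumed, not verified.
String bboxSql = """
    SELECT ST_AsGeoJSON(ST_Envelope(ST_Collect(g.geom)))              AS bbox_geom,
           ST_AsGeoJSON(ST_Centroid(ST_Envelope(ST_Collect(g.geom)))) AS bbox_center_point
      FROM map_sheet_learn l
      JOIN map_sheet_anal_inference ai          ON ai.learn_id = l.id
      JOIN map_sheet_anal_data_inference di     ON di.anal_uid = ai.id
      JOIN map_sheet_anal_data_inference_geom g ON g.data_uid = di.id
     WHERE l.uuid = ?
    """;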

View File

@@ -67,10 +67,8 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
mapSheetAnalSttcEntity.id.classAfterCd.as("classNm"), // converted to the Korean class name by the upstream CoreService
mapSheetAnalSttcEntity.classAfterCnt.sum()))
.from(mapSheetAnalInferenceEntity)
.innerJoin(mapSheetAnalDataInferenceEntity)
.on(mapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
.innerJoin(mapSheetAnalSttcEntity)
.on(mapSheetAnalSttcEntity.id.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
.on(mapSheetAnalSttcEntity.id.analUid.eq(mapSheetAnalInferenceEntity.id))
.where(
mapSheetAnalInferenceEntity.uuid.eq(uuid),
mapScaleTypeSearchExpression(scale, mapSheetNum))

View File

@@ -107,7 +107,7 @@ inference:
geojson-dir: /kamco-nfs/requests/
jar-path: /kamco-nfs/dataset/shp_exporter-1.0.0.jar
innopam:
gukyuin:
#url: http://localhost:8080
url: http://192.168.2.129:5301
mast : ${innopam.url}/api/kcd/cdi/chn/mast
url: http://192.168.2.129:5301
mast: ${gukyuin.url}/api/kcd/cdi/chn/mast

View File

@@ -87,3 +87,8 @@ inference:
batch-url: http://10.100.0.11:8000/batches
geojson-dir: /kamco-nfs/requests/
jar-path: jar/makeshp-1.0.0.jar
gukyuin:
#url: http://localhost:8080
url: http://192.168.2.129:5301
mast: ${gukyuin.url}/api/kcd/cdi/chn/mast
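The new gukyuin block is consumed through @Value placeholders in GukYuinApiService (see the service diff earlier in this change set); a minimal binding sketch under standard Spring property resolution, with the class name chosen only for illustration:

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

@Component // illustrative consumer of the new properties, not part of this commit
class GukYuinProperties {
    @Value("${gukyuin.url}")
    private String url;      // e.g. http://192.168.2.129:5301

    @Value("${gukyuin.mast}")
    private String mastUrl;  // resolves to ${gukyuin.url}/api/kcd/cdi/chn/mast
}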

View File

@@ -63,7 +63,7 @@ inference:
geojson-dir: /kamco-nfs/requests/
jar-path: /kamco-nfs/dataset/shp_exporter-1.0.0.jar
innopam:
gukyuin:
#url: http://localhost:8080
url: http://192.168.2.129:5301
mast: ${innopam.url}/api/kcd/cdi/chn/mast
mast: ${gukyuin.url}/api/kcd/cdi/chn/mast