shp export 소스 추가

This commit is contained in:
2026-02-23 16:15:11 +09:00
parent 5c47d111b1
commit ee3f86f8ac
104 changed files with 3628 additions and 0 deletions

View File

@@ -0,0 +1,7 @@
{
"permissions": {
"allow": [
"WebSearch"
]
}
}

15
shp-exporter/.editorconfig Executable file
View File

@@ -0,0 +1,15 @@
root = true
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
[*.java]
indent_style = space
indent_size = 2
[*.{gradle,yml,yaml}]
indent_style = space
indent_size = 2

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

View File

@@ -0,0 +1,2 @@
#Wed Jan 14 15:14:03 KST 2026
gradle.version=8.14.3

Binary file not shown.

Binary file not shown.

View File

8
shp-exporter/.idea/.gitignore generated vendored Executable file
View File

@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

9
shp-exporter/.idea/compiler.xml generated Executable file
View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
<bytecodeTargetLevel target="17" />
</component>
<component name="JavacSettings">
<option name="ADDITIONAL_OPTIONS_STRING" value="-parameters" />
</component>
</project>

19
shp-exporter/.idea/gradle.xml generated Executable file
View File

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="GradleMigrationSettings" migrationVersion="1" />
<component name="GradleSettings">
<option name="linkedExternalProjectsSettings">
<GradleProjectSettings>
<option name="delegatedBuild" value="false" />
<option name="testRunner" value="PLATFORM" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
</set>
</option>
<option name="resolveExternalAnnotations" value="true" />
</GradleProjectSettings>
</option>
</component>
</project>

35
shp-exporter/.idea/jarRepositories.xml generated Executable file
View File

@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RemoteRepositoriesConfiguration">
<remote-repository>
<option name="id" value="central" />
<option name="name" value="Maven Central repository" />
<option name="url" value="https://repo1.maven.org/maven2" />
</remote-repository>
<remote-repository>
<option name="id" value="jboss.community" />
<option name="name" value="JBoss Community repository" />
<option name="url" value="https://repository.jboss.org/nexus/content/repositories/public/" />
</remote-repository>
<remote-repository>
<option name="id" value="MavenRepo" />
<option name="name" value="MavenRepo" />
<option name="url" value="https://repo.maven.apache.org/maven2/" />
</remote-repository>
<remote-repository>
<option name="id" value="maven" />
<option name="name" value="maven" />
<option name="url" value="https://repo.osgeo.org/repository/release/" />
</remote-repository>
<remote-repository>
<option name="id" value="maven2" />
<option name="name" value="maven2" />
<option name="url" value="https://repo.osgeo.org/repository/geotools-releases/" />
</remote-repository>
<remote-repository>
<option name="id" value="maven3" />
<option name="name" value="maven3" />
<option name="url" value="https://repo.osgeo.org/repository/snapshot/" />
</remote-repository>
</component>
</project>

9
shp-exporter/.idea/makesample_geoserver.iml generated Executable file
View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

10
shp-exporter/.idea/misc.xml generated Executable file
View File

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="FrameworkDetectionExcludesConfiguration">
<file type="web" url="file://$PROJECT_DIR$" />
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_17" default="true" project-jdk-name="17" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/out" />
</component>
</project>

122
shp-exporter/CLAUDE.md Executable file
View File

@@ -0,0 +1,122 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
Spring Boot CLI application that queries PostgreSQL PostGIS spatial data and converts it to ESRI shapefiles. The application processes inference results from the KAMCO database and generates geographic shapefiles for visualization in GIS applications.
## Build and Run Commands
### Build
```bash
./gradlew build
```
### Run Application
```bash
./gradlew bootRun
```
Or run the built JAR:
```bash
java -jar build/libs/shp-exporter.jar
```
### Code Formatting
Apply Google Java Format (2-space indentation) before committing:
```bash
./gradlew spotlessApply
```
Check formatting without applying:
```bash
./gradlew spotlessCheck
```
## Architecture
### Processing Pipeline
The application follows a layered architecture with a linear data flow:
1. **CLI Entry** (`ConverterCommandLineRunner`) → Reads configuration and initiates batch processing
2. **Service Orchestration** (`ShapefileConverterService`) → Coordinates the conversion workflow for each map_id
3. **Data Access** (`InferenceResultRepository`) → Queries PostGIS database and converts WKT to JTS geometries
4. **Geometry Conversion** (`GeometryConverter`) → Converts PostGIS WKT format to JTS Geometry objects
5. **Shapefile Writing** (`ShapefileWriter`) → Uses GeoTools to generate shapefile artifacts (.shp, .shx, .dbf, .prj)
### Key Design Points
**Geometry Handling**: The application uses a two-step geometry conversion process:
- PostGIS returns geometries as WKT (Well-Known Text) via `ST_AsText(geometry)`
- `GeometryConverter` parses WKT to JTS `Geometry` objects
- `ShapefileWriter` uses JTS geometries with GeoTools to write shapefiles
**Batch Processing**: Configuration in `application.yml` drives batch execution:
- Multiple `map-ids` processed sequentially (if specified)
- If `map-ids` is null/empty, creates a merged shapefile for all batch-ids
- Each map_id filtered by `batch-ids` array
- Output directory structure: `{output-base-dir}/{inference-id}/{map-id}/` or `{output-base-dir}/{inference-id}/merge/` for merged mode
- Separate output directory created for each map_id
**Shapefile Constraints**: The application validates that all geometries for a single shapefile are homogeneous (same type) because shapefiles cannot contain mixed geometry types. This validation happens in `ShapefileConverterService.validateGeometries()`.
**Feature Schema**: GeoTools requires explicit geometry field setup:
- Default geometry field named `the_geom` (not `geometry`)
- Field names truncated to 10 characters for DBF format compatibility
- Geometry type determined from first valid geometry in result set
## Configuration
Primary configuration in `src/main/resources/application.yml`:
```yaml
converter:
inference-id: 'D5E46F60FC40B1A8BE0CD1F3547AA6' # Inference ID (used for output folder structure)
map-ids: ['35813030'] # List of map_ids to process (text type), omit for merged shapefile
batch-ids: [252, 253, 257] # Batch ID array filter
output-base-dir: '/kamco-nfs/dataset/export/' # Base directory for shapefile output
crs: 'EPSG:5186' # Korean 2000 / Central Belt CRS
```
Database connection configured via standard Spring Boot datasource properties.
## Database Integration
### Query Pattern
The repository uses `PreparedStatementCreator` to handle PostgreSQL array parameters:
```sql
WHERE batch_id = ANY(?) AND map_id = ?
```
The `ANY(?)` clause requires creating a PostgreSQL array using `Connection.createArrayOf("bigint", ...)`.
### Field Mapping
Database columns are mapped to shapefile fields with Korean naming:
| Database Column | Shapefile Field |
|-----------------|-----------------|
| uid | chnDtctId |
| map_id | mpqd_no |
| probability | chn_dtct_p |
| before_year | cprs_yr |
| after_year | crtr_yr |
| before_c | bf_cls_cd |
| before_p | bf_cls_pro |
| after_c | af_cls_cd |
| after_p | af_cls_pro |
| geometry | the_geom |
### Coordinate Reference System
All geometries use **EPSG:5186** (Korean 2000 / Central Belt). The PostGIS geometry column is defined as `geometry(Polygon, 5186)`, and this CRS is preserved in the output shapefile's `.prj` file via GeoTools CRS encoding.
## Dependencies
Key libraries and their roles:
- **GeoTools 30.0**: Shapefile generation (`gt-shapefile`, `gt-referencing`, `gt-epsg-hsql`)
- **JTS 1.19.0**: Java Topology Suite for geometry representation
- **PostGIS JDBC 2.5.1**: PostgreSQL spatial extension support
- **Spring Boot 3.5.7**: Framework for DI, JDBC, and configuration
Note: `javax.media:jai_core` is excluded in `build.gradle` to avoid conflicts.

388
shp-exporter/README.md Executable file
View File

@@ -0,0 +1,388 @@
# PostgreSQL to Shapefile Converter
Spring Boot CLI application that queries PostgreSQL spatial data and generates shapefiles.
## Features
- Batch processing for multiple map_ids
- PostGIS geometry to Shapefile conversion
- **GeoServer REST API integration for automatic layer registration**
- Configurable via `application.yml`
- Generates all required shapefile files (.shp, .shx, .dbf, .prj)
- Supports EPSG:5186 (Korean 2000 / Central Belt) coordinate reference system
- GeoJSON export support
## Prerequisites
- Java 17 or higher
- PostgreSQL database with PostGIS extension
- Access to the KAMCO database at 192.168.2.127:15432
## Configuration
Edit `src/main/resources/application.yml` to configure:
```yaml
spring:
  profiles:
    active: 'dev' # active profile — top-level Spring property, not part of converter.*
converter:
inference-id: 'D5E46F60FC40B1A8BE0CD1F3547AA6' # Inference ID (used for output folder structure)
map-ids:
- '35813030' # Add your map_ids here (text type)
batch-ids:
- 252
- 253
- 257
output-base-dir: '/kamco-nfs/dataset/export/'
crs: 'EPSG:5186' # Korean 2000 / Central Belt
geoserver:
base-url: 'https://kamco.geo-dev.gs.dabeeo.com'
workspace: 'cd'
datastore: 'inference_result'
overwrite-existing: true # Delete existing layers before re-registering
connection-timeout: 30000 # 30 seconds
read-timeout: 60000 # 60 seconds
```
## Converter Mode (`converter.mode`)
`converter.mode`는 대량 `map_ids` 처리 시
OS 커맨드라인 길이 제한(`Argument list too long`) 문제를 방지하기 위해 추가하였습니다.
### Supported Modes
#### MERGED
- `batch-ids`에 해당하는 **모든 데이터를 하나의 Shapefile로 병합 생성**
- `map-ids`가 설정되어 있어도 **무시됨**
- 단일 결과 파일이 필요한 경우 적합
#### MAP_IDS
- 명시적으로 전달한 `map-ids`만 대상으로 Shapefile 생성
- `converter.map-ids` **필수**
- `map-ids` 개수가 많을 경우 OS 커맨드라인 길이 제한에 걸릴 수 있음
#### RESOLVE
- `batch-ids` 기준으로 **JAR 내부에서 map_ids를 조회**한 뒤 Shapefile 생성
- `map-ids`를 커맨드라인 인자로 전달하지 않음
- 대량 데이터 처리 시 가장 안전한 방식
### Default Behavior (mode 미지정 시)
- `converter.map-ids`가 비어 있으면 → **MERGED**
- `converter.map-ids`가 있으면 → **MAP_IDS**
### Command Line Parameters
You can override configuration values using command line arguments:
**Using Gradle (recommended - no quoting issues):**
```bash
./gradlew bootRun --args="--converter.inference-id=D5E46F60FC40B1A8BE0CD1F3547AA6 --converter.map-ids[0]=35813030 --converter.batch-ids[0]=252 --converter.batch-ids[1]=253 --converter.batch-ids[2]=257 --converter.mode=MERGED"
```
**Using JAR (zsh shell - quote arguments with brackets):**
```bash
java -jar build/libs/shp-exporter.jar \
'--converter.inference-id=D5E46F60FC40B1A8BE0CD1F3547AA6' \
'--converter.map-ids[0]=35813030' \
'--converter.batch-ids[0]=252' \
'--converter.batch-ids[1]=253' \
'--converter.batch-ids[2]=257' \
'--converter.mode=MERGED'
```
**Using JAR (bash shell - no quotes needed):**
```bash
java -jar build/libs/shp-exporter.jar \
--converter.inference-id=D5E46F60FC40B1A8BE0CD1F3547AA6 \
--converter.map-ids[0]=35813030 \
--converter.batch-ids[0]=252 \
--converter.batch-ids[1]=253 \
--converter.batch-ids[2]=257 \
--converter.mode=MERGED
```
**Note for zsh users:** zsh interprets square brackets `[]` as glob patterns. Always quote arguments containing brackets when using zsh.
## Building
```bash
./gradlew build
```
## Running
### Generate Shapefiles
```bash
./gradlew bootRun
```
Or run the JAR directly:
```bash
java -jar build/libs/shp-exporter.jar
```
### Register Shapefile to GeoServer
First, set GeoServer credentials as environment variables:
```bash
export GEOSERVER_USERNAME=admin
export GEOSERVER_PASSWORD=geoserver
```
Then register a shapefile:
```bash
./gradlew bootRun --args="--upload-shp /kamco-nfs/dataset/export/D5E46F60FC40B1A8BE0CD1F3547AA6/35813030/35813030.shp --layer inference_35813030"
```
Or using the JAR:
```bash
java -jar build/libs/shp-exporter.jar \
--upload-shp /path/to/shapefile.shp \
--layer layer_name
```
**GeoServer Registration Process:**
1. Verifies workspace 'cd' exists (must be pre-created in GeoServer)
2. Creates datastore 'inference_result' if it doesn't exist
3. Deletes existing layer if `overwrite-existing: true`
4. Publishes shapefile via REST API
5. Verifies successful registration
6. Automatically enables WMS, WFS, WMTS services
**Important Notes:**
- Workspace 'cd' must exist in GeoServer before registration
- Environment variables `GEOSERVER_USERNAME` and `GEOSERVER_PASSWORD` must be set
- Shapefile path must be absolute
- GeoServer must have file system access to the shapefile location
## Output
Shapefiles will be created in directories structured as `output-base-dir/inference-id/map-id/`:
```
/kamco-nfs/dataset/export/D5E46F60FC40B1A8BE0CD1F3547AA6/35813030/
├── 35813030.shp # Shapefile geometry
├── 35813030.shx # Shape index
├── 35813030.dbf # Attribute data
└── 35813030.prj # Projection information
```
## Database Query
The application executes the following query for each map_id:
```sql
SELECT uid, map_id, probability, before_year, after_year,
before_c, before_p, after_c, after_p, ST_AsText(geometry) as geometry_wkt
FROM inference_results_testing
WHERE batch_id = ANY(?) AND map_id = ?
```
### Database Schema
- **geometry**: `geometry(Polygon, 5186)` - EPSG:5186 좌표계
- **map_id**: `text` - 문자열 타입
- **before_year, after_year**: `bigint` - Long 타입
- **batch_id**: `bigint` - Long 타입
- **uid**: `uuid` - UUID 타입
## Field Mapping
Shapefile field names are limited to 10 characters:
| Database Column | DB Type | Shapefile Field | Shapefile Type |
|-----------------|----------|-----------------|----------------|
| uid | uuid | chnDtctId | String |
| map_id | text | mpqd_no | String |
| probability | float8 | chn_dtct_p | Double |
| before_year | bigint | cprs_yr | Long |
| after_year | bigint | crtr_yr | Long |
| before_c | text | bf_cls_cd | String |
| before_p | float8 | bf_cls_pro | Double |
| after_c | text | af_cls_cd | String |
| after_p | float8 | af_cls_pro | Double |
| geometry | geom | the_geom | Polygon |
## Error Handling
### Shapefile Generation
- **No results**: Logs warning and continues to next map_id
- **Mixed geometry types**: Throws exception (shapefiles require homogeneous geometry)
- **Database connection failure**: Application exits with error
- **Invalid geometry**: Logs warning and continues processing
### GeoServer Registration
- **Workspace not found**: Logs error with remediation steps (must be pre-created)
- **Authentication failure**: Logs error prompting to verify environment variables
- **Network timeout**: Logs connection error with timeout details
- **Layer already exists**: Automatically deletes and re-registers if `overwrite-existing: true`
- **Registration failure**: Logs error but does not stop application (non-blocking)
## Validating Output
### Shapefile Validation
Open the generated shapefiles in QGIS or ArcGIS to verify:
1. Geometry displays correctly
2. Attribute table contains all expected fields
3. CRS is EPSG:5186 (Korean 2000 / Central Belt)
### GeoServer Layer Validation
After registering to GeoServer, verify the layer:
1. **GeoServer Admin Console**: https://kamco.geo-dev.gs.dabeeo.com/geoserver/web
2. Navigate to **Layers** → Find your layer (e.g., `cd:inference_35813030`)
3. Preview the layer using **Layer Preview**
4. Verify services are enabled:
- WMS: `https://kamco.geo-dev.gs.dabeeo.com/geoserver/cd/wms`
- WFS: `https://kamco.geo-dev.gs.dabeeo.com/geoserver/cd/wfs`
- WMTS: `https://kamco.geo-dev.gs.dabeeo.com/geoserver/cd/wmts`
**Example WMS GetMap Request:**
```
https://kamco.geo-dev.gs.dabeeo.com/geoserver/cd/wms?
service=WMS&
version=1.1.0&
request=GetMap&
layers=cd:inference_35813030&
bbox=<bounds>&
width=768&
height=768&
srs=EPSG:5186&
format=image/png
```
## Development
### Code Formatting
The project uses Google Java Format with 2-space indentation:
```bash
./gradlew spotlessApply
```
### Project Structure
```
src/main/java/com/kamco/makesample/
├── MakeSampleApplication.java # Main application class
├── cli/
│ └── ConverterCommandLineRunner.java # CLI entry point
├── config/
│ ├── ConverterProperties.java # Shapefile converter configuration
│ ├── GeoServerProperties.java # GeoServer configuration
│ ├── GeoServerCredentials.java # GeoServer authentication
│ └── RestTemplateConfig.java # HTTP client configuration
├── exception/
│ ├── ShapefileConversionException.java
│ ├── GeometryConversionException.java
│ ├── MixedGeometryException.java
│ └── GeoServerRegistrationException.java # GeoServer registration errors
├── model/
│ └── InferenceResult.java # Domain model
├── repository/
│ └── InferenceResultRepository.java # Data access layer
├── service/
│ ├── GeometryConverter.java # PostGIS to JTS conversion
│ ├── ShapefileConverterService.java # Orchestration service
│ └── GeoServerRegistrationService.java # GeoServer REST API integration
└── writer/
├── ShapefileWriter.java # GeoTools shapefile writer
└── GeoJsonWriter.java # GeoJSON export writer
```
## Dependencies
- Spring Boot 3.5.7
- spring-boot-starter
- spring-boot-starter-jdbc
- spring-boot-starter-web (for RestTemplate)
- spring-boot-starter-validation (for @NotBlank annotations)
- GeoTools 30.0
- gt-shapefile
- gt-referencing
- gt-epsg-hsql
- gt-geojson
- PostgreSQL JDBC Driver
- PostGIS JDBC 2.5.1
- JTS (Java Topology Suite) 1.19.0
## Troubleshooting
### GeoServer Registration Issues
**Problem: "Workspace not found: cd"**
```
Solution: Create workspace 'cd' in GeoServer admin console before registration
Steps:
1. Login to GeoServer admin: https://kamco.geo-dev.gs.dabeeo.com/geoserver/web
2. Go to Workspaces → Add new workspace
3. Name: cd, Namespace URI: http://cd
4. Click Save
```
**Problem: "GeoServer credentials not configured"**
```
Solution: Set environment variables before running
export GEOSERVER_USERNAME=admin
export GEOSERVER_PASSWORD=geoserver
```
**Problem: "Layer already exists and overwrite is disabled"**
```
Solution: Enable overwrite in application.yml
geoserver:
overwrite-existing: true
```
**Problem: Connection timeout to GeoServer**
```
Solution: Increase timeout values in application.yml
geoserver:
connection-timeout: 60000 # 60 seconds
read-timeout: 120000 # 120 seconds
```
**Problem: "Registration failed. Layer not found after publication"**
```
Possible causes:
1. GeoServer cannot access shapefile path (check file system permissions)
2. Shapefile is corrupted or invalid
3. Network issue interrupted registration
Solution:
1. Verify GeoServer has read access to shapefile directory
2. Validate shapefile using QGIS or ogr2ogr
3. Check GeoServer logs for detailed error messages
```
### Database Connection Issues
**Problem: "Connection refused to 192.168.2.127:15432"**
```
Solution: Verify PostgreSQL is running and accessible
psql -h 192.168.2.127 -p 15432 -U kamco_cds -d kamco_cds
```
**Problem: "No results found for map_id"**
```
Solution: Verify data exists in database
SELECT COUNT(*) FROM inference_results_testing
WHERE batch_id IN (252, 253, 257) AND map_id = '35813030';
```
## License
KAMCO Internal Use Only

88
shp-exporter/build.gradle Executable file
View File

@@ -0,0 +1,88 @@
// Build script for the shp-exporter CLI: queries PostGIS and writes ESRI shapefiles.
plugins {
  id 'java'
  id 'org.springframework.boot' version '3.5.7'
  id 'io.spring.dependency-management' version '1.1.7'
  // Spotless enforces Google Java Format (see the spotless block below).
  id 'com.diffplug.spotless' version '6.25.0'
}
group = 'com.kamco'
version = '1.0.0'
java {
  toolchain {
    // Targets Java 17, matching the project language level.
    languageVersion = JavaLanguageVersion.of(17)
  }
}
repositories {
  mavenCentral()
  // OSGeo repositories host the GeoTools artifacts that are not on Maven Central.
  maven {
    url 'https://repo.osgeo.org/repository/release/'
  }
  maven {
    url 'https://repo.osgeo.org/repository/geotools-releases/'
  }
  maven {
    url 'https://repo.osgeo.org/repository/snapshot/'
  }
}
ext {
  geoToolsVersion = '30.0'
}
configurations.all {
  // javax.media:jai_core conflicts with GeoTools' bundled JAI; exclude it globally.
  exclude group: 'javax.media', module: 'jai_core'
}
bootJar {
  // Fixed artifact name so run commands do not depend on the project version.
  archiveFileName = "shp-exporter.jar"
}
jar {
  enabled = false // do not build the plain jar (avoids confusion with the boot jar)
}
dependencies {
  // Spring Boot
  implementation 'org.springframework.boot:spring-boot-starter'
  implementation 'org.springframework.boot:spring-boot-starter-jdbc'
  implementation 'org.springframework.boot:spring-boot-starter-web'
  implementation 'org.springframework.boot:spring-boot-starter-validation'
  // Database
  implementation 'org.postgresql:postgresql'
  implementation 'com.zaxxer:HikariCP'
  // PostGIS
  implementation 'net.postgis:postgis-jdbc:2.5.1'
  // JTS Geometry
  implementation 'org.locationtech.jts:jts-core:1.19.0'
  // GeoTools
  implementation "org.geotools:gt-shapefile:${geoToolsVersion}"
  implementation "org.geotools:gt-referencing:${geoToolsVersion}"
  implementation "org.geotools:gt-epsg-hsql:${geoToolsVersion}"
  implementation "org.geotools:gt-geojson:${geoToolsVersion}"
  // Logging
  implementation 'org.slf4j:slf4j-api'
  // Testing
  testImplementation 'org.springframework.boot:spring-boot-starter-test'
  testRuntimeOnly 'org.junit.platform:junit-platform-launcher'
}
spotless {
  java {
    // Google Java Format with 2-space indentation, per project convention.
    googleJavaFormat('1.19.2')
    indentWithSpaces(2)
    trimTrailingWhitespace()
    endWithNewline()
  }
}
tasks.named('test') {
  useJUnitPlatform()
}

Binary file not shown.

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
com.kamco.makesample.MakeSampleApplication

View File

@@ -0,0 +1,3 @@
spring:
application:
name: make-shapefile-service

View File

@@ -0,0 +1,12 @@
Manifest-Version: 1.0
Main-Class: org.springframework.boot.loader.launch.JarLauncher
Start-Class: com.kamco.makesample.MakeSampleApplication
Spring-Boot-Version: 3.5.7
Spring-Boot-Classes: BOOT-INF/classes/
Spring-Boot-Lib: BOOT-INF/lib/
Spring-Boot-Classpath-Index: BOOT-INF/classpath.idx
Spring-Boot-Layers-Index: BOOT-INF/layers.idx
Build-Jdk-Spec: 17
Implementation-Title: shp-exporter
Implementation-Version: 1.0.0

View File

@@ -0,0 +1 @@
1

BIN
shp-exporter/gradle/wrapper/gradle-wrapper.jar vendored Executable file

Binary file not shown.

View File

@@ -0,0 +1,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

251
shp-exporter/gradlew vendored Executable file
View File

@@ -0,0 +1,251 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH="\\\"\\\""
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
if ! command -v java >/dev/null 2>&1
then
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Collect all arguments for the java command:
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
# and any embedded shellness will be escaped.
# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
# treated as '${Hostname}' itself on the command line.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
-jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

94
shp-exporter/gradlew.bat vendored Executable file
View File

@@ -0,0 +1,94 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@rem SPDX-License-Identifier: Apache-2.0
@rem
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:execute
@rem Setup the command line
set CLASSPATH=
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %*
:end
@rem End local scope for the variables with windows NT shell
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
PROJCS["Korea 2000 / Central Belt 2010", GEOGCS["Korea 2000", DATUM["Geocentric datum of Korea", SPHEROID["GRS 1980", 6378137.0, 298.257222101, AUTHORITY["EPSG","7019"]], TOWGS84[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], AUTHORITY["EPSG","6737"]], PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]], UNIT["degree", 0.017453292519943295], AXIS["Geodetic latitude", NORTH], AXIS["Geodetic longitude", EAST], AUTHORITY["EPSG","4737"]], PROJECTION["Transverse_Mercator", AUTHORITY["EPSG","9807"]], PARAMETER["central_meridian", 127.0], PARAMETER["latitude_of_origin", 38.0], PARAMETER["scale_factor", 1.0], PARAMETER["false_easting", 200000.0], PARAMETER["false_northing", 600000.0], UNIT["m", 1.0], AXIS["Northing", NORTH], AXIS["Easting", EAST], AUTHORITY["EPSG","5186"]]

View File

@@ -0,0 +1,52 @@
spring:
datasource:
url: jdbc:postgresql://192.168.2.127:15432/kamco_cds
# NOTE(review): plain-text database credentials are committed here — move them
# to environment variables or an externalized secret store before release.
username: kamco_cds
password: kamco_cds_Q!W@E#R$
driver-class-name: org.postgresql.Driver
hikari:
maximum-pool-size: 5
connection-timeout: 30000
idle-timeout: 600000
max-lifetime: 1800000
application:
name: make-shapefile-service
main:
web-application-type: none # Disable web server for CLI application
converter:
inference-id: D5E46F60FC40B1A8BE0CD1F3547AA6
# Required: batch IDs whose inference results are exported.
# (map-ids may be omitted to create one merged shapefile for these batches.)
batch-ids:
- 252
- 253
- 257
output-base-dir: '/kamco-nfs/dataset/export/'
#output-base-dir: '/Users/bokmin/export/'
crs: 'EPSG:5186'
geoserver:
base-url: 'http://label-tile.gs.dabeeo.com/geoserver'
workspace: 'cd'
overwrite-existing: true
connection-timeout: 30000
read-timeout: 60000
# Credentials (optional - environment variables take precedence)
# Uncomment and set values for development convenience
# For production, use GEOSERVER_USERNAME and GEOSERVER_PASSWORD environment variables
username: 'admin'
password: 'geoserver'
logging:
level:
com.kamco.makesample: DEBUG
org.springframework: WARN
pattern:
console: '%d{yyyy-MM-dd HH:mm:ss} - %msg%n'
layer:
geoserver-url: http://label-tile.gs.dabeeo.com
workspace: cd

1
shp-exporter/settings.gradle Executable file
View File

@@ -0,0 +1 @@
rootProject.name = 'shp-exporter'

View File

@@ -0,0 +1,12 @@
package com.kamco.makesample;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Spring Boot entry point for the shp-exporter CLI.
 *
 * <p>Runs without an embedded web server (application.yml sets
 * {@code spring.main.web-application-type: none}); the actual work is
 * dispatched by {@code ConverterCommandLineRunner}.
 */
@SpringBootApplication
public class MakeSampleApplication {

  public static void main(String[] args) {
    SpringApplication.run(MakeSampleApplication.class, args);
  }
}

View File

@@ -0,0 +1,167 @@
package com.kamco.makesample.cli;

import com.kamco.makesample.config.ConverterProperties;
import com.kamco.makesample.service.GeoServerRegistrationService;
import com.kamco.makesample.service.ShapefileConverterService;
import java.nio.file.Paths;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.DefaultApplicationArguments;
import org.springframework.stereotype.Component;

/**
 * CLI dispatcher for the exporter. Supports two modes:
 *
 * <ul>
 *   <li>{@code --upload-shp <file>} — upload an existing shapefile (.shp or .zip) to GeoServer
 *   <li>default — convert inference results from PostgreSQL into shapefiles
 * </ul>
 */
@Component
public class ConverterCommandLineRunner implements CommandLineRunner {

  private static final Logger log = LoggerFactory.getLogger(ConverterCommandLineRunner.class);

  private final ShapefileConverterService converterService;
  private final GeoServerRegistrationService geoServerService;
  private final ConverterProperties converterProperties;

  public ConverterCommandLineRunner(
      ShapefileConverterService converterService,
      GeoServerRegistrationService geoServerService,
      ConverterProperties converterProperties) {
    this.converterService = converterService;
    this.geoServerService = geoServerService;
    this.converterProperties = converterProperties;
  }

  @Override
  public void run(String... args) throws Exception {
    ApplicationArguments appArgs = new DefaultApplicationArguments(args);
    List<String> profiles = appArgs.getOptionValues("spring.profiles.active");
    log.info("profiles.active={}", profiles);

    // Upload mode short-circuits the conversion flow entirely.
    if (appArgs.containsOption("upload-shp")) {
      handleRegistration(appArgs);
      return;
    }

    // Default mode: shapefile generation from the database.
    log.info("=== PostgreSQL to Shapefile Converter ===");
    log.info("Inference ID: {}", converterProperties.getInferenceId());
    List<String> mapIds = converterProperties.getMapIds();
    if (mapIds == null || mapIds.isEmpty()) {
      log.info("Map IDs: <not specified - will create merged shapefile>");
    } else {
      log.info("Map IDs to process: {}", mapIds);
    }
    log.info("Batch IDs: {}", converterProperties.getBatchIds());
    log.info("Output directory: {}", converterProperties.getOutputBaseDir());
    log.info("CRS: {}", converterProperties.getCrs());
    log.info("==========================================");

    try {
      converterService.convertAll();
      log.info("Conversion process completed successfully");
    } catch (Exception e) {
      // Non-zero exit code so batch schedulers can detect the failure.
      log.error("Conversion process failed: {}", e.getMessage(), e);
      System.exit(1);
    }
  }

  /**
   * Handles the {@code --upload-shp} flow: derives a layer name from the file
   * name when none is given and delegates the upload to the GeoServer service.
   * Rethrows any upload failure so the application exits with an error status.
   */
  private void handleRegistration(ApplicationArguments appArgs) {
    if (appArgs.containsOption("help") || appArgs.containsOption("h")) {
      printUsage();
      return;
    }

    String filePath = firstOption(appArgs, "upload-shp");
    String layerName = firstOption(appArgs, "layer");

    if (filePath == null || filePath.isBlank()) {
      log.info("No upload requested. Use --upload-shp option to upload a shapefile.");
      printUsage();
      return;
    }

    // Default the layer name to the file name, stripping .zip/.shp case-insensitively.
    if (layerName == null || layerName.isBlank()) {
      String fileName = Paths.get(filePath).getFileName().toString();
      layerName = fileName.replaceAll("(?i)\\.(zip|shp)$", "");
    }

    log.info("========================================");
    log.info("Shapefile Upload to GeoServer");
    log.info("========================================");
    log.info("Input File: {}", filePath);
    log.info("Layer Name: {}", layerName);
    log.info("========================================");

    try {
      geoServerService.uploadShapefileZip(filePath, layerName);
      log.info("========================================");
      log.info("Upload completed successfully!");
      log.info("========================================");
    } catch (Exception e) {
      log.error("========================================");
      log.error("Upload failed: {}", e.getMessage(), e);
      log.error("========================================");
      throw e;
    }
  }

  /** Returns the first value of {@code --key=value}, or {@code null} when absent. */
  private String firstOption(ApplicationArguments appArgs, String key) {
    var values = appArgs.getOptionValues(key);
    return (values == null || values.isEmpty()) ? null : values.get(0);
  }

  /** Prints CLI usage for the upload mode to stdout. */
  private void printUsage() {
    System.out.println();
    System.out.println("Usage: java -jar shp-exporter.jar [OPTIONS]");
    System.out.println();
    System.out.println("Options:");
    System.out.println(
        "  --upload-shp <file-path>    Upload shapefile to GeoServer (.shp or .zip)");
    System.out.println(
        "  --layer <layer-name>        Specify layer name (optional, defaults to filename)");
    System.out.println("  --help, -h                  Show this help message");
    System.out.println();
    System.out.println("Examples:");
    System.out.println("  # Upload ZIP file directly");
    System.out.println("  java -jar shp-exporter.jar --upload-shp /path/to/shapefile.zip");
    System.out.println();
    System.out.println("  # Upload .shp file (will auto-create ZIP with related files)");
    System.out.println("  java -jar shp-exporter.jar --upload-shp /path/to/shapefile.shp");
    System.out.println();
    System.out.println("  # Specify custom layer name");
    System.out.println(
        "  java -jar shp-exporter.jar --upload-shp /path/to/shapefile.shp --layer my_layer");
    System.out.println();
  }
}

View File

@@ -0,0 +1,65 @@
package com.kamco.makesample.config;

import java.util.List;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

/**
 * Conversion settings bound from the {@code converter.*} prefix in application.yml.
 */
@Component
@ConfigurationProperties(prefix = "converter")
public class ConverterProperties {

  /** Inference run identifier the export belongs to. */
  private String inferenceId;

  /** Optional map sheet IDs; when null/empty a merged shapefile is produced. */
  private List<String> mapIds;

  /** Batch IDs to export (required). */
  private List<Long> batchIds;

  /** Base directory that generated shapefiles are written under. */
  private String outputBaseDir;

  /** Target coordinate reference system, e.g. "EPSG:5186". */
  private String crs;

  /** Optional processing mode selector (e.g. RESOLVE / MERGED / MAP_IDS). */
  private String mode;

  public String getInferenceId() {
    return inferenceId;
  }

  public void setInferenceId(String inferenceId) {
    this.inferenceId = inferenceId;
  }

  public List<String> getMapIds() {
    return mapIds;
  }

  public void setMapIds(List<String> mapIds) {
    this.mapIds = mapIds;
  }

  public List<Long> getBatchIds() {
    return batchIds;
  }

  public void setBatchIds(List<Long> batchIds) {
    this.batchIds = batchIds;
  }

  public String getOutputBaseDir() {
    return outputBaseDir;
  }

  public void setOutputBaseDir(String outputBaseDir) {
    this.outputBaseDir = outputBaseDir;
  }

  public String getCrs() {
    return crs;
  }

  public void setCrs(String crs) {
    this.crs = crs;
  }

  // getter/setter ordered consistently with the other properties above.
  public String getMode() {
    return mode;
  }

  public void setMode(String mode) {
    this.mode = mode;
  }
}

View File

@@ -0,0 +1,31 @@
package com.kamco.makesample.config;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

/**
 * Resolves GeoServer credentials, preferring the {@code GEOSERVER_USERNAME} /
 * {@code GEOSERVER_PASSWORD} environment variables and falling back to the
 * {@code geoserver.username} / {@code geoserver.password} properties.
 * Unresolved values stay {@code null} until {@link #validate()} is called.
 */
@Component
public class GeoServerCredentials {

  // Nested placeholder: env var wins, property is the fallback, else null.
  @Value("${GEOSERVER_USERNAME:${geoserver.username:#{null}}}")
  private String username;

  @Value("${GEOSERVER_PASSWORD:${geoserver.password:#{null}}}")
  private String password;

  /**
   * Fails fast when neither source supplied both credentials.
   *
   * @throws IllegalStateException if username or password is missing
   */
  public void validate() {
    if (username == null || password == null) {
      throw new IllegalStateException(
          "GeoServer credentials not configured. "
              + "Set GEOSERVER_USERNAME and GEOSERVER_PASSWORD environment variables "
              + "or configure geoserver.username and geoserver.password in application.yml");
    }
  }

  public String getUsername() {
    return username;
  }

  public String getPassword() {
    return password;
  }
}

View File

@@ -0,0 +1,96 @@
package com.kamco.makesample.config;

import jakarta.validation.constraints.NotBlank;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
import org.springframework.validation.annotation.Validated;

/**
 * GeoServer connection settings bound from the {@code geoserver.*} prefix.
 *
 * <p>NOTE(review): {@code baseUrl} and {@code workspace} additionally carry
 * {@code @Value} bindings to {@code layer.geoserver-url} / {@code layer.workspace},
 * and application.yml defines both the {@code geoserver.*} and {@code layer.*}
 * keys. Mixing {@code @Value} with {@code @ConfigurationProperties} makes the
 * effective value depend on Spring's binding order — confirm which source is
 * intended and keep a single mechanism.
 */
@Component
@ConfigurationProperties(prefix = "geoserver")
@Validated
public class GeoServerProperties {

  @NotBlank(message = "GeoServer base URL must be configured")
  @Value("${layer.geoserver-url}")
  private String baseUrl;

  @NotBlank(message = "GeoServer workspace must be configured")
  @Value("${layer.workspace}")
  private String workspace;

  // Default datastore name; overridable via geoserver.datastore.
  @NotBlank(message = "GeoServer datastore must be configured")
  private String datastore = "inference_result";

  // When true, an existing layer/datastore of the same name is deleted before upload.
  private boolean overwriteExisting = true;

  private int connectionTimeout = 30000; // 30 seconds
  private int readTimeout = 60000; // 60 seconds

  // Basic-auth credentials; see GeoServerCredentials for the env-var variant.
  @NotBlank private String username;
  @NotBlank private String password;

  public String getBaseUrl() {
    return baseUrl;
  }

  public void setBaseUrl(String baseUrl) {
    this.baseUrl = baseUrl;
  }

  public String getWorkspace() {
    return workspace;
  }

  public void setWorkspace(String workspace) {
    this.workspace = workspace;
  }

  public String getDatastore() {
    return datastore;
  }

  public void setDatastore(String datastore) {
    this.datastore = datastore;
  }

  public boolean isOverwriteExisting() {
    return overwriteExisting;
  }

  public void setOverwriteExisting(boolean overwriteExisting) {
    this.overwriteExisting = overwriteExisting;
  }

  public int getConnectionTimeout() {
    return connectionTimeout;
  }

  public void setConnectionTimeout(int connectionTimeout) {
    this.connectionTimeout = connectionTimeout;
  }

  public int getReadTimeout() {
    return readTimeout;
  }

  public void setReadTimeout(int readTimeout) {
    this.readTimeout = readTimeout;
  }

  public String getUsername() {
    return username;
  }

  public void setUsername(String username) {
    this.username = username;
  }

  public String getPassword() {
    return password;
  }

  public void setPassword(String password) {
    this.password = password;
  }
}

View File

@@ -0,0 +1,31 @@
package com.kamco.makesample.config;

import java.time.Duration;
import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.web.client.RestTemplate;

/**
 * Provides a shared {@link RestTemplate} whose connect/read timeouts come from
 * the {@code geoserver.*} configuration.
 */
@Configuration
public class RestTemplateConfig {

  private final GeoServerProperties properties;

  public RestTemplateConfig(GeoServerProperties properties) {
    this.properties = properties;
  }

  /**
   * RestTemplate bean with the configured GeoServer timeouts applied.
   *
   * @param builder Spring Boot's pre-configured builder
   * @return timeout-aware RestTemplate
   */
  @Bean
  public RestTemplate restTemplate(RestTemplateBuilder builder) {
    return builder.requestFactory(this::createRequestFactory).build();
  }

  /** Builds the request factory carrying the configured connect/read timeouts. */
  private SimpleClientHttpRequestFactory createRequestFactory() {
    SimpleClientHttpRequestFactory factory = new SimpleClientHttpRequestFactory();
    factory.setConnectTimeout(Duration.ofMillis(properties.getConnectionTimeout()));
    factory.setReadTimeout(Duration.ofMillis(properties.getReadTimeout()));
    return factory;
  }
}

View File

@@ -0,0 +1,12 @@
package com.kamco.makesample.exception;

/** Thrown when uploading or registering a layer with GeoServer fails. */
public class GeoServerRegistrationException extends RuntimeException {

  public GeoServerRegistrationException(String message) {
    super(message);
  }

  public GeoServerRegistrationException(String message, Throwable cause) {
    super(message, cause);
  }
}

View File

@@ -0,0 +1,12 @@
package com.kamco.makesample.exception;

/** Thrown when WKT-to-JTS geometry conversion fails (see GeometryConverter). */
public class GeometryConversionException extends ShapefileConversionException {

  public GeometryConversionException(String message) {
    super(message);
  }

  public GeometryConversionException(String message, Throwable cause) {
    super(message, cause);
  }
}

View File

@@ -0,0 +1,12 @@
package com.kamco.makesample.exception;

/** Thrown when mixed geometry types are encountered during shapefile conversion. */
public class MixedGeometryException extends ShapefileConversionException {

  public MixedGeometryException(String message) {
    super(message);
  }

  public MixedGeometryException(String message, Throwable cause) {
    super(message, cause);
  }
}

View File

@@ -0,0 +1,12 @@
package com.kamco.makesample.exception;

/** Base unchecked exception for shapefile conversion failures. */
public class ShapefileConversionException extends RuntimeException {

  public ShapefileConversionException(String message) {
    super(message);
  }

  public ShapefileConversionException(String message, Throwable cause) {
    super(message, cause);
  }
}

View File

@@ -0,0 +1,122 @@
package com.kamco.makesample.model;

import org.locationtech.jts.geom.Geometry;

/**
 * Data holder for one row of the {@code inference_results_testing} table
 * (see InferenceResultRepository), pairing change-detection attributes with
 * the JTS geometry parsed from the row's WKT.
 */
public class InferenceResult {

  // Row identifier.
  private String uid;
  // Map sheet identifier the result belongs to.
  private String mapId;
  // Overall change probability for this result.
  private Double probability;
  // Year of the "before" imagery.
  private Long beforeYear;
  // Year of the "after" imagery.
  private Long afterYear;
  // Classification code before the change; presumably a class label — TODO confirm.
  private String beforeC;
  // Classification probability before the change.
  private Double beforeP;
  // Classification code after the change.
  private String afterC;
  // Classification probability after the change.
  private Double afterP;
  // Parsed geometry (from ST_AsText WKT); may be null when the column was null.
  private Geometry geometry;

  public InferenceResult() {}

  public InferenceResult(
      String uid,
      String mapId,
      Double probability,
      Long beforeYear,
      Long afterYear,
      String beforeC,
      Double beforeP,
      String afterC,
      Double afterP,
      Geometry geometry) {
    this.uid = uid;
    this.mapId = mapId;
    this.probability = probability;
    this.beforeYear = beforeYear;
    this.afterYear = afterYear;
    this.beforeC = beforeC;
    this.beforeP = beforeP;
    this.afterC = afterC;
    this.afterP = afterP;
    this.geometry = geometry;
  }

  public String getUid() {
    return uid;
  }

  public void setUid(String uid) {
    this.uid = uid;
  }

  public String getMapId() {
    return mapId;
  }

  public void setMapId(String mapId) {
    this.mapId = mapId;
  }

  public Double getProbability() {
    return probability;
  }

  public void setProbability(Double probability) {
    this.probability = probability;
  }

  public Long getBeforeYear() {
    return beforeYear;
  }

  public void setBeforeYear(Long beforeYear) {
    this.beforeYear = beforeYear;
  }

  public Long getAfterYear() {
    return afterYear;
  }

  public void setAfterYear(Long afterYear) {
    this.afterYear = afterYear;
  }

  public String getBeforeC() {
    return beforeC;
  }

  public void setBeforeC(String beforeC) {
    this.beforeC = beforeC;
  }

  public Double getBeforeP() {
    return beforeP;
  }

  public void setBeforeP(Double beforeP) {
    this.beforeP = beforeP;
  }

  public String getAfterC() {
    return afterC;
  }

  public void setAfterC(String afterC) {
    this.afterC = afterC;
  }

  public Double getAfterP() {
    return afterP;
  }

  public void setAfterP(Double afterP) {
    this.afterP = afterP;
  }

  public Geometry getGeometry() {
    return geometry;
  }

  public void setGeometry(Geometry geometry) {
    this.geometry = geometry;
  }
}

View File

@@ -0,0 +1,153 @@
package com.kamco.makesample.repository;

import com.kamco.makesample.model.InferenceResult;
import com.kamco.makesample.service.GeometryConverter;
import java.sql.Array;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.PreparedStatementCreator;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Repository;

/**
 * Read-only access to the {@code inference_results_testing} table. All queries
 * filter on a batch-id array and require non-null after_c/after_p columns;
 * geometries are transported as WKT and parsed via {@link GeometryConverter}.
 */
@Repository
public class InferenceResultRepository {

  private static final Logger log = LoggerFactory.getLogger(InferenceResultRepository.class);

  private final JdbcTemplate jdbcTemplate;
  private final GeometryConverter geometryConverter;

  public InferenceResultRepository(JdbcTemplate jdbcTemplate, GeometryConverter geometryConverter) {
    this.jdbcTemplate = jdbcTemplate;
    this.geometryConverter = geometryConverter;
  }

  /** Results for one map sheet restricted to the given batches. */
  public List<InferenceResult> findByMapId(List<Long> batchIds, String mapId) {
    String sql =
        """
        SELECT uid, map_id, probability, before_year, after_year,
               before_c, before_p, after_c, after_p, ST_AsText(geometry) as geometry_wkt
        FROM inference_results_testing
        WHERE batch_id = ANY(?) AND map_id = ?
          AND after_c IS NOT NULL
          AND after_p IS NOT NULL
        """;

    log.info("Querying database for map_id: {}, batch_ids: {}", mapId, batchIds);

    List<InferenceResult> results =
        jdbcTemplate.query(
            batchIdsStatement(sql, batchIds, mapId), new InferenceResultRowMapper());
    log.info("Found {} results for map_id: {}", results.size(), mapId);
    return results;
  }

  /** Results for every map sheet within the given batches. */
  public List<InferenceResult> findByBatchIds(List<Long> batchIds) {
    String sql =
        """
        SELECT uid, map_id, probability, before_year, after_year,
               before_c, before_p, after_c, after_p, ST_AsText(geometry) as geometry_wkt
        FROM inference_results_testing
        WHERE batch_id = ANY(?)
          AND after_c IS NOT NULL
          AND after_p IS NOT NULL
        """;

    log.info("Querying database for all map_ids, batch_ids: {}", batchIds);

    List<InferenceResult> results =
        jdbcTemplate.query(batchIdsStatement(sql, batchIds, null), new InferenceResultRowMapper());
    log.info("Found {} results across all map_ids", results.size());
    return results;
  }

  /** Distinct map sheet IDs present in the given batches. */
  public List<String> findMapIdByBatchIds(List<Long> batchIds) {
    String sql =
        """
        SELECT DISTINCT map_id
        FROM inference_results_testing
        WHERE batch_id = ANY(?)
          AND after_c IS NOT NULL
          AND after_p IS NOT NULL
        """;

    log.info("Querying database for all map_ids, batch_ids: {}", batchIds);

    List<String> mapIds =
        jdbcTemplate.query(
            batchIdsStatement(sql, batchIds, null), (rs, rowNum) -> rs.getString("map_id"));
    log.info("Found {} map_ids", mapIds.size());
    return mapIds;
  }

  /**
   * Shared statement factory: binds the batch-id array as parameter 1 and,
   * when non-null, the map id as parameter 2. Replaces three copies of the
   * same anonymous PreparedStatementCreator.
   */
  private PreparedStatementCreator batchIdsStatement(
      String sql, List<Long> batchIds, String mapId) {
    return con -> {
      PreparedStatement ps = con.prepareStatement(sql);
      Array batchIdsArray = con.createArrayOf("bigint", batchIds.toArray());
      ps.setArray(1, batchIdsArray);
      if (mapId != null) {
        ps.setString(2, mapId);
      }
      return ps;
    };
  }

  /** Maps a result row to {@link InferenceResult}, parsing the WKT geometry. */
  private class InferenceResultRowMapper implements RowMapper<InferenceResult> {

    @Override
    public InferenceResult mapRow(ResultSet rs, int rowNum) throws SQLException {
      InferenceResult result = new InferenceResult();
      result.setUid(rs.getString("uid"));
      result.setMapId(rs.getString("map_id"));
      result.setProbability(getDoubleOrNull(rs, "probability"));
      result.setBeforeYear(getLongOrNull(rs, "before_year"));
      result.setAfterYear(getLongOrNull(rs, "after_year"));
      result.setBeforeC(rs.getString("before_c"));
      result.setBeforeP(getDoubleOrNull(rs, "before_p"));
      result.setAfterC(rs.getString("after_c"));
      result.setAfterP(getDoubleOrNull(rs, "after_p"));

      String geometryWkt = rs.getString("geometry_wkt");
      if (geometryWkt != null) {
        result.setGeometry(geometryConverter.convertWKTToJTS(geometryWkt));
      }
      return result;
    }

    // getLong/getDouble return primitives; wasNull() distinguishes SQL NULL from 0.
    private Long getLongOrNull(ResultSet rs, String columnName) throws SQLException {
      long value = rs.getLong(columnName);
      return rs.wasNull() ? null : value;
    }

    private Double getDoubleOrNull(ResultSet rs, String columnName) throws SQLException {
      double value = rs.getDouble(columnName);
      return rs.wasNull() ? null : value;
    }
  }
}

View File

@@ -0,0 +1,245 @@
package com.kamco.makesample.service;

import com.kamco.makesample.config.GeoServerProperties;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.util.Arrays;
import java.util.Base64;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.stereotype.Service;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.RestTemplate;

/**
 * Uploads zipped shapefiles to GeoServer via its REST API
 * ({@code PUT /rest/workspaces/{ws}/datastores/{name}/file.shp}).
 *
 * <p>A bare .shp input is first packed into a temporary ZIP together with its
 * sibling components (.shx, .dbf, .prj, …). Each upload creates a datastore
 * named after the layer.
 */
@Service
public class GeoServerRegistrationService {

  private static final Logger log = LoggerFactory.getLogger(GeoServerRegistrationService.class);

  private final RestTemplate restTemplate;
  private final GeoServerProperties properties;

  public GeoServerRegistrationService(GeoServerProperties properties) {
    this.properties = properties;
    // Honor the configured connect/read timeouts. The previous bare
    // `new RestTemplate()` ignored geoserver.connection-timeout/read-timeout
    // and could hang indefinitely on an unresponsive GeoServer.
    SimpleClientHttpRequestFactory factory = new SimpleClientHttpRequestFactory();
    factory.setConnectTimeout(Duration.ofMillis(properties.getConnectionTimeout()));
    factory.setReadTimeout(Duration.ofMillis(properties.getReadTimeout()));
    this.restTemplate = new RestTemplate(factory);
  }

  /**
   * Uploads a shapefile (.zip or bare .shp) to GeoServer as layer {@code layerName}.
   *
   * @param filePath path to a .zip or .shp file
   * @param layerName target layer (and datastore) name
   * @throws IllegalArgumentException on invalid/missing input file
   * @throws RuntimeException when reading the file or the REST upload fails
   */
  public void uploadShapefileZip(String filePath, String layerName) {
    String zipFilePath = filePath;
    boolean tempZipCreated = false;
    try {
      log.info("Starting shapefile upload to GeoServer");
      log.info("Input file: {}", filePath);
      log.info("Layer name: {}", layerName);
      log.info("Workspace: {}", properties.getWorkspace());

      validateInputs(filePath, layerName);

      // If input is .shp file, create temporary ZIP with its sibling components.
      if (filePath.toLowerCase().endsWith(".shp")) {
        log.info("Input is .shp file, creating ZIP archive with related files...");
        zipFilePath = createZipFromShapefile(filePath);
        tempZipCreated = true;
        log.info("Temporary ZIP created: {}", zipFilePath);
      }

      // Overwrite semantics: drop any existing layer/datastore of the same name.
      if (properties.isOverwriteExisting() && layerExists(layerName)) {
        log.info("Layer '{}' already exists. Deleting...", layerName);
        deleteLayer(layerName);
      }

      Path path = Paths.get(zipFilePath);
      byte[] zipData = Files.readAllBytes(path);
      log.info("ZIP file size: {} bytes", zipData.length);

      // Datastore is named after the layer, one store per uploaded shapefile.
      String url =
          String.format(
              "%s/rest/workspaces/%s/datastores/%s/file.shp",
              properties.getBaseUrl(), properties.getWorkspace(), layerName);

      HttpHeaders headers = createHeaders();
      headers.setContentType(MediaType.valueOf("application/zip"));
      HttpEntity<byte[]> request = new HttpEntity<>(zipData, headers);

      log.info("Uploading to GeoServer: {}", url);
      ResponseEntity<String> response =
          restTemplate.exchange(url, HttpMethod.PUT, request, String.class);

      if (response.getStatusCode() == HttpStatus.CREATED
          || response.getStatusCode() == HttpStatus.OK) {
        log.info("Shapefile uploaded successfully to GeoServer");
        log.info(
            "Layer '{}' is now available in workspace '{}'", layerName, properties.getWorkspace());
      } else {
        log.warn("Unexpected response status: {}", response.getStatusCode());
      }
    } catch (IOException e) {
      log.error("Failed to read file: {}", filePath, e);
      throw new RuntimeException("Failed to read file", e);
    } catch (HttpClientErrorException e) {
      log.error(
          "GeoServer upload failed. Status: {}, Response: {}",
          e.getStatusCode(),
          e.getResponseBodyAsString());
      throw new RuntimeException("GeoServer upload failed", e);
    } catch (Exception e) {
      log.error("Unexpected error during shapefile upload", e);
      throw new RuntimeException("Shapefile upload failed", e);
    } finally {
      // Always clean up the temporary ZIP, even on failure.
      if (tempZipCreated && zipFilePath != null) {
        try {
          Files.deleteIfExists(Paths.get(zipFilePath));
          log.info("Temporary ZIP file deleted: {}", zipFilePath);
        } catch (IOException e) {
          log.warn("Failed to delete temporary ZIP file: {}", zipFilePath, e);
        }
      }
    }
  }

  /** Rejects blank arguments, missing/unreadable files, and unsupported extensions. */
  private void validateInputs(String filePath, String layerName) {
    if (filePath == null || filePath.trim().isEmpty()) {
      throw new IllegalArgumentException("File path cannot be empty");
    }
    if (layerName == null || layerName.trim().isEmpty()) {
      throw new IllegalArgumentException("Layer name cannot be empty");
    }
    File file = new File(filePath);
    if (!file.exists()) {
      throw new IllegalArgumentException("File does not exist: " + filePath);
    }
    if (!file.canRead()) {
      throw new IllegalArgumentException("Cannot read file: " + filePath);
    }
    String lowerPath = filePath.toLowerCase();
    if (!lowerPath.endsWith(".zip") && !lowerPath.endsWith(".shp")) {
      throw new IllegalArgumentException("File must be a .zip or .shp file: " + filePath);
    }
  }

  /** True when GET on the layer resource returns 200; 404 (or errors) → false. */
  private boolean layerExists(String layerName) {
    try {
      String url =
          String.format(
              "%s/rest/workspaces/%s/layers/%s",
              properties.getBaseUrl(), properties.getWorkspace(), layerName);
      HttpHeaders headers = createHeaders();
      HttpEntity<Void> request = new HttpEntity<>(headers);
      ResponseEntity<String> response =
          restTemplate.exchange(url, HttpMethod.GET, request, String.class);
      return response.getStatusCode() == HttpStatus.OK;
    } catch (HttpClientErrorException e) {
      if (e.getStatusCode() == HttpStatus.NOT_FOUND) {
        return false;
      }
      log.warn("Error checking if layer exists: {}", e.getMessage());
      return false;
    }
  }

  /** Deletes the datastore recursively, which also removes its layer; 404 is ignored. */
  private void deleteLayer(String layerName) {
    try {
      String datastoreUrl =
          String.format(
              "%s/rest/workspaces/%s/datastores/%s?recurse=true",
              properties.getBaseUrl(), properties.getWorkspace(), layerName);
      HttpHeaders headers = createHeaders();
      HttpEntity<Void> request = new HttpEntity<>(headers);
      restTemplate.exchange(datastoreUrl, HttpMethod.DELETE, request, String.class);
      log.info("Successfully deleted existing layer/datastore: {}", layerName);
    } catch (HttpClientErrorException e) {
      if (e.getStatusCode() != HttpStatus.NOT_FOUND) {
        log.warn("Failed to delete layer: {}", e.getMessage());
      }
    }
  }

  /**
   * Packs the .shp and its sibling components (.shx, .dbf, .prj, .cpg, .qpj)
   * into a temporary ZIP and returns its path.
   *
   * @throws IOException when no components are found or archiving fails
   */
  private String createZipFromShapefile(String shpFilePath) throws IOException {
    // getAbsoluteFile() guarantees a non-null parent even for bare file names.
    File shpFile = new File(shpFilePath).getAbsoluteFile();
    File parentDir = shpFile.getParentFile();
    // Strip the extension case-insensitively, matching validateInputs' checks.
    String baseName = shpFile.getName().replaceAll("(?i)\\.shp$", "");

    List<String> extensions = Arrays.asList(".shp", ".shx", ".dbf", ".prj", ".cpg", ".qpj");

    File[] relatedFiles =
        parentDir.listFiles(
            f -> {
              String name = f.getName();
              int dot = name.lastIndexOf('.');
              if (dot < 0) {
                // Extensionless sibling: previously caused StringIndexOutOfBoundsException.
                return false;
              }
              String fileBaseName = name.substring(0, dot);
              String ext = name.substring(dot).toLowerCase();
              return fileBaseName.equals(baseName) && extensions.contains(ext);
            });

    if (relatedFiles == null || relatedFiles.length == 0) {
      throw new IOException("No shapefile components found for: " + shpFilePath);
    }

    log.info("Found {} shapefile components to archive:", relatedFiles.length);
    for (File f : relatedFiles) {
      log.info("  - {}", f.getName());
    }

    Path tempZip = Files.createTempFile("shapefile_", ".zip");
    try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(tempZip.toFile()))) {
      for (File file : relatedFiles) {
        try (FileInputStream fis = new FileInputStream(file)) {
          ZipEntry zipEntry = new ZipEntry(file.getName());
          zos.putNextEntry(zipEntry);
          byte[] buffer = new byte[8192];
          int length;
          while ((length = fis.read(buffer)) > 0) {
            zos.write(buffer, 0, length);
          }
          zos.closeEntry();
        }
      }
    }
    return tempZip.toString();
  }

  /** Basic-auth header from the configured credentials (explicit UTF-8 encoding). */
  private HttpHeaders createHeaders() {
    HttpHeaders headers = new HttpHeaders();
    String auth = properties.getUsername() + ":" + properties.getPassword();
    String encodedAuth = Base64.getEncoder().encodeToString(auth.getBytes(StandardCharsets.UTF_8));
    headers.set("Authorization", "Basic " + encodedAuth);
    return headers;
  }
}

View File

@@ -0,0 +1,41 @@
package com.kamco.makesample.service;

import com.kamco.makesample.exception.GeometryConversionException;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

/**
 * Converts WKT strings (e.g. PostGIS {@code ST_AsText} output) into JTS geometries.
 */
@Component
public class GeometryConverter {

  private static final Logger log = LoggerFactory.getLogger(GeometryConverter.class);

  // NOTE(review): WKTReader is not documented as thread-safe; this singleton
  // is currently only called from single-threaded CLI/JDBC flows — revisit if
  // conversion ever runs concurrently.
  private final WKTReader wktReader;

  public GeometryConverter() {
    this.wktReader = new WKTReader();
  }

  /**
   * Parses a WKT string into a JTS {@link Geometry}.
   *
   * @param wkt WKT text; {@code null} or blank input yields {@code null}
   * @return the parsed geometry, or {@code null} for blank input
   * @throws GeometryConversionException if the WKT cannot be parsed
   */
  public Geometry convertWKTToJTS(String wkt) {
    if (wkt == null || wkt.isBlank()) {
      return null;
    }
    try {
      Geometry jtsGeometry = wktReader.read(wkt);
      // Invalid geometries are logged but still returned so callers can decide
      // whether to repair or drop them.
      if (!jtsGeometry.isValid()) {
        log.warn("Invalid geometry detected: {}", jtsGeometry);
      }
      return jtsGeometry;
    } catch (ParseException e) {
      throw new GeometryConversionException(
          "Failed to convert WKT to JTS geometry: " + e.getMessage(), e);
    }
  }
}

View File

@@ -0,0 +1,231 @@
package com.kamco.makesample.service;
import com.kamco.makesample.config.ConverterProperties;
import com.kamco.makesample.exception.MixedGeometryException;
import com.kamco.makesample.model.InferenceResult;
import com.kamco.makesample.repository.InferenceResultRepository;
import com.kamco.makesample.writer.GeoJsonWriter;
import com.kamco.makesample.writer.ResultZipWriter;
import com.kamco.makesample.writer.ShapefileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
/**
 * Exports inference results from the database as Shapefile + GeoJSON bundles.
 *
 * <p>Three explicit modes are supported via {@code ConverterProperties#getMode()}:
 * <ul>
 *   <li>{@code MERGED} — all records for the configured batch-ids into one output set + ZIP</li>
 *   <li>{@code MAP_IDS} — one output set per explicitly configured map_id</li>
 *   <li>{@code RESOLVE} — map_ids are looked up from the batch-ids, then per-map export</li>
 * </ul>
 * When no mode is configured, the legacy behavior applies: merged when no map-ids are
 * configured, otherwise per-map.
 */
@Service
public class ShapefileConverterService {

  private static final Logger log = LoggerFactory.getLogger(ShapefileConverterService.class);

  private final ConverterProperties converterProperties;
  private final InferenceResultRepository repository;
  private final ShapefileWriter shapefileWriter;
  private final GeoJsonWriter geoJsonWriter;

  public ShapefileConverterService(
      ConverterProperties converterProperties,
      InferenceResultRepository repository,
      ShapefileWriter shapefileWriter,
      GeoJsonWriter geoJsonWriter) {
    this.converterProperties = converterProperties;
    this.repository = repository;
    this.shapefileWriter = shapefileWriter;
    this.geoJsonWriter = geoJsonWriter;
  }

  /**
   * Entry point: dispatches to the conversion strategy selected by configuration.
   *
   * @throws IllegalStateException when batch-ids are missing, when {@code mode=MAP_IDS}
   *     without map-ids, or when an unknown mode is configured
   */
  public void convertAll() {
    List<String> mapIds = converterProperties.getMapIds();
    String mode = converterProperties.getMode();

    // batch-ids are required by every mode.
    if (converterProperties.getBatchIds() == null || converterProperties.getBatchIds().isEmpty()) {
      throw new IllegalStateException("Configuration error: batch-ids must be specified");
    }

    // Locale.ROOT avoids locale-sensitive uppercasing (e.g. Turkish dotted/dotless i)
    // breaking the mode comparison on non-English default locales.
    String m = (mode == null) ? "" : mode.trim().toUpperCase(Locale.ROOT);
    // An explicit mode takes precedence over the legacy map-ids heuristic below.
    if (!m.isEmpty()) {
      switch (m) {
        case "RESOLVE" -> {
          log.info("Starting shapefile conversion (map_ids resolved internally)");
          convertByResolvedMapIds();
          return;
        }
        case "MERGED" -> {
          log.info("Starting merged shapefile conversion");
          convertMerged();
          return;
        }
        case "MAP_IDS" -> {
          if (mapIds == null || mapIds.isEmpty()) {
            throw new IllegalStateException(
                "Configuration error: map-ids must be specified when mode=MAP_IDS");
          }
          log.info("Starting shapefile conversion for {} map_ids", mapIds.size());
          convertByMapIds(mapIds);
          return;
        }
        default ->
            throw new IllegalStateException(
                "Configuration error: unsupported mode="
                    + mode
                    + " (supported: MERGED, MAP_IDS, RESOLVE)");
      }
    }

    // Legacy behavior (no mode configured): merged vs per-map based on map-ids presence.
    if (mapIds == null || mapIds.isEmpty()) {
      log.info("Starting merged shapefile conversion (no map_ids specified)");
      convertMerged();
    } else {
      log.info("Starting shapefile conversion for {} map_ids: {}", mapIds.size(), mapIds);
      convertByMapIds(mapIds);
    }
  }

  /** Converts each map_id independently; one failure does not abort the rest. */
  private void convertByMapIds(List<String> mapIds) {
    int successCount = 0;
    int failureCount = 0;
    for (String mapId : mapIds) {
      try {
        convertSingle(mapId);
        successCount++;
      } catch (Exception e) {
        log.error("Failed to convert map_id {}: {}", mapId, e.getMessage(), e);
        failureCount++;
      }
    }
    log.info("Conversion completed. Success: {}, Failures: {}", successCount, failureCount);
  }

  /** Exports all records of the configured batch-ids into one merged output set + ZIP. */
  private void convertMerged() {
    try {
      log.info("Processing all records for batch_ids: {}", converterProperties.getBatchIds());
      Path outputDir = createMergedOutputDirectory();
      List<InferenceResult> results = repository.findByBatchIds(converterProperties.getBatchIds());
      if (results.isEmpty()) {
        log.warn("No results found for batch_ids: {}", converterProperties.getBatchIds());
        return;
      }
      validateGeometries(results);

      // The inference-id doubles as the output file stem.
      String filename = converterProperties.getInferenceId() + ".shp";
      String shapefilePath = outputDir.resolve(filename).toString();
      shapefileWriter.write(results, shapefilePath, converterProperties.getCrs());

      String geoJsonFilename = converterProperties.getInferenceId() + ".geojson";
      String geoJsonPath = outputDir.resolve(geoJsonFilename).toString();
      geoJsonWriter.write(results, geoJsonPath, converterProperties.getCrs());

      // Bundle the shapefile side-cars and the GeoJSON into a single ZIP.
      ResultZipWriter.createZip(outputDir, converterProperties.getInferenceId());
      log.info(
          "Successfully created merged shapefile and GeoJSON with {} records from {} batch_ids",
          results.size(),
          converterProperties.getBatchIds().size());
    } catch (Exception e) {
      log.error("Failed to create merged shapefile: {}", e.getMessage(), e);
      throw e;
    }
  }

  /** Exports the records of one map_id as a shapefile + GeoJSON pair. */
  private void convertSingle(String mapId) {
    log.debug("Processing map_id: {}", mapId);
    Path outputDir = createOutputDirectory(mapId);
    List<InferenceResult> results =
        repository.findByMapId(converterProperties.getBatchIds(), mapId);
    if (results.isEmpty()) {
      log.warn("No results found for map_id: {}", mapId);
      return;
    }
    validateGeometries(results);
    String shapefilePath = outputDir.resolve(mapId + ".shp").toString();
    shapefileWriter.write(results, shapefilePath, converterProperties.getCrs());
    String geoJsonPath = outputDir.resolve(mapId + ".geojson").toString();
    geoJsonWriter.write(results, geoJsonPath, converterProperties.getCrs());
    log.info(
        "Successfully created shapefile and GeoJSON for map_id {} with {} records",
        mapId,
        results.size());
  }

  /** Resolves map_ids from the configured batch-ids, then converts per map. */
  private void convertByResolvedMapIds() {
    try {
      log.info("Processing all records for batch_ids: {}", converterProperties.getBatchIds());
      List<String> mapIds = repository.findMapIdByBatchIds(converterProperties.getBatchIds());
      convertByMapIds(mapIds);
    } catch (Exception e) {
      // Fixed copy-paste log text: this path resolves map_ids, it does not build
      // the merged shapefile.
      log.error("Failed to resolve map_ids for conversion: {}", e.getMessage(), e);
      throw e;
    }
  }

  /** Creates {@code <base>/<inferenceId>/<mapId>} and returns the path. */
  private Path createOutputDirectory(String mapId) {
    try {
      Path outputPath =
          Paths.get(
              converterProperties.getOutputBaseDir(), converterProperties.getInferenceId(), mapId);
      Files.createDirectories(outputPath);
      log.debug("Created output directory: {}", outputPath);
      return outputPath;
    } catch (IOException e) {
      throw new RuntimeException("Failed to create output directory for map_id: " + mapId, e);
    }
  }

  /** Creates {@code <base>/<inferenceId>/merge} and returns the path. */
  private Path createMergedOutputDirectory() {
    try {
      Path outputPath =
          Paths.get(
              converterProperties.getOutputBaseDir(),
              converterProperties.getInferenceId(),
              "merge");
      Files.createDirectories(outputPath);
      log.info("Created merged output directory: {}", outputPath);
      return outputPath;
    } catch (IOException e) {
      throw new RuntimeException("Failed to create merged output directory", e);
    }
  }

  /**
   * Ensures all non-null geometries share one geometry type — a shapefile cannot
   * mix geometry types in a single layer.
   *
   * @throws MixedGeometryException when more than one geometry type is present
   */
  private void validateGeometries(List<InferenceResult> results) {
    Set<String> geometryTypes =
        results.stream()
            .filter(r -> r.getGeometry() != null)
            .map(r -> r.getGeometry().getGeometryType())
            .collect(Collectors.toSet());
    if (geometryTypes.size() > 1) {
      throw new MixedGeometryException(
          "Shapefile requires homogeneous geometry type. Found: " + geometryTypes);
    }
    if (geometryTypes.isEmpty()) {
      log.warn("No valid geometries found in results");
    }
  }
}

View File

@@ -0,0 +1,151 @@
package com.kamco.makesample.writer;
import com.kamco.makesample.exception.ShapefileConversionException;
import com.kamco.makesample.model.InferenceResult;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;
import org.geotools.api.feature.simple.SimpleFeature;
import org.geotools.api.feature.simple.SimpleFeatureType;
import org.geotools.api.referencing.FactoryException;
import org.geotools.api.referencing.crs.CoordinateReferenceSystem;
import org.geotools.feature.DefaultFeatureCollection;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.geojson.feature.FeatureJSON;
import org.geotools.referencing.CRS;
import org.locationtech.jts.geom.Geometry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
 * Writes inference results as a GeoJSON FeatureCollection.
 *
 * <p>The attribute layout mirrors {@code ShapefileWriter}'s schema so both export
 * formats stay interchangeable.
 */
@Component
public class GeoJsonWriter {

  private static final Logger log = LoggerFactory.getLogger(GeoJsonWriter.class);

  /**
   * Writes the given results to a GeoJSON file.
   *
   * @param results records to export; a null/empty list is a no-op
   * @param outputPath destination path of the .geojson file
   * @param crsCode CRS code (e.g. EPSG-style) applied to the feature collection
   */
  public void write(List<InferenceResult> results, String outputPath, String crsCode) {
    if (results == null || results.isEmpty()) {
      log.warn("No results to write to GeoJSON");
      return;
    }
    try {
      CoordinateReferenceSystem crs = CRS.decode(crsCode);
      SimpleFeatureType featureType = createFeatureType(crs, determineGeometryType(results));
      SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(featureType);
      DefaultFeatureCollection collection = new DefaultFeatureCollection();

      int withGeometry = 0;
      int withoutGeometry = 0;
      for (InferenceResult row : results) {
        if (row.getGeometry() == null) {
          withoutGeometry++;
        } else {
          withGeometry++;
        }
        collection.add(buildFeature(featureBuilder, row));
      }
      log.info(
          "Built {} features for GeoJSON: {} with geometry, {} without geometry",
          results.size(),
          withGeometry,
          withoutGeometry);

      File geoJsonFile = new File(outputPath);
      File parentDir = geoJsonFile.getParentFile();
      if (parentDir != null) {
        parentDir.mkdirs(); // make sure the target directory exists before opening the stream
      }
      try (FileOutputStream fos = new FileOutputStream(geoJsonFile)) {
        FeatureJSON featureJSON = new FeatureJSON();
        featureJSON.setEncodeFeatureCollectionCRS(true); // embed the CRS in the output
        featureJSON.writeFeatureCollection(collection, fos);
        log.info("Successfully wrote {} features to GeoJSON: {}", results.size(), outputPath);
      }
    } catch (FactoryException e) {
      throw new ShapefileConversionException("Invalid CRS code: " + crsCode, e);
    } catch (IOException e) {
      throw new ShapefileConversionException("Failed to create GeoJSON at: " + outputPath, e);
    }
  }

  /** Declares the feature schema: geometry first, then the attribute columns. */
  private SimpleFeatureType createFeatureType(
      CoordinateReferenceSystem crs, Class<?> geometryType) {
    SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
    builder.setName("inference_results");
    builder.setCRS(crs);

    // The geometry column must be registered as the default geometry.
    builder.add("the_geom", geometryType);
    builder.setDefaultGeometry("the_geom");

    // Attribute schema — identical to ShapefileWriter's.
    builder.add("uid", String.class);
    builder.add("map_id", String.class);
    builder.add("chn_dtct_p", String.class);
    builder.add("cprs_yr", Long.class);
    builder.add("crtr_yr", Long.class);
    builder.add("bf_cls_cd", String.class);
    builder.add("bf_cls_pro", String.class);
    builder.add("af_cls_cd", String.class);
    builder.add("af_cls_pro", String.class);
    return builder.buildFeatureType();
  }

  /** Populates one feature in schema order; null numeric values default to 0 / "0.0". */
  private SimpleFeature buildFeature(SimpleFeatureBuilder builder, InferenceResult result) {
    String uid = result.getUid();

    // Values must be appended in the same order the schema declares them.
    builder.add(result.getGeometry()); // the_geom
    builder.add(uid);
    builder.add(result.getMapId());
    builder.add(asText(result.getProbability()));
    builder.add(orZero(result.getBeforeYear()));
    builder.add(orZero(result.getAfterYear()));
    builder.add(result.getBeforeC());
    builder.add(asText(result.getBeforeP()));
    builder.add(result.getAfterC());
    builder.add(asText(result.getAfterP()));
    return builder.buildFeature(uid);
  }

  /** Formats a nullable Double as text, defaulting to "0.0". */
  private static String asText(Double value) {
    return value == null ? "0.0" : String.valueOf(value);
  }

  /** Replaces a null year with 0. */
  private static Long orZero(Long value) {
    return value == null ? 0L : value;
  }

  /** Picks the concrete class of the first non-null geometry. */
  private Class<?> determineGeometryType(List<InferenceResult> results) {
    for (InferenceResult candidate : results) {
      Geometry geometry = candidate.getGeometry();
      if (geometry != null) {
        return geometry.getClass();
      }
    }
    throw new ShapefileConversionException("No valid geometries found in results");
  }
}

View File

@@ -0,0 +1,68 @@
package com.kamco.makesample.writer;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.List;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
 * Packages the shapefile side-car files and the GeoJSON of one result set into a single ZIP.
 *
 * <p>The archive is first written to a {@code .zip.tmp} file and then atomically renamed,
 * so readers never observe a partially written ZIP.
 */
public final class ResultZipWriter {

  private static final int BUF = 1024 * 1024; // 1 MB copy buffer

  /** Result-file extensions to include; files that do not exist are silently skipped. */
  // NOTE(review): shapefile exports may also produce .cpg/.qix side-cars — confirm
  // whether those should be archived as well.
  private static final List<String> EXTENSIONS =
      List.of("shp", "shx", "dbf", "prj", "fix", "geojson");

  private ResultZipWriter() {
    // static utility class; not instantiable
  }

  /**
   * Creates {@code <baseName>.zip} inside {@code dirPath} containing every existing
   * {@code <baseName>.<ext>} file for the known extensions.
   *
   * @param dirPath 결과 파일들이 있는 디렉터리 (directory holding the result files; also
   *     receives the ZIP)
   * @param baseName uid — common file stem of the result files
   * @throws RuntimeException when the archive cannot be written or finalized; the
   *     temporary file is cleaned up on failure
   */
  public static void createZip(Path dirPath, String baseName) {
    Path zip = dirPath.resolve(baseName + ".zip");
    Path tmp = dirPath.resolve(baseName + ".zip.tmp");
    try (OutputStream os = Files.newOutputStream(tmp);
        BufferedOutputStream bos = new BufferedOutputStream(os);
        ZipOutputStream zos = new ZipOutputStream(bos)) {
      zos.setLevel(Deflater.BEST_SPEED); // favor speed over compression ratio
      byte[] buffer = new byte[BUF];
      for (String ext : EXTENSIONS) {
        Path file = dirPath.resolve(baseName + "." + ext);
        if (!Files.exists(file)) {
          continue;
        }
        zos.putNextEntry(new ZipEntry(file.getFileName().toString()));
        try (InputStream is = Files.newInputStream(file);
            BufferedInputStream bis = new BufferedInputStream(is, BUF)) {
          int len;
          while ((len = bis.read(buffer)) != -1) {
            zos.write(buffer, 0, len);
          }
        }
        zos.closeEntry();
      }
    } catch (IOException e) {
      deleteQuietly(tmp); // do not leave a stale partial archive behind (was leaked before)
      throw new RuntimeException("ZIP 생성 실패", e);
    }
    try {
      // Same-directory atomic rename publishes the finished archive.
      Files.move(tmp, zip, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE);
    } catch (IOException e) {
      deleteQuietly(tmp);
      throw new RuntimeException("ZIP 완료 처리 실패", e);
    }
  }

  /** Best-effort removal of the temporary archive; failures are intentionally ignored. */
  private static void deleteQuietly(Path path) {
    try {
      Files.deleteIfExists(path);
    } catch (IOException ignored) {
      // cleanup only — the original exception matters more
    }
  }
}

View File

@@ -0,0 +1,211 @@
package com.kamco.makesample.writer;
import com.kamco.makesample.exception.ShapefileConversionException;
import com.kamco.makesample.model.InferenceResult;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.geotools.api.data.SimpleFeatureSource;
import org.geotools.api.data.SimpleFeatureStore;
import org.geotools.api.data.Transaction;
import org.geotools.api.feature.simple.SimpleFeature;
import org.geotools.api.feature.simple.SimpleFeatureType;
import org.geotools.api.referencing.FactoryException;
import org.geotools.api.referencing.crs.CoordinateReferenceSystem;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.shapefile.ShapefileDataStore;
import org.geotools.data.shapefile.ShapefileDataStoreFactory;
import org.geotools.feature.DefaultFeatureCollection;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.referencing.CRS;
import org.locationtech.jts.geom.Geometry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
 * Writes inference results to an ESRI shapefile (with .shx/.dbf/.prj side-cars).
 *
 * <p>Field names are kept at 10 characters or fewer because of the DBF column-name
 * limit. All geometries written to one shapefile must share a single geometry type.
 */
@Component
public class ShapefileWriter {

  private static final Logger log = LoggerFactory.getLogger(ShapefileWriter.class);

  /**
   * Writes the given results to a shapefile.
   *
   * @param results records to export; a null/empty list is a no-op
   * @param outputPath destination path of the .shp file
   * @param crsCode CRS code (e.g. EPSG-style) used for the shapefile projection
   * @throws ShapefileConversionException on invalid CRS or any I/O / write failure
   */
  public void write(List<InferenceResult> results, String outputPath, String crsCode) {
    if (results == null || results.isEmpty()) {
      log.warn("No results to write to shapefile");
      return;
    }
    try {
      CoordinateReferenceSystem crs = CRS.decode(crsCode);
      Class<?> geometryType = determineGeometryType(results);
      SimpleFeatureType featureType = createFeatureType(crs, geometryType);

      File shpFile = new File(outputPath);
      ShapefileDataStoreFactory factory = new ShapefileDataStoreFactory();
      Map<String, Serializable> params = new HashMap<>();
      params.put("url", shpFile.toURI().toURL());
      params.put("create spatial index", Boolean.TRUE);
      ShapefileDataStore dataStore = (ShapefileDataStore) factory.createNewDataStore(params);
      // Dispose the store on every path; previously it leaked when createSchema() threw
      // because dispose() only ran after the transaction block.
      try {
        dataStore.createSchema(featureType);
        DefaultFeatureCollection collection = buildCollection(featureType, results);
        writeFeatures(dataStore, collection, outputPath, results.size());
      } finally {
        dataStore.dispose();
      }
    } catch (FactoryException e) {
      throw new ShapefileConversionException("Invalid CRS code: " + crsCode, e);
    } catch (IOException e) {
      throw new ShapefileConversionException("Failed to create shapefile at: " + outputPath, e);
    }
  }

  /** Builds the in-memory feature collection and logs geometry statistics. */
  private DefaultFeatureCollection buildCollection(
      SimpleFeatureType featureType, List<InferenceResult> results) {
    DefaultFeatureCollection collection = new DefaultFeatureCollection();
    SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(featureType);
    int geometryCount = 0;
    int nullGeometryCount = 0;
    for (InferenceResult result : results) {
      if (result.getGeometry() != null) {
        geometryCount++;
      } else {
        nullGeometryCount++;
      }
      collection.add(buildFeature(featureBuilder, result));
    }
    log.info(
        "Built {} features: {} with geometry, {} without geometry",
        results.size(),
        geometryCount,
        nullGeometryCount);
    return collection;
  }

  /** Writes the collection inside a transaction, rolling back on failure. */
  private void writeFeatures(
      ShapefileDataStore dataStore,
      DefaultFeatureCollection collection,
      String outputPath,
      int total)
      throws IOException {
    Transaction transaction = new DefaultTransaction("create");
    try {
      String typeName = dataStore.getTypeNames()[0];
      SimpleFeatureSource featureSource = dataStore.getFeatureSource(typeName);
      if (!(featureSource instanceof SimpleFeatureStore)) {
        throw new ShapefileConversionException("Feature source is read-only");
      }
      SimpleFeatureStore featureStore = (SimpleFeatureStore) featureSource;
      featureStore.setTransaction(transaction);
      featureStore.addFeatures(collection);
      transaction.commit();
      log.info("Successfully wrote {} features to shapefile: {}", total, outputPath);
    } catch (Exception e) {
      transaction.rollback();
      throw new ShapefileConversionException("Failed to write features to shapefile", e);
    } finally {
      transaction.close();
    }
  }

  /**
   * Declares the shapefile schema: geometry first, then the attribute columns.
   * Column layout matches {@code GeoJsonWriter}'s so both exports stay consistent.
   */
  private SimpleFeatureType createFeatureType(
      CoordinateReferenceSystem crs, Class<?> geometryType) {
    SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
    builder.setName("inference_results");
    builder.setCRS(crs);
    // The geometry column must be registered as the default geometry.
    builder.add("the_geom", geometryType);
    builder.setDefaultGeometry("the_geom");
    // Attribute columns (names kept within the 10-character DBF limit).
    builder.add("uid", String.class);
    builder.add("map_id", String.class);
    builder.add("chn_dtct_p", String.class);
    builder.add("cprs_yr", Long.class);
    builder.add("crtr_yr", Long.class);
    builder.add("bf_cls_cd", String.class);
    builder.add("bf_cls_pro", String.class);
    builder.add("af_cls_cd", String.class);
    builder.add("af_cls_pro", String.class);
    return builder.buildFeatureType();
  }

  /** Populates one feature in schema order; null numeric values default to 0 / "0.0". */
  private SimpleFeature buildFeature(SimpleFeatureBuilder builder, InferenceResult result) {
    Geometry geom = result.getGeometry();
    if (geom == null) {
      log.warn("Null geometry detected for uid: {}", result.getUid());
    } else {
      log.debug(
          "Adding geometry for uid {}: type={}, valid={}, numPoints={}",
          result.getUid(),
          geom.getGeometryType(),
          geom.isValid(),
          geom.getNumPoints());
    }
    // Values must be appended in the same order the schema declares them.
    builder.add(geom); // the_geom
    builder.add(result.getUid());
    builder.add(result.getMapId());
    builder.add(result.getProbability() != null ? String.valueOf(result.getProbability()) : "0.0");
    builder.add(result.getBeforeYear() != null ? result.getBeforeYear() : 0L);
    builder.add(result.getAfterYear() != null ? result.getAfterYear() : 0L);
    builder.add(result.getBeforeC());
    builder.add(result.getBeforeP() != null ? String.valueOf(result.getBeforeP()) : "0.0");
    builder.add(result.getAfterC());
    builder.add(result.getAfterP() != null ? String.valueOf(result.getAfterP()) : "0.0");
    // A null FID lets GeoTools generate one; shapefiles do not persist feature IDs anyway.
    return builder.buildFeature(null);
  }

  /** Picks the concrete class of the first non-null geometry. */
  private Class<?> determineGeometryType(List<InferenceResult> results) {
    for (InferenceResult result : results) {
      Geometry geometry = result.getGeometry();
      if (geometry != null) {
        return geometry.getClass();
      }
    }
    throw new ShapefileConversionException("No valid geometries found in results");
  }
}

Some files were not shown because too many files have changed in this diff Show More