16 Commits

Author SHA1 Message Date
207078e5a2 Merge remote-tracking branch 'origin/main' 2026-02-23 16:15:25 +09:00
ee3f86f8ac shp export 소스 추가 2026-02-23 16:15:11 +09:00
ca7c5e7fa7 test 2026-02-23 15:18:59 +09:00
e92a56ff4f add log 2026-02-12 20:27:44 +09:00
d1a3c8cc2c add log 2026-02-12 20:27:24 +09:00
5c47d111b1 build 2026-02-10 15:11:09 +09:00
b940db8e73 Revert "build: add executable boot jar"
This reverts commit 4a3e299325.

revert: remove jar
2026-02-10 14:48:10 +09:00
4a3e299325 build: add executable boot jar 2026-02-10 14:19:20 +09:00
c2d4d3a5f0 Split the function 2026-02-09 20:52:41 +09:00
ef31309f77 Split the function 2026-02-09 20:36:07 +09:00
e697867bb0 Split the function 2026-02-09 20:34:33 +09:00
b4fff05460 Split the function 2026-02-09 20:29:50 +09:00
703c25aadf Split the function 2026-02-09 20:26:38 +09:00
be4840bb58 Split the function 2026-02-09 20:21:22 +09:00
b1765e9d1f Split the function 2026-02-09 20:21:09 +09:00
0ff3f43a99 Split the function 2026-02-09 20:17:12 +09:00
118 changed files with 3813 additions and 40 deletions

View File

@@ -8,7 +8,7 @@ spring:
active: prod # 사용할 프로파일 지정 (ex. dev, prod, test)
datasource:
url: jdbc:postgresql://192.168.2.127:15432/kamco_cds
url: jdbc:postgresql://172.16.4.56:15432/kamco_cds
#url: jdbc:postgresql://localhost:5432/kamco_cds
username: kamco_cds
password: kamco_cds_Q!W@E#R$
@@ -59,8 +59,8 @@ management:
file:
#sync-root-dir: D:/kamco-nfs/images/
sync-root-dir: /kamco-nfs/images/
sync-tmp-dir: ${file.sync-root-dir}/tmp
sync-root-dir: /data/images/
sync-tmp-dir: /data/repo/tmp
sync-file-extention: tfw,tif
sync-auto-exception-start-year: 2025
sync-auto-exception-before-year-cnt: 3

View File

@@ -1,12 +1,13 @@
package com.kamco.cd.geojsonscheduler;
import com.kamco.cd.geojsonscheduler.config.DockerProperties;
import com.kamco.cd.geojsonscheduler.config.TrainDockerProperties;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
@SpringBootApplication
@EnableConfigurationProperties(DockerProperties.class)
@EnableConfigurationProperties({DockerProperties.class, TrainDockerProperties.class})
public class GeoJsonSchedulerApplication {
public static void main(String[] args) {

View File

@@ -4,6 +4,7 @@ import com.kamco.cd.geojsonscheduler.service.DockerRunnerService;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
@@ -45,6 +46,7 @@ import org.springframework.stereotype.Component;
*/
@Log4j2
@Component
@StepScope
@RequiredArgsConstructor
public class DockerRunTasklet implements Tasklet {

View File

@@ -6,7 +6,9 @@ import java.util.List;
import java.util.Objects;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.StepContribution;
@@ -16,14 +18,21 @@ import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;
/**
* Child Job 실행 Tasklet (Parent Job용)
*
* <p>진행 중인 모든 분석 회차(AnalCntInfo)를 조회하여 각 회차마다 독립적인 Child Job을 실행합니다. 각 Child Job은 3개의
* <p>
* 진행 중인 모든 분석 회차(AnalCntInfo)를 조회하여 각 회차마다 독립적인 Child Job을 실행합니다. 각 Child Job은
* 3개의
* Step(makeGeoJson → dockerRun → zipResponse)을 순차적으로 실행합니다.
*
* <p><b>주요 기능:</b>
* <p>
* <b>주요 기능:</b>
*
* <ul>
* <li>진행 중인 분석 회차 목록 조회 (tb_map_sheet_anal_inference, anal_state='ING')
@@ -32,7 +41,8 @@ import org.springframework.stereotype.Component;
* <li>부분 실패 허용 (한 회차 실패해도 다른 회차 계속 처리)
* </ul>
*
* <p><b>실행 조건:</b>
* <p>
* <b>실행 조건:</b>
*
* <ul>
* <li>tb_map_sheet_anal_inference.anal_state = 'ING' (진행 중)
@@ -40,7 +50,8 @@ import org.springframework.stereotype.Component;
* <li>all_cnt != file_cnt (아직 파일 생성이 완료되지 않음)
* </ul>
*
* <p><b>실패 정책:</b>
* <p>
* <b>실패 정책:</b>
*
* <ul>
* <li>현재: 부분 실패 허용 (일부 Child Job 실패해도 Parent Job 성공)
@@ -59,17 +70,23 @@ public class LaunchChildJobsTasklet implements Tasklet {
/** 분석 회차 정보 조회를 위한 Repository */
private final TrainingDataReviewJobRepository repository;
/** Child Job을 실행하기 위한 JobLauncher */
private final JobLauncher jobLauncher;
/** Child Job을 실행하기 위한 비동기 JobLauncher (트랜잭션 충돌 방지) */
@Qualifier("asyncJobLauncher")
private final JobLauncher asyncJobLauncher;
/** 실행할 Child Job (processAnalCntInfoJob) */
@Qualifier("processAnalCntInfoJob")
private final Job processAnalCntInfoJob;
/** 트랜잭션 매니저 (트랜잭션 제어용) */
private final PlatformTransactionManager transactionManager;
/**
* Parent Job의 메인 로직 실행
*
* <p>진행 중인 모든 분석 회차를 조회하여 각 회차마다 Child Job을 실행합니다. 한 회차가 실패해도 다른 회차는 계속 처리되며, 최종적으로 통계를
* <p>
* 진행 중인 모든 분석 회차를 조회하여 각 회차마다 Child Job을 실행합니다. 한 회차가 실패해도 다른 회차는 계속 처리되며,
* 최종적으로 통계를
* 로깅합니다.
*
* @param contribution Step 실행 정보를 담는 객체
@@ -130,8 +147,7 @@ public class LaunchChildJobsTasklet implements Tasklet {
try {
// Child Job Parameters 생성
JobParameters jobParameters =
new JobParametersBuilder()
JobParameters jobParameters = new JobParametersBuilder()
.addLong("analUid", info.getAnalUid())
.addString("resultUid", info.getResultUid())
.addLong("timestamp", System.currentTimeMillis()) // JobInstance 고유성 보장
@@ -141,18 +157,70 @@ public class LaunchChildJobsTasklet implements Tasklet {
log.info(" - JobParameters: analUid={}, resultUid={}", info.getAnalUid(),
info.getResultUid());
// Child Job 실행 (동기 방식)
// Child Job 실행 (비동기 실행 후 완료 폴링 - 트랜잭션 충돌 방지)
// asyncJobLauncher를 사용하여 별도 쓰레드에서 실행
// 내부적으로 makeGeoJsonStep → dockerRunStep → zipResponseStep 순차 실행
long startTime = System.currentTimeMillis();
jobLauncher.run(processAnalCntInfoJob, jobParameters);
long duration = System.currentTimeMillis() - startTime;
// 트랜잭션 일시 정지 후 실행 (Existing transaction detected 에러 방지)
JobExecution jobExecution = new TransactionTemplate(transactionManager,
new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_NOT_SUPPORTED))
.execute(status -> {
try {
return asyncJobLauncher.run(processAnalCntInfoJob, jobParameters);
} catch (Exception e) {
throw new RuntimeException(e);
}
});
if (jobExecution == null) {
throw new RuntimeException("JobExecution is null after launching child job");
}
// Child Job 완료 대기 (비동기 실행이므로 완료를 폴링)
log.info("[Child Job 대기] 실행 완료 대기 중... (JobExecutionId={})",
jobExecution.getId());
while (jobExecution.isRunning()) {
try {
Thread.sleep(1000); // 1초마다 상태 확인
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException("Child Job 대기 중 인터럽트 발생", e);
}
}
long duration = System.currentTimeMillis() - startTime;
BatchStatus status = jobExecution.getStatus();
if (status == BatchStatus.COMPLETED) {
log.info("[Child Job 완료] ✓ 정상 종료");
log.info(" - AnalUid: {}", info.getAnalUid());
log.info(" - ResultUid: {}", info.getResultUid());
log.info(" - 실행 시간: {} ms ({} 초)", duration, duration / 1000);
log.info(" - 최종 상태: {}", status);
processedCount++;
} else {
// Child Job 실패
log.error("[Child Job 실패] ✗ 비정상 종료");
log.error(" - AnalUid: {}", info.getAnalUid());
log.error(" - ResultUid: {}", info.getResultUid());
log.error(" - 실행 시간: {} ms ({} 초)", duration, duration / 1000);
log.error(" - 최종 상태: {}", status);
log.error(" - Exit 상태: {}", jobExecution.getExitStatus());
// 실패 예외 정보 로깅
if (!jobExecution.getAllFailureExceptions().isEmpty()) {
log.error(" - 실패 예외:");
for (Throwable t : jobExecution.getAllFailureExceptions()) {
log.error(" * {}: {}", t.getClass().getSimpleName(), t.getMessage());
}
}
failedCount++;
// 실패해도 다음 회차 계속 처리
log.info("[계속 진행] 다음 회차 처리를 계속합니다.");
continue; // 다음 for 루프로
}
} catch (Exception e) {
// Child Job 실행 실패 시 (Step 실패 또는 예외 발생)
@@ -181,8 +249,7 @@ public class LaunchChildJobsTasklet implements Tasklet {
// 성공률 계산
if (analList.size() > 0) {
double successRate =
(double) processedCount / (analList.size() - skippedCount) * 100;
double successRate = (double) processedCount / (analList.size() - skippedCount) * 100;
log.info(" - 성공률: {}% (건너뛴 회차 제외)", String.format("%.2f", successRate));
}

View File

@@ -16,6 +16,7 @@ import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
@@ -46,6 +47,7 @@ import org.springframework.stereotype.Component;
*/
@Log4j2
@Component
@StepScope
@RequiredArgsConstructor
public class MakeGeoJsonTasklet implements Tasklet {
@@ -140,7 +142,7 @@ public class MakeGeoJsonTasklet implements Tasklet {
try {
// Step 2-6: 디렉토리 생성 (존재하지 않으면)
log.info(" [2-6] 디렉토리 생성 중...");
log.info(" [2-6] 디렉토리 생성 중... {}", outputPath.getParent());
Files.createDirectories(outputPath.getParent());
log.info(" [2-6] 디렉토리 생성 완료: {}", outputPath.getParent());

View File

@@ -12,6 +12,7 @@ import java.util.zip.ZipOutputStream;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
@@ -53,6 +54,7 @@ import org.springframework.stereotype.Component;
*/
@Log4j2
@Component
@StepScope
@RequiredArgsConstructor
public class ZipResponseTasklet implements Tasklet {

View File

@@ -0,0 +1,44 @@
package com.kamco.cd.geojsonscheduler.config;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.launch.support.TaskExecutorJobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
/**
 * Configuration for an asynchronous {@link JobLauncher}.
 *
 * <p>When a child job is launched from inside a parent job's step, the launch happens within the
 * parent step's transaction, and the synchronous launcher fails with "Existing transaction
 * detected in JobRepository". Running the child job on a separate thread via an async
 * {@link TaskExecutorJobLauncher} detaches it from the parent transaction.
 *
 * @author KAMCO Development Team
 * @since 1.0.0
 */
@Configuration
public class AsyncJobLauncherConfig {
  /**
   * Builds a {@link JobLauncher} backed by a {@link SimpleAsyncTaskExecutor}, so each launched job
   * runs on its own thread, isolated from the caller's transaction.
   *
   * @param jobRepository the JobRepository used to persist job/step execution state
   * @return an asynchronous JobLauncher exposed under the bean name {@code asyncJobLauncher}
   * @throws Exception if launcher initialization ({@code afterPropertiesSet}) fails
   */
  @Bean(name = "asyncJobLauncher")
  public JobLauncher asyncJobLauncher(JobRepository jobRepository) throws Exception {
    TaskExecutorJobLauncher launcher = new TaskExecutorJobLauncher();
    launcher.setJobRepository(jobRepository);
    // Each run() dispatches onto a fresh thread instead of blocking the caller.
    launcher.setTaskExecutor(new SimpleAsyncTaskExecutor());
    launcher.afterPropertiesSet();
    return launcher;
  }
}

View File

@@ -144,7 +144,7 @@ public class DockerRunnerService {
* @param resultUid 결과물 고유 ID (입력/출력 폴더 경로에 사용)
* @return Docker 명령어 문자열 리스트 (ProcessBuilder 실행용)
*/
private List<String> cmd buildCommand(String resultUid) {
private List<String> buildCommand(String resultUid) {
log.debug("Docker 명령어 파라미터 구성 중...");
List<String> cmd = new ArrayList<>();

View File

@@ -6,6 +6,9 @@ spring:
hikari:
minimum-idle: 2
maximum-pool-size: 5
batch:
job:
name: exportGeoJsonJob # 기본 실행 Job 지정
training-data:
geojson-dir: /kamco-nfs/dataset

View File

@@ -3,6 +3,9 @@ spring:
url: jdbc:postgresql://localhost:5432/kamco_cds
username: kamco_cds
password: kamco_cds
batch:
job:
name: exportGeoJsonJob # 기본 실행 Job 지정
training-data:
geojson-dir: /tmp/geojson

View File

@@ -6,6 +6,26 @@ spring:
hikari:
minimum-idle: 2
maximum-pool-size: 5
batch:
job:
name: exportGeoJsonJob # 기본 실행 Job 지정
training-data:
geojson-dir: /kamco-nfs/dataset
# Train Model Docker Configuration
train-data:
docker:
image: kamco-cd-train:latest
data-volume: /kamco-nfs/dataset:/data
checkpoints-volume: /kamco-nfs/checkpoints:/checkpoints
dataset-folder: /data/dataset
output-folder: /data/output
input-size: "512"
crop-size: "256"
batch-size: 8
gpu-ids: "0,1,2,3"
gpus: 4
lr: "0.001"
backbone: resnet50
epochs: 100

View File

@@ -21,6 +21,7 @@ spring:
batch:
job:
enabled: true
name: exportGeoJsonJob # 기본 실행 Job 지정
jdbc:
initialize-schema: always

View File

@@ -0,0 +1,7 @@
{
"permissions": {
"allow": [
"WebSearch"
]
}
}

15
shp-exporter/.editorconfig Executable file
View File

@@ -0,0 +1,15 @@
root = true
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
[*.java]
indent_style = space
indent_size = 2
[*.{gradle,yml,yaml}]
indent_style = space
indent_size = 2

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

View File

@@ -0,0 +1,2 @@
#Wed Jan 14 15:14:03 KST 2026
gradle.version=8.14.3

Binary file not shown.

Binary file not shown.

View File

8
shp-exporter/.idea/.gitignore generated vendored Executable file
View File

@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

9
shp-exporter/.idea/compiler.xml generated Executable file
View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
<bytecodeTargetLevel target="17" />
</component>
<component name="JavacSettings">
<option name="ADDITIONAL_OPTIONS_STRING" value="-parameters" />
</component>
</project>

19
shp-exporter/.idea/gradle.xml generated Executable file
View File

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="GradleMigrationSettings" migrationVersion="1" />
<component name="GradleSettings">
<option name="linkedExternalProjectsSettings">
<GradleProjectSettings>
<option name="delegatedBuild" value="false" />
<option name="testRunner" value="PLATFORM" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
</set>
</option>
<option name="resolveExternalAnnotations" value="true" />
</GradleProjectSettings>
</option>
</component>
</project>

35
shp-exporter/.idea/jarRepositories.xml generated Executable file
View File

@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RemoteRepositoriesConfiguration">
<remote-repository>
<option name="id" value="central" />
<option name="name" value="Maven Central repository" />
<option name="url" value="https://repo1.maven.org/maven2" />
</remote-repository>
<remote-repository>
<option name="id" value="jboss.community" />
<option name="name" value="JBoss Community repository" />
<option name="url" value="https://repository.jboss.org/nexus/content/repositories/public/" />
</remote-repository>
<remote-repository>
<option name="id" value="MavenRepo" />
<option name="name" value="MavenRepo" />
<option name="url" value="https://repo.maven.apache.org/maven2/" />
</remote-repository>
<remote-repository>
<option name="id" value="maven" />
<option name="name" value="maven" />
<option name="url" value="https://repo.osgeo.org/repository/release/" />
</remote-repository>
<remote-repository>
<option name="id" value="maven2" />
<option name="name" value="maven2" />
<option name="url" value="https://repo.osgeo.org/repository/geotools-releases/" />
</remote-repository>
<remote-repository>
<option name="id" value="maven3" />
<option name="name" value="maven3" />
<option name="url" value="https://repo.osgeo.org/repository/snapshot/" />
</remote-repository>
</component>
</project>

9
shp-exporter/.idea/makesample_geoserver.iml generated Executable file
View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

10
shp-exporter/.idea/misc.xml generated Executable file
View File

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="FrameworkDetectionExcludesConfiguration">
<file type="web" url="file://$PROJECT_DIR$" />
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_17" default="true" project-jdk-name="17" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/out" />
</component>
</project>

122
shp-exporter/CLAUDE.md Executable file
View File

@@ -0,0 +1,122 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
Spring Boot CLI application that queries PostgreSQL PostGIS spatial data and converts it to ESRI shapefiles. The application processes inference results from the KAMCO database and generates geographic shapefiles for visualization in GIS applications.
## Build and Run Commands
### Build
```bash
./gradlew build
```
### Run Application
```bash
./gradlew bootRun
```
Or run the built JAR:
```bash
java -jar build/libs/shp-exporter.jar
```
### Code Formatting
Apply Google Java Format (2-space indentation) before committing:
```bash
./gradlew spotlessApply
```
Check formatting without applying:
```bash
./gradlew spotlessCheck
```
## Architecture
### Processing Pipeline
The application follows a layered architecture with a linear data flow:
1. **CLI Entry** (`ConverterCommandLineRunner`) → Reads configuration and initiates batch processing
2. **Service Orchestration** (`ShapefileConverterService`) → Coordinates the conversion workflow for each map_id
3. **Data Access** (`InferenceResultRepository`) → Queries PostGIS database and converts WKT to JTS geometries
4. **Geometry Conversion** (`GeometryConverter`) → Converts PostGIS WKT format to JTS Geometry objects
5. **Shapefile Writing** (`ShapefileWriter`) → Uses GeoTools to generate shapefile artifacts (.shp, .shx, .dbf, .prj)
### Key Design Points
**Geometry Handling**: The application uses a two-step geometry conversion process:
- PostGIS returns geometries as WKT (Well-Known Text) via `ST_AsText(geometry)`
- `GeometryConverter` parses WKT to JTS `Geometry` objects
- `ShapefileWriter` uses JTS geometries with GeoTools to write shapefiles
**Batch Processing**: Configuration in `application.yml` drives batch execution:
- Multiple `map-ids` processed sequentially (if specified)
- If `map-ids` is null/empty, creates a merged shapefile for all batch-ids
- Each map_id filtered by `batch-ids` array
- Output directory structure: `{output-base-dir}/{inference-id}/{map-id}/` or `{output-base-dir}/{inference-id}/merge/` for merged mode
- Separate output directory created for each map_id
**Shapefile Constraints**: The application validates that all geometries for a single shapefile are homogeneous (same type) because shapefiles cannot contain mixed geometry types. This validation happens in `ShapefileConverterService.validateGeometries()`.
**Feature Schema**: GeoTools requires explicit geometry field setup:
- Default geometry field named `the_geom` (not `geometry`)
- Field names truncated to 10 characters for DBF format compatibility
- Geometry type determined from first valid geometry in result set
## Configuration
Primary configuration in `src/main/resources/application.yml`:
```yaml
converter:
inference-id: 'D5E46F60FC40B1A8BE0CD1F3547AA6' # Inference ID (used for output folder structure)
map-ids: ['35813030'] # List of map_ids to process (text type), omit for merged shapefile
batch-ids: [252, 253, 257] # Batch ID array filter
output-base-dir: '/kamco-nfs/dataset/export/' # Base directory for shapefile output
crs: 'EPSG:5186' # Korean 2000 / Central Belt CRS
```
Database connection configured via standard Spring Boot datasource properties.
## Database Integration
### Query Pattern
The repository uses `PreparedStatementCreator` to handle PostgreSQL array parameters:
```sql
WHERE batch_id = ANY(?) AND map_id = ?
```
The `ANY(?)` clause requires creating a PostgreSQL array using `Connection.createArrayOf("bigint", ...)`.
### Field Mapping
Database columns are mapped to shapefile fields with Korean naming:
| Database Column | Shapefile Field |
|-----------------|-----------------|
| uid | uid |
| map_id | map_id |
| probability | chn_dtct_prob |
| before_year | cprs_yr |
| after_year | crtr_yr |
| before_c | bf_cls_cd |
| before_p | bf_cls_prob |
| after_c | af_cls_cd |
| after_p | af_cls_prob |
| geometry | the_geom |
### Coordinate Reference System
All geometries use **EPSG:5186** (Korean 2000 / Central Belt). The PostGIS geometry column is defined as `geometry(Polygon, 5186)`, and this CRS is preserved in the output shapefile's `.prj` file via GeoTools CRS encoding.
## Dependencies
Key libraries and their roles:
- **GeoTools 30.0**: Shapefile generation (`gt-shapefile`, `gt-referencing`, `gt-epsg-hsql`)
- **JTS 1.19.0**: Java Topology Suite for geometry representation
- **PostGIS JDBC 2.5.1**: PostgreSQL spatial extension support
- **Spring Boot 3.5.7**: Framework for DI, JDBC, and configuration
Note: `javax.media:jai_core` is excluded in `build.gradle` to avoid conflicts.

388
shp-exporter/README.md Executable file
View File

@@ -0,0 +1,388 @@
# PostgreSQL to Shapefile Converter
Spring Boot CLI application that queries PostgreSQL spatial data and generates shapefiles.
## Features
- Batch processing for multiple map_ids
- PostGIS geometry to Shapefile conversion
- **GeoServer REST API integration for automatic layer registration**
- Configurable via `application.yml`
- Generates all required shapefile files (.shp, .shx, .dbf, .prj)
- Supports EPSG:5186 (Korean 2000 / Central Belt) coordinate reference system
- GeoJSON export support
## Prerequisites
- Java 17 or higher
- PostgreSQL database with PostGIS extension
- Access to the KAMCO database at 192.168.2.127:15432
## Configuration
Edit `src/main/resources/application.yml` to configure:
```yaml
converter:
spring.profiles.active: 'dev' #profiles.active
inference-id: 'D5E46F60FC40B1A8BE0CD1F3547AA6' # Inference ID (used for output folder structure)
map-ids:
- '35813030' # Add your map_ids here (text type)
batch-ids:
- 252
- 253
- 257
output-base-dir: '/kamco-nfs/dataset/export/'
crs: 'EPSG:5186' # Korean 2000 / Central Belt
geoserver:
base-url: 'https://kamco.geo-dev.gs.dabeeo.com'
workspace: 'cd'
datastore: 'inference_result'
overwrite-existing: true # Delete existing layers before re-registering
connection-timeout: 30000 # 30 seconds
read-timeout: 60000 # 60 seconds
```
```md
## Converter Mode (`converter.mode`)
`converter.mode`는 대량 `map_ids` 처리 시
OS 커맨드라인 길이 제한(`Argument list too long`) 문제를 방지하기 위해 추가 하였습니다.
### Supported Modes
#### MERGED
- `batch-ids`에 해당하는 **모든 데이터를 하나의 Shapefile로 병합 생성**
- `map-ids`가 설정되어 있어도 **무시됨**
- 단일 결과 파일이 필요한 경우 적합
#### MAP_IDS
- 명시적으로 전달한 `map-ids`만 대상으로 Shapefile 생성
- `converter.map-ids` **필수**
- `map-ids` 개수가 많을 경우 OS 커맨드라인 길이 제한에 걸릴 수 있음
#### RESOLVE
- `batch-ids` 기준으로 **JAR 내부에서 map_ids를 조회**한 뒤 Shapefile 생성
- `map-ids`를 커맨드라인 인자로 전달하지 않음
- 대량 데이터 처리 시 가장 안전한 방식
### Default Behavior (mode 미지정 시)
- `converter.map-ids`가 비어 있으면 → **MERGED**
- `converter.map-ids`가 있으면 → **MAP_IDS**
### Command Line Parameters
You can override configuration values using command line arguments:
**Using Gradle (recommended - no quoting issues):**
```bash
./gradlew bootRun --args="--converter.inference-id=D5E46F60FC40B1A8BE0CD1F3547AA6 --converter.map-ids[0]=35813030 --converter.batch-ids[0]=252 --converter.batch-ids[1]=253 --converter.batch-ids[2]=257 --converter.mode=MERGED"
```
**Using JAR (zsh shell - quote arguments with brackets):**
```bash
java -jar build/libs/shp-exporter.jar \
'--converter.inference-id=D5E46F60FC40B1A8BE0CD1F3547AA6' \
'--converter.map-ids[0]=35813030' \
'--converter.batch-ids[0]=252' \
'--converter.batch-ids[1]=253' \
'--converter.batch-ids[2]=257' \
'--converter.mode=MERGED'
```
**Using JAR (bash shell - no quotes needed):**
```bash
java -jar build/libs/shp-exporter.jar \
--converter.inference-id=D5E46F60FC40B1A8BE0CD1F3547AA6 \
--converter.map-ids[0]=35813030 \
--converter.batch-ids[0]=252 \
--converter.batch-ids[1]=253 \
--converter.batch-ids[2]=257 \
--converter.mode=MERGED
```
**Note for zsh users:** zsh interprets square brackets `[]` as glob patterns. Always quote arguments containing brackets when using zsh.
## Building
```bash
./gradlew build
```
## Running
### Generate Shapefiles
```bash
./gradlew bootRun
```
Or run the JAR directly:
```bash
java -jar build/libs/shp-exporter.jar
```
### Register Shapefile to GeoServer
First, set GeoServer credentials as environment variables:
```bash
export GEOSERVER_USERNAME=admin
export GEOSERVER_PASSWORD=geoserver
```
Then register a shapefile:
```bash
./gradlew bootRun --args="--upload-shp /kamco-nfs/dataset/export/D5E46F60FC40B1A8BE0CD1F3547AA6/35813030/35813030.shp --layer inference_35813030"
```
Or using the JAR:
```bash
java -jar build/libs/shp-exporter.jar \
--upload-shp /path/to/shapefile.shp \
--layer layer_name
```
**GeoServer Registration Process:**
1. Verifies workspace 'cd' exists (must be pre-created in GeoServer)
2. Creates datastore 'inference_result' if it doesn't exist
3. Deletes existing layer if `overwrite-existing: true`
4. Publishes shapefile via REST API
5. Verifies successful registration
6. Automatically enables WMS, WFS, WMTS services
**Important Notes:**
- Workspace 'cd' must exist in GeoServer before registration
- Environment variables `GEOSERVER_USERNAME` and `GEOSERVER_PASSWORD` must be set
- Shapefile path must be absolute
- GeoServer must have file system access to the shapefile location
## Output
Shapefiles will be created in directories structured as `output-base-dir/inference-id/map-id/`:
```
/kamco-nfs/dataset/export/D5E46F60FC40B1A8BE0CD1F3547AA6/35813030/
├── 35813030.shp # Shapefile geometry
├── 35813030.shx # Shape index
├── 35813030.dbf # Attribute data
└── 35813030.prj # Projection information
```
## Database Query
The application executes the following query for each map_id:
```sql
SELECT uid, map_id, probability, before_year, after_year,
before_c, before_p, after_c, after_p, ST_AsText(geometry) as geometry_wkt
FROM inference_results_testing
WHERE batch_id = ANY(?) AND map_id = ?
```
### Database Schema
- **geometry**: `geometry(Polygon, 5186)` - EPSG:5186 좌표계
- **map_id**: `text` - 문자열 타입
- **before_year, after_year**: `bigint` - Long 타입
- **batch_id**: `bigint` - Long 타입
- **uid**: `uuid` - UUID 타입
## Field Mapping
Shapefile field names are limited to 10 characters:
| Database Column | DB Type | Shapefile Field | Shapefile Type |
|-----------------|----------|-----------------|----------------|
| uid | uuid | chnDtctId | String |
| map_id | text | mpqd_no | String |
| probability | float8 | chn_dtct_p | Double |
| before_year | bigint | cprs_yr | Long |
| after_year | bigint | crtr_yr | Long |
| before_c | text | bf_cls_cd | String |
| before_p | float8 | bf_cls_pro | Double |
| after_c | text | af_cls_cd | String |
| after_p | float8 | af_cls_pro | Double |
| geometry | geom | the_geom | Polygon |
## Error Handling
### Shapefile Generation
- **No results**: Logs warning and continues to next map_id
- **Mixed geometry types**: Throws exception (shapefiles require homogeneous geometry)
- **Database connection failure**: Application exits with error
- **Invalid geometry**: Logs warning and continues processing
### GeoServer Registration
- **Workspace not found**: Logs error with remediation steps (must be pre-created)
- **Authentication failure**: Logs error prompting to verify environment variables
- **Network timeout**: Logs connection error with timeout details
- **Layer already exists**: Automatically deletes and re-registers if `overwrite-existing: true`
- **Registration failure**: Logs error but does not stop application (non-blocking)
## Validating Output
### Shapefile Validation
Open the generated shapefiles in QGIS or ArcGIS to verify:
1. Geometry displays correctly
2. Attribute table contains all expected fields
3. CRS is EPSG:5186 (Korean 2000 / Central Belt)
### GeoServer Layer Validation
After registering to GeoServer, verify the layer:
1. **GeoServer Admin Console**: https://kamco.geo-dev.gs.dabeeo.com/geoserver/web
2. Navigate to **Layers** → Find your layer (e.g., `cd:inference_35813030`)
3. Preview the layer using **Layer Preview**
4. Verify services are enabled:
- WMS: `https://kamco.geo-dev.gs.dabeeo.com/geoserver/cd/wms`
- WFS: `https://kamco.geo-dev.gs.dabeeo.com/geoserver/cd/wfs`
- WMTS: `https://kamco.geo-dev.gs.dabeeo.com/geoserver/cd/wmts`
**Example WMS GetMap Request:**
```
https://kamco.geo-dev.gs.dabeeo.com/geoserver/cd/wms?
service=WMS&
version=1.1.0&
request=GetMap&
layers=cd:inference_35813030&
bbox=<bounds>&
width=768&
height=768&
srs=EPSG:5186&
format=image/png
```
## Development
### Code Formatting
The project uses Google Java Format with 2-space indentation:
```bash
./gradlew spotlessApply
```
### Project Structure
```
src/main/java/com/kamco/makesample/
├── MakeSampleApplication.java # Main application class
├── cli/
│ └── ConverterCommandLineRunner.java # CLI entry point
├── config/
│ ├── ConverterProperties.java # Shapefile converter configuration
│ ├── GeoServerProperties.java # GeoServer configuration
│ ├── GeoServerCredentials.java # GeoServer authentication
│ └── RestTemplateConfig.java # HTTP client configuration
├── exception/
│ ├── ShapefileConversionException.java
│ ├── GeometryConversionException.java
│ ├── MixedGeometryException.java
│ └── GeoServerRegistrationException.java # GeoServer registration errors
├── model/
│ └── InferenceResult.java # Domain model
├── repository/
│ └── InferenceResultRepository.java # Data access layer
├── service/
│ ├── GeometryConverter.java # PostGIS to JTS conversion
│ ├── ShapefileConverterService.java # Orchestration service
│ └── GeoServerRegistrationService.java # GeoServer REST API integration
└── writer/
├── ShapefileWriter.java # GeoTools shapefile writer
└── GeoJsonWriter.java # GeoJSON export writer
```
## Dependencies
- Spring Boot 3.5.7
- spring-boot-starter
- spring-boot-starter-jdbc
- spring-boot-starter-web (for RestTemplate)
- spring-boot-starter-validation (for @NotBlank annotations)
- GeoTools 30.0
- gt-shapefile
- gt-referencing
- gt-epsg-hsql
- gt-geojson
- PostgreSQL JDBC Driver
- PostGIS JDBC 2.5.1
- JTS (Java Topology Suite) 1.19.0
## Troubleshooting
### GeoServer Registration Issues
**Problem: "Workspace not found: cd"**
```
Solution: Create workspace 'cd' in GeoServer admin console before registration
Steps:
1. Login to GeoServer admin: https://kamco.geo-dev.gs.dabeeo.com/geoserver/web
2. Go to Workspaces → Add new workspace
3. Name: cd, Namespace URI: http://cd
4. Click Save
```
**Problem: "GeoServer credentials not configured"**
```
Solution: Set environment variables before running
export GEOSERVER_USERNAME=admin
export GEOSERVER_PASSWORD=geoserver
```
**Problem: "Layer already exists and overwrite is disabled"**
```
Solution: Enable overwrite in application.yml
geoserver:
overwrite-existing: true
```
**Problem: Connection timeout to GeoServer**
```
Solution: Increase timeout values in application.yml
geoserver:
connection-timeout: 60000 # 60 seconds
read-timeout: 120000 # 120 seconds
```
**Problem: "Registration failed. Layer not found after publication"**
```
Possible causes:
1. GeoServer cannot access shapefile path (check file system permissions)
2. Shapefile is corrupted or invalid
3. Network issue interrupted registration
Solution:
1. Verify GeoServer has read access to shapefile directory
2. Validate shapefile using QGIS or ogr2ogr
3. Check GeoServer logs for detailed error messages
```
### Database Connection Issues
**Problem: "Connection refused to 192.168.2.127:15432"**
```
Solution: Verify PostgreSQL is running and accessible
psql -h 192.168.2.127 -p 15432 -U kamco_cds -d kamco_cds
```
**Problem: "No results found for map_id"**
```
Solution: Verify data exists in database
SELECT COUNT(*) FROM inference_results_testing
WHERE batch_id IN (252, 253, 257) AND map_id = '35813030';
```
## License
KAMCO Internal Use Only

88
shp-exporter/build.gradle Executable file
View File

@@ -0,0 +1,88 @@
// Gradle build for the shp-exporter CLI: a Spring Boot application that exports
// PostGIS inference results to shapefiles (GeoTools) and registers them with GeoServer.
plugins {
  id 'java'
  id 'org.springframework.boot' version '3.5.7'
  id 'io.spring.dependency-management' version '1.1.7'
  id 'com.diffplug.spotless' version '6.25.0'
}

group = 'com.kamco'
version = '1.0.0'

java {
  toolchain {
    languageVersion = JavaLanguageVersion.of(17)
  }
}

// GeoTools artifacts are hosted on the OSGeo repositories, not Maven Central.
repositories {
  mavenCentral()
  maven {
    url 'https://repo.osgeo.org/repository/release/'
  }
  maven {
    url 'https://repo.osgeo.org/repository/geotools-releases/'
  }
  maven {
    url 'https://repo.osgeo.org/repository/snapshot/'
  }
}

ext {
  geoToolsVersion = '30.0'
}

// Exclude the jai_core transitive dependency project-wide.
// NOTE(review): presumably because no resolvable public artifact exists for it — confirm.
configurations.all {
  exclude group: 'javax.media', module: 'jai_core'
}

bootJar {
  archiveFileName = "shp-exporter.jar"
}

jar {
  enabled = false // do not build the plain jar (avoids confusion with the boot jar)
}

dependencies {
  // Spring Boot
  implementation 'org.springframework.boot:spring-boot-starter'
  implementation 'org.springframework.boot:spring-boot-starter-jdbc'
  implementation 'org.springframework.boot:spring-boot-starter-web'
  implementation 'org.springframework.boot:spring-boot-starter-validation'
  // Database
  implementation 'org.postgresql:postgresql'
  implementation 'com.zaxxer:HikariCP'
  // PostGIS
  implementation 'net.postgis:postgis-jdbc:2.5.1'
  // JTS Geometry
  implementation 'org.locationtech.jts:jts-core:1.19.0'
  // GeoTools
  implementation "org.geotools:gt-shapefile:${geoToolsVersion}"
  implementation "org.geotools:gt-referencing:${geoToolsVersion}"
  implementation "org.geotools:gt-epsg-hsql:${geoToolsVersion}"
  implementation "org.geotools:gt-geojson:${geoToolsVersion}"
  // Logging
  implementation 'org.slf4j:slf4j-api'
  // Testing
  testImplementation 'org.springframework.boot:spring-boot-starter-test'
  testRuntimeOnly 'org.junit.platform:junit-platform-launcher'
}

// Google Java Format with 2-space indentation (see the README "Code Formatting" section).
spotless {
  java {
    googleJavaFormat('1.19.2')
    indentWithSpaces(2)
    trimTrailingWhitespace()
    endWithNewline()
  }
}

tasks.named('test') {
  useJUnitPlatform()
}

Binary file not shown.

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
com.kamco.makesample.MakeSampleApplication

View File

@@ -0,0 +1,3 @@
spring:
application:
name: make-shapefile-service

View File

@@ -0,0 +1,12 @@
Manifest-Version: 1.0
Main-Class: org.springframework.boot.loader.launch.JarLauncher
Start-Class: com.kamco.makesample.MakeSampleApplication
Spring-Boot-Version: 3.5.7
Spring-Boot-Classes: BOOT-INF/classes/
Spring-Boot-Lib: BOOT-INF/lib/
Spring-Boot-Classpath-Index: BOOT-INF/classpath.idx
Spring-Boot-Layers-Index: BOOT-INF/layers.idx
Build-Jdk-Spec: 17
Implementation-Title: shp-exporter
Implementation-Version: 1.0.0

View File

@@ -0,0 +1 @@
1

BIN
shp-exporter/gradle/wrapper/gradle-wrapper.jar vendored Executable file

Binary file not shown.

View File

@@ -0,0 +1,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

251
shp-exporter/gradlew vendored Executable file
View File

@@ -0,0 +1,251 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
# Print a warning message; the whole function body is redirected to stderr.
warn () {
    echo "$*"
} >&2
# Print an error message (padded with blank lines) to stderr and exit with status 1.
die () {
    echo
    echo "$*"
    echo
    exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH="\\\"\\\""
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
if ! command -v java >/dev/null 2>&1
then
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Collect all arguments for the java command:
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
# and any embedded shellness will be escaped.
# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
# treated as '${Hostname}' itself on the command line.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
-jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

94
shp-exporter/gradlew.bat vendored Executable file
View File

@@ -0,0 +1,94 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@rem SPDX-License-Identifier: Apache-2.0
@rem
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:execute
@rem Setup the command line
set CLASSPATH=
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %*
:end
@rem End local scope for the variables with windows NT shell
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
PROJCS["Korea 2000 / Central Belt 2010", GEOGCS["Korea 2000", DATUM["Geocentric datum of Korea", SPHEROID["GRS 1980", 6378137.0, 298.257222101, AUTHORITY["EPSG","7019"]], TOWGS84[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], AUTHORITY["EPSG","6737"]], PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]], UNIT["degree", 0.017453292519943295], AXIS["Geodetic latitude", NORTH], AXIS["Geodetic longitude", EAST], AUTHORITY["EPSG","4737"]], PROJECTION["Transverse_Mercator", AUTHORITY["EPSG","9807"]], PARAMETER["central_meridian", 127.0], PARAMETER["latitude_of_origin", 38.0], PARAMETER["scale_factor", 1.0], PARAMETER["false_easting", 200000.0], PARAMETER["false_northing", 600000.0], UNIT["m", 1.0], AXIS["Northing", NORTH], AXIS["Easting", EAST], AUTHORITY["EPSG","5186"]]

View File

@@ -0,0 +1,52 @@
spring:
datasource:
url: jdbc:postgresql://192.168.2.127:15432/kamco_cds
username: kamco_cds
password: kamco_cds_Q!W@E#R$
driver-class-name: org.postgresql.Driver
hikari:
maximum-pool-size: 5
connection-timeout: 30000
idle-timeout: 600000
max-lifetime: 1800000
application:
name: make-shapefile-service
main:
web-application-type: none # Disable web server for CLI application
converter:
inference-id: D5E46F60FC40B1A8BE0CD1F3547AA6
  # map-ids is optional: omit it (as here) to create one merged shapefile for all map ids
  batch-ids: # Required: batch ids to include in the export
- 252
- 253
- 257
output-base-dir: '/kamco-nfs/dataset/export/'
#output-base-dir: '/Users/bokmin/export/'
crs: 'EPSG:5186'
geoserver:
base-url: 'http://label-tile.gs.dabeeo.com/geoserver'
workspace: 'cd'
overwrite-existing: true
connection-timeout: 30000
read-timeout: 60000
# Credentials (optional - environment variables take precedence)
# Uncomment and set values for development convenience
# For production, use GEOSERVER_USERNAME and GEOSERVER_PASSWORD environment variables
username: 'admin'
password: 'geoserver'
logging:
level:
com.kamco.makesample: DEBUG
org.springframework: WARN
pattern:
console: '%d{yyyy-MM-dd HH:mm:ss} - %msg%n'
layer:
geoserver-url: http://label-tile.gs.dabeeo.com
workspace: cd

1
shp-exporter/settings.gradle Executable file
View File

@@ -0,0 +1 @@
rootProject.name = 'shp-exporter'

View File

@@ -0,0 +1,12 @@
package com.kamco.makesample;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/** Spring Boot entry point for the shapefile export CLI. */
@SpringBootApplication
public class MakeSampleApplication {

  /** Boots the application context; the CLI work is done by CommandLineRunner beans. */
  public static void main(String[] args) {
    SpringApplication application = new SpringApplication(MakeSampleApplication.class);
    application.run(args);
  }
}

View File

@@ -0,0 +1,167 @@
package com.kamco.makesample.cli;

import com.kamco.makesample.config.ConverterProperties;
import com.kamco.makesample.service.GeoServerRegistrationService;
import com.kamco.makesample.service.ShapefileConverterService;
import java.nio.file.Paths;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.DefaultApplicationArguments;
import org.springframework.stereotype.Component;

/**
 * Command-line entry point with two modes:
 *
 * <ul>
 *   <li>default: converts PostGIS inference results to shapefiles using the {@code converter.*}
 *       configuration bound to {@link ConverterProperties};
 *   <li>{@code --upload-shp <path>}: uploads an existing shapefile (.shp or .zip) to GeoServer,
 *       optionally under a custom {@code --layer} name.
 * </ul>
 */
@Component
public class ConverterCommandLineRunner implements CommandLineRunner {

  private static final Logger log = LoggerFactory.getLogger(ConverterCommandLineRunner.class);

  private final ShapefileConverterService converterService;
  private final GeoServerRegistrationService geoServerService;
  private final ConverterProperties converterProperties;

  public ConverterCommandLineRunner(
      ShapefileConverterService converterService,
      GeoServerRegistrationService geoServerService,
      ConverterProperties converterProperties) {
    this.converterService = converterService;
    this.geoServerService = geoServerService;
    this.converterProperties = converterProperties;
  }

  /**
   * Dispatches to upload mode when {@code --upload-shp} is present; otherwise runs the shapefile
   * conversion. Exits the JVM with status 1 when the conversion fails.
   */
  @Override
  public void run(String... args) throws Exception {
    ApplicationArguments appArgs = new DefaultApplicationArguments(args);

    List<String> profiles = appArgs.getOptionValues("spring.profiles.active");
    log.info("profiles.active={}", profiles);

    if (appArgs.containsOption("upload-shp")) {
      handleRegistration(appArgs);
      return;
    }

    // Shapefile generation mode: log the effective configuration, then convert.
    log.info("=== PostgreSQL to Shapefile Converter ===");
    log.info("Inference ID: {}", converterProperties.getInferenceId());

    List<String> mapIds = converterProperties.getMapIds();
    if (mapIds == null || mapIds.isEmpty()) {
      log.info("Map IDs: <not specified - will create merged shapefile>");
    } else {
      log.info("Map IDs to process: {}", mapIds);
    }

    log.info("Batch IDs: {}", converterProperties.getBatchIds());
    log.info("Output directory: {}", converterProperties.getOutputBaseDir());
    log.info("CRS: {}", converterProperties.getCrs());
    log.info("==========================================");

    try {
      converterService.convertAll();
      log.info("Conversion process completed successfully");
    } catch (Exception e) {
      log.error("Conversion process failed: {}", e.getMessage(), e);
      // Non-zero exit status so wrapping scripts/schedulers can detect the failure.
      System.exit(1);
    }
  }

  /**
   * Handles {@code --upload-shp} mode: resolves the target layer name (from {@code --layer} or the
   * file name) and uploads the shapefile to GeoServer.
   */
  private void handleRegistration(ApplicationArguments appArgs) {
    // --help / -h short-circuits to usage output.
    if (appArgs.containsOption("help") || appArgs.containsOption("h")) {
      printUsage();
      return;
    }

    String filePath = firstOption(appArgs, "upload-shp");
    String layerName = firstOption(appArgs, "layer");

    if (filePath == null || filePath.isBlank()) {
      log.info("No upload requested. Use --upload-shp option to upload a shapefile.");
      printUsage();
      return;
    }

    if (layerName == null || layerName.isBlank()) {
      // Default layer name: the file name without its .zip/.shp extension (case-insensitive).
      String fileName = Paths.get(filePath).getFileName().toString();
      layerName = fileName.replaceAll("(?i)\\.(zip|shp)$", "");
    }

    log.info("========================================");
    log.info("Shapefile Upload to GeoServer");
    log.info("========================================");
    log.info("Input File: {}", filePath);
    log.info("Layer Name: {}", layerName);
    log.info("========================================");

    try {
      geoServerService.uploadShapefileZip(filePath, layerName);
      log.info("========================================");
      log.info("Upload completed successfully!");
      log.info("========================================");
    } catch (Exception e) {
      log.error("========================================");
      log.error("Upload failed: {}", e.getMessage(), e);
      log.error("========================================");
      // Precise rethrow: an upload failure must abort the run.
      throw e;
    }
  }

  /**
   * Returns the first value of a {@code --key=value} option, or {@code null} when the option is
   * absent or has no value.
   */
  private String firstOption(ApplicationArguments appArgs, String key) {
    var values = appArgs.getOptionValues(key);
    return (values == null || values.isEmpty()) ? null : values.get(0);
  }

  // NOTE(review): a private getOptionValue(ApplicationArguments, String) helper duplicated
  // firstOption(...) (plus an ad-hoc "--key value" scan) and was never called; removed as dead code.

  /** Prints CLI usage for the upload mode to stdout (user-facing output, intentionally unlogged). */
  private void printUsage() {
    System.out.println();
    System.out.println("Usage: java -jar shp-exporter.jar [OPTIONS]");
    System.out.println();
    System.out.println("Options:");
    System.out.println(
        "  --upload-shp <file-path>    Upload shapefile to GeoServer (.shp or .zip)");
    System.out.println(
        "  --layer <layer-name>        Specify layer name (optional, defaults to filename)");
    System.out.println("  --help, -h                  Show this help message");
    System.out.println();
    System.out.println("Examples:");
    System.out.println("  # Upload ZIP file directly");
    System.out.println("  java -jar shp-exporter.jar --upload-shp /path/to/shapefile.zip");
    System.out.println();
    System.out.println("  # Upload .shp file (will auto-create ZIP with related files)");
    System.out.println("  java -jar shp-exporter.jar --upload-shp /path/to/shapefile.shp");
    System.out.println();
    System.out.println("  # Specify custom layer name");
    System.out.println(
        "  java -jar shp-exporter.jar --upload-shp /path/to/shapefile.shp --layer my_layer");
    System.out.println();
  }
}

View File

@@ -0,0 +1,65 @@
package com.kamco.makesample.config;

import java.util.List;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

/**
 * Binds the {@code converter.*} section of application.yml: which inference run to export, which
 * map/batch ids to include, where to write the output, and the target coordinate reference system.
 */
@Component
@ConfigurationProperties(prefix = "converter")
public class ConverterProperties {

  // Identifier of the inference run whose results are exported.
  private String inferenceId;
  // Optional map sheet ids; null/empty is treated by callers as "merge everything".
  private List<String> mapIds;
  // Batch ids to include in the export.
  private List<Long> batchIds;
  // Base directory for generated shapefiles.
  private String outputBaseDir;
  // Target CRS code, e.g. "EPSG:5186".
  private String crs;
  // Optional mode flag (semantics defined by the consumers of this property).
  private String mode;

  public String getInferenceId() {
    return inferenceId;
  }

  public void setInferenceId(String inferenceId) {
    this.inferenceId = inferenceId;
  }

  public List<String> getMapIds() {
    return mapIds;
  }

  public void setMapIds(List<String> mapIds) {
    this.mapIds = mapIds;
  }

  public List<Long> getBatchIds() {
    return batchIds;
  }

  public void setBatchIds(List<Long> batchIds) {
    this.batchIds = batchIds;
  }

  public String getOutputBaseDir() {
    return outputBaseDir;
  }

  public void setOutputBaseDir(String outputBaseDir) {
    this.outputBaseDir = outputBaseDir;
  }

  public String getCrs() {
    return crs;
  }

  public void setCrs(String crs) {
    this.crs = crs;
  }

  public String getMode() {
    return mode;
  }

  public void setMode(String mode) {
    this.mode = mode;
  }
}

View File

@@ -0,0 +1,31 @@
package com.kamco.makesample.config;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

/**
 * Resolves GeoServer credentials. The GEOSERVER_USERNAME / GEOSERVER_PASSWORD environment
 * variables take precedence; {@code geoserver.username} / {@code geoserver.password} from
 * application.yml are the fallback. Either field may remain {@code null} when neither source
 * provides a value — call {@link #validate()} before use to fail fast.
 */
@Component
public class GeoServerCredentials {

  // Environment variable first, then yml property, else null.
  @Value("${GEOSERVER_USERNAME:${geoserver.username:#{null}}}")
  private String username;

  @Value("${GEOSERVER_PASSWORD:${geoserver.password:#{null}}}")
  private String password;

  public String getUsername() {
    return username;
  }

  public String getPassword() {
    return password;
  }

  /**
   * Ensures both credentials were resolved.
   *
   * @throws IllegalStateException when username or password is missing
   */
  public void validate() {
    boolean configured = username != null && password != null;
    if (configured) {
      return;
    }
    throw new IllegalStateException(
        "GeoServer credentials not configured. "
            + "Set GEOSERVER_USERNAME and GEOSERVER_PASSWORD environment variables "
            + "or configure geoserver.username and geoserver.password in application.yml");
  }
}

Some files were not shown because too many files have changed in this diff Show More