shp export 소스 추가

This commit is contained in:
2026-02-23 16:15:11 +09:00
parent 5c47d111b1
commit ee3f86f8ac
104 changed files with 3628 additions and 0 deletions

View File

@@ -0,0 +1,12 @@
package com.kamco.makesample;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/** Spring Boot entry point for the shapefile export/conversion tool. */
@SpringBootApplication
public class MakeSampleApplication {

  /**
   * Boots the Spring context; the actual work is carried out by the
   * registered {@code CommandLineRunner} beans.
   *
   * @param args raw command-line arguments, forwarded to Spring
   */
  public static void main(String[] args) {
    SpringApplication application = new SpringApplication(MakeSampleApplication.class);
    application.run(args);
  }
}

View File

@@ -0,0 +1,167 @@
package com.kamco.makesample.cli;
import com.kamco.makesample.config.ConverterProperties;
import com.kamco.makesample.service.GeoServerRegistrationService;
import com.kamco.makesample.service.ShapefileConverterService;
import java.nio.file.Paths;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.DefaultApplicationArguments;
import org.springframework.stereotype.Component;
/**
 * CLI dispatcher for the converter application.
 *
 * <p>When {@code --upload-shp} is present the run uploads a shapefile to
 * GeoServer; otherwise it performs the PostgreSQL-to-shapefile conversion
 * configured in {@link ConverterProperties}.
 */
@Component
public class ConverterCommandLineRunner implements CommandLineRunner {

  private static final Logger log = LoggerFactory.getLogger(ConverterCommandLineRunner.class);

  private final ShapefileConverterService converterService;
  private final GeoServerRegistrationService geoServerService;
  private final ConverterProperties converterProperties;

  public ConverterCommandLineRunner(
      ShapefileConverterService converterService,
      GeoServerRegistrationService geoServerService,
      ConverterProperties converterProperties) {
    this.converterService = converterService;
    this.geoServerService = geoServerService;
    this.converterProperties = converterProperties;
  }

  @Override
  public void run(String... args) throws Exception {
    ApplicationArguments appArgs = new DefaultApplicationArguments(args);
    List<String> profiles = appArgs.getOptionValues("spring.profiles.active");
    log.info("profiles.active={}", profiles);
    if (appArgs.containsOption("upload-shp")) {
      handleRegistration(appArgs);
      return;
    }
    // Existing shapefile generation logic
    log.info("=== PostgreSQL to Shapefile Converter ===");
    log.info("Inference ID: {}", converterProperties.getInferenceId());
    List<String> mapIds = converterProperties.getMapIds();
    if (mapIds == null || mapIds.isEmpty()) {
      log.info("Map IDs: <not specified - will create merged shapefile>");
    } else {
      log.info("Map IDs to process: {}", mapIds);
    }
    log.info("Batch IDs: {}", converterProperties.getBatchIds());
    log.info("Output directory: {}", converterProperties.getOutputBaseDir());
    log.info("CRS: {}", converterProperties.getCrs());
    log.info("==========================================");
    try {
      converterService.convertAll();
      log.info("Conversion process completed successfully");
    } catch (Exception e) {
      log.error("Conversion process failed: {}", e.getMessage(), e);
      // Non-zero exit so calling scripts can detect the failure.
      System.exit(1);
    }
  }

  /** Handles the --upload-shp flow: resolves the file and layer name, then uploads. */
  private void handleRegistration(ApplicationArguments appArgs) {
    // --help
    if (appArgs.containsOption("help") || appArgs.containsOption("h")) {
      printUsage();
      return;
    }
    String filePath = firstOption(appArgs, "upload-shp");
    String layerName = firstOption(appArgs, "layer");
    if (filePath == null || filePath.isBlank()) {
      log.info("No upload requested. Use --upload-shp option to upload a shapefile.");
      printUsage();
      return;
    }
    if (layerName == null || layerName.isBlank()) {
      // Default layer name: file name without its .zip/.shp suffix (case-insensitive).
      String fileName = Paths.get(filePath).getFileName().toString();
      layerName = fileName.replaceAll("(?i)\\.(zip|shp)$", "");
    }
    log.info("========================================");
    log.info("Shapefile Upload to GeoServer");
    log.info("========================================");
    log.info("Input File: {}", filePath);
    log.info("Layer Name: {}", layerName);
    log.info("========================================");
    try {
      geoServerService.uploadShapefileZip(filePath, layerName);
      log.info("========================================");
      log.info("Upload completed successfully!");
      log.info("========================================");
    } catch (Exception e) {
      log.error("========================================");
      log.error("Upload failed: {}", e.getMessage(), e);
      log.error("========================================");
      throw e;
    }
  }

  /**
   * Returns the first value of the given option, supporting both the
   * {@code --key=value} form parsed by Spring and the space-separated
   * {@code --key value} form.
   *
   * <p>The space-separated fallback was previously implemented only in an
   * unused private helper, so {@code --upload-shp <path>} with a space
   * silently printed the usage text; the two are now consolidated here.
   *
   * @return the first value for the option, or {@code null} when absent
   */
  private String firstOption(ApplicationArguments appArgs, String key) {
    var values = appArgs.getOptionValues(key);
    if (values != null && !values.isEmpty()) {
      return values.get(0);
    }
    // Fallback: scan the raw args for "--key value".
    String[] sourceArgs = appArgs.getSourceArgs();
    for (int i = 0; i < sourceArgs.length - 1; i++) {
      if (sourceArgs[i].equals("--" + key)) {
        String nextArg = sourceArgs[i + 1];
        if (!nextArg.startsWith("--")) {
          return nextArg;
        }
      }
    }
    return null;
  }

  /** Prints CLI usage for the upload mode. */
  private void printUsage() {
    System.out.println();
    System.out.println("Usage: java -jar shp-exporter.jar [OPTIONS]");
    System.out.println();
    System.out.println("Options:");
    System.out.println(
        "  --upload-shp <file-path>  Upload shapefile to GeoServer (.shp or .zip)");
    System.out.println(
        "  --layer <layer-name>      Specify layer name (optional, defaults to filename)");
    System.out.println("  --help, -h                Show this help message");
    System.out.println();
    System.out.println("Examples:");
    System.out.println("  # Upload ZIP file directly");
    System.out.println("  java -jar shp-exporter.jar --upload-shp /path/to/shapefile.zip");
    System.out.println();
    System.out.println("  # Upload .shp file (will auto-create ZIP with related files)");
    System.out.println("  java -jar shp-exporter.jar --upload-shp /path/to/shapefile.shp");
    System.out.println();
    System.out.println("  # Specify custom layer name");
    System.out.println(
        "  java -jar shp-exporter.jar --upload-shp /path/to/shapefile.shp --layer my_layer");
    System.out.println();
  }
}

View File

@@ -0,0 +1,65 @@
package com.kamco.makesample.config;
import java.util.List;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
 * Configuration backing the conversion run, bound from properties under the
 * {@code converter.} prefix.
 */
@Component
@ConfigurationProperties(prefix = "converter")
public class ConverterProperties {
  // Identifier of the inference run; used as the output folder and merged file name.
  private String inferenceId;
  // Optional explicit list of map ids to export; null/empty triggers merged output.
  private List<String> mapIds;
  // Batch ids used to filter rows in the inference results table (required).
  private List<Long> batchIds;
  // Root directory under which per-inference output folders are created.
  private String outputBaseDir;
  // Coordinate reference system string handed to the shapefile/GeoJSON writers.
  private String crs;
  // Conversion mode: MERGED, MAP_IDS, RESOLVE, or empty for the legacy behavior
  // (see ShapefileConverterService.convertAll()).
  private String mode;
  public String getInferenceId() {
    return inferenceId;
  }
  public void setInferenceId(String inferenceId) {
    this.inferenceId = inferenceId;
  }
  public List<String> getMapIds() {
    return mapIds;
  }
  public void setMapIds(List<String> mapIds) {
    this.mapIds = mapIds;
  }
  public List<Long> getBatchIds() {
    return batchIds;
  }
  public void setBatchIds(List<Long> batchIds) {
    this.batchIds = batchIds;
  }
  public String getOutputBaseDir() {
    return outputBaseDir;
  }
  public void setOutputBaseDir(String outputBaseDir) {
    this.outputBaseDir = outputBaseDir;
  }
  public String getCrs() {
    return crs;
  }
  public void setCrs(String crs) {
    this.crs = crs;
  }
  public void setMode(String mode) {
    this.mode = mode;
  }
  public String getMode() {
    return mode;
  }
}

View File

@@ -0,0 +1,31 @@
package com.kamco.makesample.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
/**
 * GeoServer login credentials resolved from the GEOSERVER_USERNAME /
 * GEOSERVER_PASSWORD environment variables, falling back to the
 * {@code geoserver.username} / {@code geoserver.password} properties.
 */
@Component
public class GeoServerCredentials {

  @Value("${GEOSERVER_USERNAME:${geoserver.username:#{null}}}")
  private String username;

  @Value("${GEOSERVER_PASSWORD:${geoserver.password:#{null}}}")
  private String password;

  /**
   * Ensures both credentials are configured.
   *
   * @throws IllegalStateException when either value is missing or blank
   *     (previously only {@code null} was rejected, so empty strings passed)
   */
  public void validate() {
    if (isMissing(username) || isMissing(password)) {
      throw new IllegalStateException(
          "GeoServer credentials not configured. "
              + "Set GEOSERVER_USERNAME and GEOSERVER_PASSWORD environment variables "
              + "or configure geoserver.username and geoserver.password in application.yml");
    }
  }

  /** True when the value is absent or whitespace-only. */
  private static boolean isMissing(String value) {
    return value == null || value.isBlank();
  }

  public String getUsername() {
    return username;
  }

  public String getPassword() {
    return password;
  }
}

View File

@@ -0,0 +1,96 @@
package com.kamco.makesample.config;
import jakarta.validation.constraints.NotBlank;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
import org.springframework.validation.annotation.Validated;
/**
 * GeoServer connection settings, bound from properties under the
 * {@code geoserver.} prefix and validated at startup.
 */
@Component
@ConfigurationProperties(prefix = "geoserver")
@Validated
public class GeoServerProperties {
  // NOTE(review): baseUrl/workspace mix @Value("${layer.*}") placeholder injection
  // with @ConfigurationProperties(prefix = "geoserver") setter binding — confirm
  // which source is intended to win, as the two mechanisms can conflict.

  // Root URL of the GeoServer instance used to build REST endpoints.
  @NotBlank(message = "GeoServer base URL must be configured")
  @Value("${layer.geoserver-url}")
  private String baseUrl;
  // Target workspace for uploaded layers.
  @NotBlank(message = "GeoServer workspace must be configured")
  @Value("${layer.workspace}")
  private String workspace;
  // Default datastore name; overridable via geoserver.datastore.
  @NotBlank(message = "GeoServer datastore must be configured")
  private String datastore = "inference_result";
  // When true, an existing layer of the same name is deleted before upload.
  private boolean overwriteExisting = true;
  private int connectionTimeout = 30000; // 30 seconds
  private int readTimeout = 60000; // 60 seconds
  // Basic-auth credentials for the GeoServer REST API.
  @NotBlank private String username;
  @NotBlank private String password;
  public String getBaseUrl() {
    return baseUrl;
  }
  public void setBaseUrl(String baseUrl) {
    this.baseUrl = baseUrl;
  }
  public String getWorkspace() {
    return workspace;
  }
  public void setWorkspace(String workspace) {
    this.workspace = workspace;
  }
  public String getDatastore() {
    return datastore;
  }
  public void setDatastore(String datastore) {
    this.datastore = datastore;
  }
  public boolean isOverwriteExisting() {
    return overwriteExisting;
  }
  public void setOverwriteExisting(boolean overwriteExisting) {
    this.overwriteExisting = overwriteExisting;
  }
  public int getConnectionTimeout() {
    return connectionTimeout;
  }
  public void setConnectionTimeout(int connectionTimeout) {
    this.connectionTimeout = connectionTimeout;
  }
  public int getReadTimeout() {
    return readTimeout;
  }
  public void setReadTimeout(int readTimeout) {
    this.readTimeout = readTimeout;
  }
  public String getUsername() {
    return username;
  }
  public void setUsername(String username) {
    this.username = username;
  }
  public String getPassword() {
    return password;
  }
  public void setPassword(String password) {
    this.password = password;
  }
}

View File

@@ -0,0 +1,31 @@
package com.kamco.makesample.config;
import java.time.Duration;
import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.client.RestTemplate;
/**
 * Provides a {@link RestTemplate} bean whose connect/read timeouts come from
 * {@link GeoServerProperties}.
 */
@Configuration
public class RestTemplateConfig {

  private final GeoServerProperties properties;

  public RestTemplateConfig(GeoServerProperties properties) {
    this.properties = properties;
  }

  /** RestTemplate with the configured connection and read timeouts applied. */
  @Bean
  public RestTemplate restTemplate(RestTemplateBuilder builder) {
    Duration connectTimeout = Duration.ofMillis(properties.getConnectionTimeout());
    Duration readTimeout = Duration.ofMillis(properties.getReadTimeout());
    return builder
        .requestFactory(
            () -> {
              var factory =
                  new org.springframework.http.client.SimpleClientHttpRequestFactory();
              factory.setConnectTimeout(connectTimeout);
              factory.setReadTimeout(readTimeout);
              return factory;
            })
        .build();
  }
}

View File

@@ -0,0 +1,12 @@
package com.kamco.makesample.exception;
/** Signals a failure while registering or uploading a layer to GeoServer. */
public class GeoServerRegistrationException extends RuntimeException {

  /** @param message description of the registration failure */
  public GeoServerRegistrationException(String message) {
    super(message);
  }

  /**
   * @param message description of the registration failure
   * @param cause underlying error
   */
  public GeoServerRegistrationException(String message, Throwable cause) {
    super(message, cause);
  }
}

View File

@@ -0,0 +1,12 @@
package com.kamco.makesample.exception;
/** Signals that a WKT string could not be converted into a JTS geometry. */
public class GeometryConversionException extends ShapefileConversionException {

  /** @param message description of the conversion failure */
  public GeometryConversionException(String message) {
    super(message);
  }

  /**
   * @param message description of the conversion failure
   * @param cause underlying parse error
   */
  public GeometryConversionException(String message, Throwable cause) {
    super(message, cause);
  }
}

View File

@@ -0,0 +1,12 @@
package com.kamco.makesample.exception;
/** Signals that a result set mixes geometry types a shapefile cannot hold together. */
public class MixedGeometryException extends ShapefileConversionException {

  /** @param message description of the mixed-geometry condition */
  public MixedGeometryException(String message) {
    super(message);
  }

  /**
   * @param message description of the mixed-geometry condition
   * @param cause underlying error
   */
  public MixedGeometryException(String message, Throwable cause) {
    super(message, cause);
  }
}

View File

@@ -0,0 +1,12 @@
package com.kamco.makesample.exception;
/** Base unchecked exception for failures in the shapefile conversion pipeline. */
public class ShapefileConversionException extends RuntimeException {

  /** @param message description of the conversion failure */
  public ShapefileConversionException(String message) {
    super(message);
  }

  /**
   * @param message description of the conversion failure
   * @param cause underlying error
   */
  public ShapefileConversionException(String message, Throwable cause) {
    super(message, cause);
  }
}

View File

@@ -0,0 +1,122 @@
package com.kamco.makesample.model;
import org.locationtech.jts.geom.Geometry;
/**
 * One row of the {@code inference_results_testing} table: a detection with
 * its before/after classification values and the geometry parsed from WKT.
 *
 * <p>All numeric fields use wrapper types so SQL NULLs survive as Java nulls
 * (see InferenceResultRepository's row mapper).
 */
public class InferenceResult {
  // Unique row identifier.
  private String uid;
  // Map identifier used to group and export results.
  private String mapId;
  // Probability score from the inference run.
  private Double probability;
  // Years of the "before" and "after" data.
  private Long beforeYear;
  private Long afterYear;
  // Classification code and probability for the "before" state.
  private String beforeC;
  private Double beforeP;
  // Classification code and probability for the "after" state.
  private String afterC;
  private Double afterP;
  // JTS geometry parsed from the row's WKT (see GeometryConverter).
  private Geometry geometry;
  /** No-arg constructor for frameworks and row mappers. */
  public InferenceResult() {}
  /** All-args constructor. */
  public InferenceResult(
      String uid,
      String mapId,
      Double probability,
      Long beforeYear,
      Long afterYear,
      String beforeC,
      Double beforeP,
      String afterC,
      Double afterP,
      Geometry geometry) {
    this.uid = uid;
    this.mapId = mapId;
    this.probability = probability;
    this.beforeYear = beforeYear;
    this.afterYear = afterYear;
    this.beforeC = beforeC;
    this.beforeP = beforeP;
    this.afterC = afterC;
    this.afterP = afterP;
    this.geometry = geometry;
  }
  public String getUid() {
    return uid;
  }
  public void setUid(String uid) {
    this.uid = uid;
  }
  public String getMapId() {
    return mapId;
  }
  public void setMapId(String mapId) {
    this.mapId = mapId;
  }
  public Double getProbability() {
    return probability;
  }
  public void setProbability(Double probability) {
    this.probability = probability;
  }
  public Long getBeforeYear() {
    return beforeYear;
  }
  public void setBeforeYear(Long beforeYear) {
    this.beforeYear = beforeYear;
  }
  public Long getAfterYear() {
    return afterYear;
  }
  public void setAfterYear(Long afterYear) {
    this.afterYear = afterYear;
  }
  public String getBeforeC() {
    return beforeC;
  }
  public void setBeforeC(String beforeC) {
    this.beforeC = beforeC;
  }
  public Double getBeforeP() {
    return beforeP;
  }
  public void setBeforeP(Double beforeP) {
    this.beforeP = beforeP;
  }
  public String getAfterC() {
    return afterC;
  }
  public void setAfterC(String afterC) {
    this.afterC = afterC;
  }
  public Double getAfterP() {
    return afterP;
  }
  public void setAfterP(Double afterP) {
    this.afterP = afterP;
  }
  public Geometry getGeometry() {
    return geometry;
  }
  public void setGeometry(Geometry geometry) {
    this.geometry = geometry;
  }
}

View File

@@ -0,0 +1,153 @@
package com.kamco.makesample.repository;
import com.kamco.makesample.model.InferenceResult;
import com.kamco.makesample.service.GeometryConverter;
import java.sql.Array;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.PreparedStatementCreator;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Repository;
/**
 * JDBC access to the {@code inference_results_testing} table.
 *
 * <p>All queries filter with {@code batch_id = ANY(?)} using a PostgreSQL
 * {@code bigint[]} parameter and only return rows whose {@code after_c} and
 * {@code after_p} columns are populated. The previously triplicated array
 * binding and mixed anonymous-class/lambda PreparedStatementCreators are
 * unified here.
 */
@Repository
public class InferenceResultRepository {

  private static final Logger log = LoggerFactory.getLogger(InferenceResultRepository.class);

  private final JdbcTemplate jdbcTemplate;
  private final GeometryConverter geometryConverter;

  public InferenceResultRepository(JdbcTemplate jdbcTemplate, GeometryConverter geometryConverter) {
    this.jdbcTemplate = jdbcTemplate;
    this.geometryConverter = geometryConverter;
  }

  /**
   * Loads all qualifying rows for a single map id within the given batches.
   *
   * @param batchIds batch ids to match (bound as bigint[])
   * @param mapId map id to match
   * @return mapped results (possibly empty)
   */
  public List<InferenceResult> findByMapId(List<Long> batchIds, String mapId) {
    String sql =
        """
        SELECT uid, map_id, probability, before_year, after_year,
        before_c, before_p, after_c, after_p, ST_AsText(geometry) as geometry_wkt
        FROM inference_results_testing
        WHERE batch_id = ANY(?) AND map_id = ?
        AND after_c IS NOT NULL
        AND after_p IS NOT NULL
        """;
    log.info("Querying database for map_id: {}, batch_ids: {}", mapId, batchIds);
    PreparedStatementCreator psc =
        con -> {
          PreparedStatement ps = con.prepareStatement(sql);
          ps.setArray(1, toBigintArray(con, batchIds));
          ps.setString(2, mapId);
          return ps;
        };
    List<InferenceResult> results = jdbcTemplate.query(psc, new InferenceResultRowMapper());
    log.info("Found {} results for map_id: {}", results.size(), mapId);
    return results;
  }

  /**
   * Loads all qualifying rows across every map id for the given batches.
   *
   * @param batchIds batch ids to match (bound as bigint[])
   * @return mapped results (possibly empty)
   */
  public List<InferenceResult> findByBatchIds(List<Long> batchIds) {
    String sql =
        """
        SELECT uid, map_id, probability, before_year, after_year,
        before_c, before_p, after_c, after_p, ST_AsText(geometry) as geometry_wkt
        FROM inference_results_testing
        WHERE batch_id = ANY(?)
        AND after_c IS NOT NULL
        AND after_p IS NOT NULL
        """;
    log.info("Querying database for all map_ids, batch_ids: {}", batchIds);
    PreparedStatementCreator psc =
        con -> {
          PreparedStatement ps = con.prepareStatement(sql);
          ps.setArray(1, toBigintArray(con, batchIds));
          return ps;
        };
    List<InferenceResult> results = jdbcTemplate.query(psc, new InferenceResultRowMapper());
    log.info("Found {} results across all map_ids", results.size());
    return results;
  }

  /**
   * Returns the distinct map ids that have qualifying rows for the given batches.
   *
   * @param batchIds batch ids to match (bound as bigint[])
   * @return distinct map ids (possibly empty)
   */
  public List<String> findMapIdByBatchIds(List<Long> batchIds) {
    String sql =
        """
        SELECT DISTINCT map_id
        FROM inference_results_testing
        WHERE batch_id = ANY(?)
        AND after_c IS NOT NULL
        AND after_p IS NOT NULL
        """;
    log.info("Querying database for all map_ids, batch_ids: {}", batchIds);
    PreparedStatementCreator psc =
        con -> {
          PreparedStatement ps = con.prepareStatement(sql);
          ps.setArray(1, toBigintArray(con, batchIds));
          return ps;
        };
    List<String> mapIds = jdbcTemplate.query(psc, (rs, rowNum) -> rs.getString("map_id"));
    log.info("Found {} map_ids", mapIds.size());
    return mapIds;
  }

  /** Binds the batch-id list as a PostgreSQL bigint[] for use with "= ANY(?)". */
  private static Array toBigintArray(Connection con, List<Long> batchIds) throws SQLException {
    return con.createArrayOf("bigint", batchIds.toArray());
  }

  /** Maps a result row to an InferenceResult, preserving SQL NULLs as Java nulls. */
  private class InferenceResultRowMapper implements RowMapper<InferenceResult> {
    @Override
    public InferenceResult mapRow(ResultSet rs, int rowNum) throws SQLException {
      InferenceResult result = new InferenceResult();
      result.setUid(rs.getString("uid"));
      result.setMapId(rs.getString("map_id"));
      result.setProbability(getDoubleOrNull(rs, "probability"));
      result.setBeforeYear(getLongOrNull(rs, "before_year"));
      result.setAfterYear(getLongOrNull(rs, "after_year"));
      result.setBeforeC(rs.getString("before_c"));
      result.setBeforeP(getDoubleOrNull(rs, "before_p"));
      result.setAfterC(rs.getString("after_c"));
      result.setAfterP(getDoubleOrNull(rs, "after_p"));
      String geometryWkt = rs.getString("geometry_wkt");
      if (geometryWkt != null) {
        result.setGeometry(geometryConverter.convertWKTToJTS(geometryWkt));
      }
      return result;
    }

    // getLong/getDouble return 0 for SQL NULL, so wasNull() restores the null.
    private Long getLongOrNull(ResultSet rs, String columnName) throws SQLException {
      long value = rs.getLong(columnName);
      return rs.wasNull() ? null : value;
    }

    private Double getDoubleOrNull(ResultSet rs, String columnName) throws SQLException {
      double value = rs.getDouble(columnName);
      return rs.wasNull() ? null : value;
    }
  }
}

View File

@@ -0,0 +1,245 @@
package com.kamco.makesample.service;
import com.kamco.makesample.config.GeoServerProperties;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Base64;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.RestTemplate;
/**
 * Registers shapefiles with a GeoServer instance through its REST API.
 *
 * <p>Accepts either a ready-made {@code .zip} archive or a bare {@code .shp}
 * file (in which case the sidecar files next to it are zipped up first),
 * optionally deletes an existing layer of the same name, and PUTs the
 * archive to the workspace's datastore endpoint.
 */
@Service
public class GeoServerRegistrationService {

  private static final Logger log = LoggerFactory.getLogger(GeoServerRegistrationService.class);

  /** Shapefile component extensions bundled into the upload archive. */
  private static final List<String> SHAPEFILE_EXTENSIONS =
      Arrays.asList(".shp", ".shx", ".dbf", ".prj", ".cpg", ".qpj");

  private final RestTemplate restTemplate;
  private final GeoServerProperties properties;

  public GeoServerRegistrationService(GeoServerProperties properties) {
    // NOTE(review): a fresh RestTemplate is created here, so the timeouts
    // configured in RestTemplateConfig's bean do not apply to these calls —
    // consider injecting the configured RestTemplate bean instead.
    this.restTemplate = new RestTemplate();
    this.properties = properties;
  }

  /**
   * Uploads the given shapefile (or shapefile ZIP) as {@code layerName}.
   *
   * @param filePath path to a {@code .zip} or {@code .shp} file
   * @param layerName target layer/datastore name in the configured workspace
   * @throws IllegalArgumentException if the inputs are missing or unreadable
   * @throws RuntimeException if reading the file or the REST upload fails
   */
  public void uploadShapefileZip(String filePath, String layerName) {
    String zipFilePath = filePath;
    boolean tempZipCreated = false;
    try {
      log.info("Starting shapefile upload to GeoServer");
      log.info("Input file: {}", filePath);
      log.info("Layer name: {}", layerName);
      log.info("Workspace: {}", properties.getWorkspace());
      validateInputs(filePath, layerName);
      // A bare .shp needs its sidecar files; bundle them into a temporary ZIP.
      if (filePath.toLowerCase().endsWith(".shp")) {
        log.info("Input is .shp file, creating ZIP archive with related files...");
        zipFilePath = createZipFromShapefile(filePath);
        tempZipCreated = true;
        log.info("Temporary ZIP created: {}", zipFilePath);
      }
      // Replace an existing layer when overwriting is enabled.
      if (properties.isOverwriteExisting() && layerExists(layerName)) {
        log.info("Layer '{}' already exists. Deleting...", layerName);
        deleteLayer(layerName);
      }
      Path path = Paths.get(zipFilePath);
      byte[] zipData = Files.readAllBytes(path);
      log.info("ZIP file size: {} bytes", zipData.length);
      // GeoServer creates/overwrites a datastore named after the layer.
      String url =
          String.format(
              "%s/rest/workspaces/%s/datastores/%s/file.shp",
              properties.getBaseUrl(), properties.getWorkspace(), layerName);
      HttpHeaders headers = createHeaders();
      headers.setContentType(MediaType.valueOf("application/zip"));
      HttpEntity<byte[]> request = new HttpEntity<>(zipData, headers);
      log.info("Uploading to GeoServer: {}", url);
      ResponseEntity<String> response =
          restTemplate.exchange(url, HttpMethod.PUT, request, String.class);
      if (response.getStatusCode() == HttpStatus.CREATED
          || response.getStatusCode() == HttpStatus.OK) {
        log.info("Shapefile uploaded successfully to GeoServer");
        log.info(
            "Layer '{}' is now available in workspace '{}'", layerName, properties.getWorkspace());
      } else {
        log.warn("Unexpected response status: {}", response.getStatusCode());
      }
    } catch (IOException e) {
      log.error("Failed to read file: {}", filePath, e);
      throw new RuntimeException("Failed to read file", e);
    } catch (HttpClientErrorException e) {
      log.error(
          "GeoServer upload failed. Status: {}, Response: {}",
          e.getStatusCode(),
          e.getResponseBodyAsString());
      throw new RuntimeException("GeoServer upload failed", e);
    } catch (Exception e) {
      log.error("Unexpected error during shapefile upload", e);
      throw new RuntimeException("Shapefile upload failed", e);
    } finally {
      // Always remove the temporary ZIP we created, even on failure.
      if (tempZipCreated && zipFilePath != null) {
        try {
          Files.deleteIfExists(Paths.get(zipFilePath));
          log.info("Temporary ZIP file deleted: {}", zipFilePath);
        } catch (IOException e) {
          log.warn("Failed to delete temporary ZIP file: {}", zipFilePath, e);
        }
      }
    }
  }

  /** Rejects null/blank arguments, missing or unreadable files, and unsupported extensions. */
  private void validateInputs(String filePath, String layerName) {
    if (filePath == null || filePath.trim().isEmpty()) {
      throw new IllegalArgumentException("File path cannot be empty");
    }
    if (layerName == null || layerName.trim().isEmpty()) {
      throw new IllegalArgumentException("Layer name cannot be empty");
    }
    File file = new File(filePath);
    if (!file.exists()) {
      throw new IllegalArgumentException("File does not exist: " + filePath);
    }
    if (!file.canRead()) {
      throw new IllegalArgumentException("Cannot read file: " + filePath);
    }
    String lowerPath = filePath.toLowerCase();
    if (!lowerPath.endsWith(".zip") && !lowerPath.endsWith(".shp")) {
      throw new IllegalArgumentException("File must be a .zip or .shp file: " + filePath);
    }
  }

  /** Returns true when GET /layers/{name} answers 200; any client error counts as absent. */
  private boolean layerExists(String layerName) {
    try {
      String url =
          String.format(
              "%s/rest/workspaces/%s/layers/%s",
              properties.getBaseUrl(), properties.getWorkspace(), layerName);
      HttpHeaders headers = createHeaders();
      HttpEntity<Void> request = new HttpEntity<>(headers);
      ResponseEntity<String> response =
          restTemplate.exchange(url, HttpMethod.GET, request, String.class);
      return response.getStatusCode() == HttpStatus.OK;
    } catch (HttpClientErrorException e) {
      if (e.getStatusCode() == HttpStatus.NOT_FOUND) {
        return false;
      }
      log.warn("Error checking if layer exists: {}", e.getMessage());
      return false;
    }
  }

  /** Deletes the datastore named after the layer (recurse=true also removes the layer). */
  private void deleteLayer(String layerName) {
    try {
      // Delete datastore (which will also delete the layer)
      String datastoreUrl =
          String.format(
              "%s/rest/workspaces/%s/datastores/%s?recurse=true",
              properties.getBaseUrl(), properties.getWorkspace(), layerName);
      HttpHeaders headers = createHeaders();
      HttpEntity<Void> request = new HttpEntity<>(headers);
      restTemplate.exchange(datastoreUrl, HttpMethod.DELETE, request, String.class);
      log.info("Successfully deleted existing layer/datastore: {}", layerName);
    } catch (HttpClientErrorException e) {
      if (e.getStatusCode() != HttpStatus.NOT_FOUND) {
        log.warn("Failed to delete layer: {}", e.getMessage());
      }
    }
  }

  /**
   * Zips the .shp and its sidecar files (same base name) into a temp archive.
   *
   * @return path of the temporary ZIP; the caller is responsible for deleting it
   * @throws IOException when no components are found or zipping fails
   */
  private String createZipFromShapefile(String shpFilePath) throws IOException {
    File shpFile = new File(shpFilePath);
    // getAbsoluteFile() guards against a bare filename ("x.shp"), for which
    // getParent() would be null and the previous code threw an NPE.
    File parentDir = shpFile.getAbsoluteFile().getParentFile();
    // Case-insensitive strip, consistent with the caller's .shp detection.
    String baseName = shpFile.getName().replaceAll("(?i)\\.shp$", "");
    File[] relatedFiles =
        parentDir.listFiles(
            f -> {
              String name = f.getName();
              int dot = name.lastIndexOf('.');
              if (dot < 0) {
                // Previously substring(0, -1) threw for extension-less files
                // that happened to live in the same directory.
                return false;
              }
              String fileBaseName = name.substring(0, dot);
              String ext = name.substring(dot).toLowerCase();
              return fileBaseName.equals(baseName) && SHAPEFILE_EXTENSIONS.contains(ext);
            });
    if (relatedFiles == null || relatedFiles.length == 0) {
      throw new IOException("No shapefile components found for: " + shpFilePath);
    }
    log.info("Found {} shapefile components to archive:", relatedFiles.length);
    for (File f : relatedFiles) {
      log.info(" - {}", f.getName());
    }
    Path tempZip = Files.createTempFile("shapefile_", ".zip");
    try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(tempZip.toFile()))) {
      for (File file : relatedFiles) {
        try (FileInputStream fis = new FileInputStream(file)) {
          zos.putNextEntry(new ZipEntry(file.getName()));
          fis.transferTo(zos); // stream copy without a manual buffer loop
          zos.closeEntry();
        }
      }
    }
    return tempZip.toString();
  }

  /** Builds Basic-auth headers from the configured GeoServer credentials. */
  private HttpHeaders createHeaders() {
    HttpHeaders headers = new HttpHeaders();
    String auth = properties.getUsername() + ":" + properties.getPassword();
    // Encode explicitly as UTF-8 instead of relying on the platform default charset.
    String encodedAuth =
        Base64.getEncoder().encodeToString(auth.getBytes(StandardCharsets.UTF_8));
    headers.set("Authorization", "Basic " + encodedAuth);
    return headers;
  }
}

View File

@@ -0,0 +1,41 @@
package com.kamco.makesample.service;
import com.kamco.makesample.exception.GeometryConversionException;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/** Parses WKT strings from the database into JTS {@link Geometry} objects. */
@Component
public class GeometryConverter {

  private static final Logger log = LoggerFactory.getLogger(GeometryConverter.class);

  // Shared reader instance. NOTE(review): confirm the JTS WKTReader is safe to
  // share if this singleton is ever used from multiple threads.
  private final WKTReader wktReader;

  public GeometryConverter() {
    this.wktReader = new WKTReader();
  }

  /**
   * Converts a WKT string into a JTS geometry.
   *
   * <p>Returns {@code null} for a null or blank input. Geometries that fail
   * JTS validity checks are logged but still returned.
   *
   * @param wkt well-known-text representation, may be null/blank
   * @return the parsed geometry, or null when the input is empty
   * @throws GeometryConversionException when the WKT cannot be parsed
   */
  public Geometry convertWKTToJTS(String wkt) {
    if (wkt == null || wkt.trim().isEmpty()) {
      return null;
    }
    final Geometry parsed;
    try {
      parsed = wktReader.read(wkt);
    } catch (ParseException e) {
      throw new GeometryConversionException(
          "Failed to convert WKT to JTS geometry: " + e.getMessage(), e);
    }
    if (!parsed.isValid()) {
      log.warn("Invalid geometry detected: {}", parsed);
    }
    return parsed;
  }
}

View File

@@ -0,0 +1,231 @@
package com.kamco.makesample.service;
import com.kamco.makesample.config.ConverterProperties;
import com.kamco.makesample.exception.MixedGeometryException;
import com.kamco.makesample.model.InferenceResult;
import com.kamco.makesample.repository.InferenceResultRepository;
import com.kamco.makesample.writer.GeoJsonWriter;
import com.kamco.makesample.writer.ResultZipWriter;
import com.kamco.makesample.writer.ShapefileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
@Service
public class ShapefileConverterService {
private static final Logger log = LoggerFactory.getLogger(ShapefileConverterService.class);
private final ConverterProperties converterProperties;
private final InferenceResultRepository repository;
private final ShapefileWriter shapefileWriter;
private final GeoJsonWriter geoJsonWriter;
/** Wires the configuration, repository, and output writers used by the conversion run. */
public ShapefileConverterService(
    ConverterProperties converterProperties,
    InferenceResultRepository repository,
    ShapefileWriter shapefileWriter,
    GeoJsonWriter geoJsonWriter) {
  this.converterProperties = converterProperties;
  this.repository = repository;
  this.shapefileWriter = shapefileWriter;
  this.geoJsonWriter = geoJsonWriter;
}
/**
 * Entry point for the conversion run. Dispatches on the configured mode:
 * RESOLVE (look up map_ids from the database), MERGED (one combined output),
 * MAP_IDS (explicit list), or — when no mode is set — the legacy behavior of
 * merging when map-ids is empty and iterating otherwise.
 *
 * @throws IllegalStateException when batch-ids is missing, the mode is
 *     unknown, or mode=MAP_IDS is used without map-ids
 */
public void convertAll() {
  List<String> mapIds = converterProperties.getMapIds();
  String mode = converterProperties.getMode();
  // Validation: ensure we have batch-ids
  if (converterProperties.getBatchIds() == null || converterProperties.getBatchIds().isEmpty()) {
    throw new IllegalStateException("Configuration error: batch-ids must be specified");
  }
  String m = (mode == null) ? "" : mode.trim().toUpperCase();
  // An explicitly configured mode takes precedence over the legacy branching.
  if (!m.isEmpty()) {
    switch (m) {
      case "RESOLVE" -> {
        log.info("Starting shapefile conversion (map_ids resolved internally)");
        convertByResolvedMapIds();
        return;
      }
      case "MERGED" -> {
        log.info("Starting merged shapefile conversion");
        convertMerged();
        return;
      }
      case "MAP_IDS" -> {
        if (mapIds == null || mapIds.isEmpty()) {
          throw new IllegalStateException(
              "Configuration error: map-ids must be specified when mode=MAP_IDS");
        }
        log.info("Starting shapefile conversion for {} map_ids", mapIds.size());
        convertByMapIds(mapIds);
        return;
      }
      default ->
          throw new IllegalStateException(
              "Configuration error: unsupported mode="
                  + mode
                  + " (supported: MERGED, MAP_IDS, RESOLVE)");
    }
  }
  // Branch: merged mode vs map-specific mode — preserves the pre-"mode" behavior.
  if (mapIds == null || mapIds.isEmpty()) {
    log.info("Starting merged shapefile conversion (no map_ids specified)");
    convertMerged();
  } else {
    log.info("Starting shapefile conversion for {} map_ids: {}", mapIds.size(), mapIds);
    convertByMapIds(mapIds);
  }
}
/**
 * Converts each map_id independently, logging per-map failures instead of
 * aborting the whole batch, then logs a success/failure summary.
 */
private void convertByMapIds(List<String> mapIds) {
  int converted = 0;
  int failed = 0;
  for (String mapId : mapIds) {
    try {
      convertSingle(mapId);
      converted++;
    } catch (Exception e) {
      log.error("Failed to convert map_id {}: {}", mapId, e.getMessage(), e);
      failed++;
    }
  }
  log.info("Conversion completed. Success: {}, Failures: {}", converted, failed);
}
/**
 * Exports every qualifying record for the configured batch_ids as one merged
 * shapefile + GeoJSON pair named after the inference id, then zips the output
 * directory. Logs and returns early when no records are found.
 */
private void convertMerged() {
  try {
    log.info("Processing all records for batch_ids: {}", converterProperties.getBatchIds());
    Path outputDir = createMergedOutputDirectory();
    List<InferenceResult> results = repository.findByBatchIds(converterProperties.getBatchIds());
    if (results.isEmpty()) {
      log.warn("No results found for batch_ids: {}", converterProperties.getBatchIds());
      return;
    }
    validateGeometries(results);
    // Use inference-id as filename
    String filename = converterProperties.getInferenceId() + ".shp";
    String shapefilePath = outputDir.resolve(filename).toString();
    shapefileWriter.write(results, shapefilePath, converterProperties.getCrs());
    String geoJsonFilename = converterProperties.getInferenceId() + ".geojson";
    String geoJsonPath = outputDir.resolve(geoJsonFilename).toString();
    geoJsonWriter.write(results, geoJsonPath, converterProperties.getCrs());
    // create zip file
    ResultZipWriter.createZip(outputDir, converterProperties.getInferenceId());
    log.info(
        "Successfully created merged shapefile and GeoJSON with {} records from {} batch_ids",
        results.size(),
        converterProperties.getBatchIds().size());
  } catch (Exception e) {
    log.error("Failed to create merged shapefile: {}", e.getMessage(), e);
    throw e;
  }
}
private void convertSingle(String mapId) {
log.debug("Processing map_id: {}", mapId);
Path outputDir = createOutputDirectory(mapId);
List<InferenceResult> results =
repository.findByMapId(converterProperties.getBatchIds(), mapId);
if (results.isEmpty()) {
log.warn("No results found for map_id: {}", mapId);
return;
}
validateGeometries(results);
String shapefilePath = outputDir.resolve(mapId + ".shp").toString();
shapefileWriter.write(results, shapefilePath, converterProperties.getCrs());
String geoJsonPath = outputDir.resolve(mapId + ".geojson").toString();
geoJsonWriter.write(results, geoJsonPath, converterProperties.getCrs());
log.info(
"Successfully created shapefile and GeoJSON for map_id {} with {} records",
mapId,
results.size());
}
private void convertByResolvedMapIds() {
try {
log.info("Processing all records for batch_ids: {}", converterProperties.getBatchIds());
List<String> results = repository.findMapIdByBatchIds(converterProperties.getBatchIds());
convertByMapIds(results);
} catch (Exception e) {
log.error("Failed to create merged shapefile: {}", e.getMessage(), e);
throw e;
}
}
private Path createOutputDirectory(String mapId) {
try {
Path outputPath =
Paths.get(
converterProperties.getOutputBaseDir(), converterProperties.getInferenceId(), mapId);
Files.createDirectories(outputPath);
log.debug("Created output directory: {}", outputPath);
return outputPath;
} catch (IOException e) {
throw new RuntimeException("Failed to create output directory for map_id: " + mapId, e);
}
}
private Path createMergedOutputDirectory() {
try {
Path outputPath =
Paths.get(
converterProperties.getOutputBaseDir(),
converterProperties.getInferenceId(),
"merge");
Files.createDirectories(outputPath);
log.info("Created merged output directory: {}", outputPath);
return outputPath;
} catch (IOException e) {
throw new RuntimeException("Failed to create merged output directory", e);
}
}
private void validateGeometries(List<InferenceResult> results) {
Set<String> geometryTypes =
results.stream()
.filter(r -> r.getGeometry() != null)
.map(r -> r.getGeometry().getGeometryType())
.collect(Collectors.toSet());
if (geometryTypes.size() > 1) {
throw new MixedGeometryException(
"Shapefile requires homogeneous geometry type. Found: " + geometryTypes);
}
if (geometryTypes.isEmpty()) {
log.warn("No valid geometries found in results");
}
}
}

View File

@@ -0,0 +1,151 @@
package com.kamco.makesample.writer;
import com.kamco.makesample.exception.ShapefileConversionException;
import com.kamco.makesample.model.InferenceResult;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;
import org.geotools.api.feature.simple.SimpleFeature;
import org.geotools.api.feature.simple.SimpleFeatureType;
import org.geotools.api.referencing.FactoryException;
import org.geotools.api.referencing.crs.CoordinateReferenceSystem;
import org.geotools.feature.DefaultFeatureCollection;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.geojson.feature.FeatureJSON;
import org.geotools.referencing.CRS;
import org.locationtech.jts.geom.Geometry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
@Component
public class GeoJsonWriter {

  private static final Logger log = LoggerFactory.getLogger(GeoJsonWriter.class);

  /**
   * Writes the given inference results as a GeoJSON FeatureCollection file.
   *
   * <p>Features with a null geometry are still written (counted and logged). The parent
   * directory of {@code outputPath} is created if missing, and the CRS is embedded in the
   * output via {@code setEncodeFeatureCollectionCRS(true)}.
   *
   * @param results rows to export; a null or empty list is logged and skipped
   * @param outputPath path of the {@code .geojson} file to create
   * @param crsCode coordinate reference system code, e.g. {@code "EPSG:5186"}
   * @throws ShapefileConversionException on an invalid CRS code or an I/O failure
   */
  public void write(List<InferenceResult> results, String outputPath, String crsCode) {
    if (results == null || results.isEmpty()) {
      log.warn("No results to write to GeoJSON");
      return;
    }
    try {
      CoordinateReferenceSystem crs = CRS.decode(crsCode);
      // Schema is derived from the first non-null geometry's concrete class.
      Class<?> geometryType = determineGeometryType(results);
      SimpleFeatureType featureType = createFeatureType(crs, geometryType);
      DefaultFeatureCollection collection = new DefaultFeatureCollection();
      SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(featureType);
      int geometryCount = 0;
      int nullGeometryCount = 0;
      for (InferenceResult result : results) {
        if (result.getGeometry() != null) {
          geometryCount++;
        } else {
          nullGeometryCount++;
        }
        SimpleFeature feature = buildFeature(featureBuilder, result);
        collection.add(feature);
      }
      log.info(
          "Built {} features for GeoJSON: {} with geometry, {} without geometry",
          results.size(),
          geometryCount,
          nullGeometryCount);
      File geoJsonFile = new File(outputPath);
      // Ensure parent directory exists
      if (geoJsonFile.getParentFile() != null) {
        geoJsonFile.getParentFile().mkdirs();
      }
      try (FileOutputStream fos = new FileOutputStream(geoJsonFile)) {
        FeatureJSON featureJSON = new FeatureJSON();
        featureJSON.setEncodeFeatureCollectionCRS(true); // Include CRS in GeoJSON
        featureJSON.writeFeatureCollection(collection, fos);
        log.info("Successfully wrote {} features to GeoJSON: {}", results.size(), outputPath);
      }
    } catch (FactoryException e) {
      throw new ShapefileConversionException("Invalid CRS code: " + crsCode, e);
    } catch (IOException e) {
      throw new ShapefileConversionException("Failed to create GeoJSON at: " + outputPath, e);
    }
  }

  /**
   * Builds the feature schema: {@code the_geom} plus the fixed attribute columns.
   *
   * <p>The attribute add-order here MUST match the add-order in {@link #buildFeature};
   * values are bound positionally by {@code SimpleFeatureBuilder}.
   */
  private SimpleFeatureType createFeatureType(
      CoordinateReferenceSystem crs, Class<?> geometryType) {
    SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
    builder.setName("inference_results");
    builder.setCRS(crs);
    // Geometry field
    builder.add("the_geom", geometryType);
    builder.setDefaultGeometry("the_geom");
    // Attribute fields - keeping consistent with ShapefileWriter
    builder.add("uid", String.class);
    builder.add("map_id", String.class);
    builder.add("chn_dtct_p", String.class);
    builder.add("cprs_yr", Long.class);
    builder.add("crtr_yr", Long.class);
    builder.add("bf_cls_cd", String.class);
    builder.add("bf_cls_pro", String.class);
    builder.add("af_cls_cd", String.class);
    builder.add("af_cls_pro", String.class);
    return builder.buildFeatureType();
  }

  /**
   * Builds one feature from a result row; values are added positionally, so the order
   * MUST mirror the schema in {@link #createFeatureType}. Null numeric values fall back
   * to {@code 0} / {@code "0.0"}. The result's uid is used as the feature id.
   */
  private SimpleFeature buildFeature(SimpleFeatureBuilder builder, InferenceResult result) {
    Geometry geom = result.getGeometry();
    // Geometry (the_geom)
    builder.add(geom);
    // Attribute fields
    String uid = result.getUid();
    String mapId = result.getMapId();
    Double probability = result.getProbability();
    Long beforeYear = result.getBeforeYear();
    Long afterYear = result.getAfterYear();
    String beforeC = result.getBeforeC();
    Double beforeP = result.getBeforeP();
    String afterC = result.getAfterC();
    Double afterP = result.getAfterP();
    builder.add(uid);
    builder.add(mapId);
    // Probabilities are stored as strings to match the String-typed schema columns.
    builder.add(probability != null ? String.valueOf(probability) : "0.0");
    builder.add(beforeYear != null ? beforeYear : 0L);
    builder.add(afterYear != null ? afterYear : 0L);
    builder.add(beforeC);
    builder.add(beforeP != null ? String.valueOf(beforeP) : "0.0");
    builder.add(afterC);
    builder.add(afterP != null ? String.valueOf(afterP) : "0.0");
    return builder.buildFeature(uid);
  }

  /**
   * Returns the concrete class of the first non-null geometry in {@code results}.
   *
   * @throws ShapefileConversionException when every geometry is null
   */
  private Class<?> determineGeometryType(List<InferenceResult> results) {
    Geometry firstGeometry = null;
    for (InferenceResult result : results) {
      if (result.getGeometry() != null) {
        firstGeometry = result.getGeometry();
        break;
      }
    }
    if (firstGeometry == null) {
      throw new ShapefileConversionException("No valid geometries found in results");
    }
    return firstGeometry.getClass();
  }
}

View File

@@ -0,0 +1,68 @@
package com.kamco.makesample.writer;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.List;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
 * Utility that bundles shapefile/GeoJSON export artifacts into one ZIP archive.
 *
 * <p>Stateless; safe to call concurrently for distinct target directories.
 */
public final class ResultZipWriter {

  private static final int BUF = 1024 * 1024; // 1MB output buffer

  /** File extensions that make up one export result, in archive order. */
  private static final List<String> RESULT_EXTENSIONS =
      List.of("shp", "shx", "dbf", "prj", "fix", "geojson");

  private ResultZipWriter() {
    // utility class - no instances
  }

  /**
   * Creates {@code <baseName>.zip} inside {@code dirPath} containing every sibling file named
   * {@code <baseName>.<ext>} for the known result extensions; missing files are skipped.
   *
   * <p>The archive is written to a {@code .zip.tmp} file first and then atomically renamed,
   * so readers never observe a partially written ZIP.
   *
   * @param dirPath 결과 파일들이 있는 디렉터리 (directory holding the result files)
   * @param baseName uid — the base file name shared by all result files
   * @throws RuntimeException if writing the archive or the final atomic rename fails
   */
  public static void createZip(Path dirPath, String baseName) {
    Path zip = dirPath.resolve(baseName + ".zip");
    Path tmp = dirPath.resolve(baseName + ".zip.tmp");
    try (OutputStream os = Files.newOutputStream(tmp);
        BufferedOutputStream bos = new BufferedOutputStream(os, BUF);
        ZipOutputStream zos = new ZipOutputStream(bos)) {
      // Favor speed: shapefile payloads compress poorly anyway.
      zos.setLevel(Deflater.BEST_SPEED);
      for (String ext : RESULT_EXTENSIONS) {
        Path file = dirPath.resolve(baseName + "." + ext);
        if (!Files.exists(file)) {
          continue;
        }
        zos.putNextEntry(new ZipEntry(file.getFileName().toString()));
        // Files.copy streams the file into the current entry (replaces manual buffer loop).
        Files.copy(file, zos);
        zos.closeEntry();
      }
    } catch (IOException e) {
      throw new RuntimeException("ZIP 생성 실패", e);
    }
    try {
      Files.move(tmp, zip, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE);
    } catch (IOException e) {
      throw new RuntimeException("ZIP 완료 처리 실패", e);
    }
  }
}

View File

@@ -0,0 +1,211 @@
package com.kamco.makesample.writer;
import com.kamco.makesample.exception.ShapefileConversionException;
import com.kamco.makesample.model.InferenceResult;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.geotools.api.data.SimpleFeatureSource;
import org.geotools.api.data.SimpleFeatureStore;
import org.geotools.api.data.Transaction;
import org.geotools.api.feature.simple.SimpleFeature;
import org.geotools.api.feature.simple.SimpleFeatureType;
import org.geotools.api.referencing.FactoryException;
import org.geotools.api.referencing.crs.CoordinateReferenceSystem;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.shapefile.ShapefileDataStore;
import org.geotools.data.shapefile.ShapefileDataStoreFactory;
import org.geotools.feature.DefaultFeatureCollection;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.referencing.CRS;
import org.locationtech.jts.geom.Geometry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
@Component
public class ShapefileWriter {

  private static final Logger log = LoggerFactory.getLogger(ShapefileWriter.class);

  /**
   * Writes the given inference results to an ESRI shapefile at {@code outputPath}.
   *
   * <p>All geometries must share one geometry type (a shapefile constraint); the schema is
   * derived from the first non-null geometry encountered.
   *
   * @param results rows to export; a null or empty list is logged and skipped
   * @param outputPath path of the {@code .shp} file to create
   * @param crsCode coordinate reference system code, e.g. {@code "EPSG:5186"}
   * @throws ShapefileConversionException on invalid CRS, I/O failure, or write failure
   */
  public void write(List<InferenceResult> results, String outputPath, String crsCode) {
    if (results == null || results.isEmpty()) {
      log.warn("No results to write to shapefile");
      return;
    }
    try {
      CoordinateReferenceSystem crs = CRS.decode(crsCode);
      Class<?> geometryType = determineGeometryType(results);
      SimpleFeatureType featureType = createFeatureType(crs, geometryType);
      File shpFile = new File(outputPath);
      ShapefileDataStoreFactory factory = new ShapefileDataStoreFactory();
      Map<String, Serializable> params = new HashMap<>();
      params.put("url", shpFile.toURI().toURL());
      params.put("create spatial index", Boolean.TRUE);
      ShapefileDataStore dataStore = (ShapefileDataStore) factory.createNewDataStore(params);
      // Dispose the data store on every path: previously it leaked when createSchema or
      // feature building threw before the write transaction's finally block was reached.
      try {
        dataStore.createSchema(featureType);
        DefaultFeatureCollection collection = buildFeatureCollection(results, featureType);
        writeFeatures(dataStore, collection, results.size(), outputPath);
      } finally {
        dataStore.dispose();
      }
    } catch (FactoryException e) {
      throw new ShapefileConversionException("Invalid CRS code: " + crsCode, e);
    } catch (IOException e) {
      throw new ShapefileConversionException("Failed to create shapefile at: " + outputPath, e);
    }
  }

  /** Builds the in-memory feature collection and logs geometry / null-geometry counts. */
  private DefaultFeatureCollection buildFeatureCollection(
      List<InferenceResult> results, SimpleFeatureType featureType) {
    DefaultFeatureCollection collection = new DefaultFeatureCollection();
    SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(featureType);
    int geometryCount = 0;
    int nullGeometryCount = 0;
    for (InferenceResult result : results) {
      if (result.getGeometry() != null) {
        geometryCount++;
      } else {
        nullGeometryCount++;
      }
      collection.add(buildFeature(featureBuilder, result));
    }
    log.info(
        "Built {} features: {} with geometry, {} without geometry",
        results.size(),
        geometryCount,
        nullGeometryCount);
    return collection;
  }

  /** Writes the collection into the shapefile inside a single commit/rollback transaction. */
  private void writeFeatures(
      ShapefileDataStore dataStore,
      DefaultFeatureCollection collection,
      int recordCount,
      String outputPath)
      throws IOException {
    Transaction transaction = new DefaultTransaction("create");
    try {
      String typeName = dataStore.getTypeNames()[0];
      SimpleFeatureSource featureSource = dataStore.getFeatureSource(typeName);
      if (featureSource instanceof SimpleFeatureStore) {
        SimpleFeatureStore featureStore = (SimpleFeatureStore) featureSource;
        featureStore.setTransaction(transaction);
        featureStore.addFeatures(collection);
        transaction.commit();
      } else {
        throw new ShapefileConversionException("Feature source is read-only");
      }
      log.info("Successfully wrote {} features to shapefile: {}", recordCount, outputPath);
    } catch (Exception e) {
      transaction.rollback();
      throw new ShapefileConversionException("Failed to write features to shapefile", e);
    } finally {
      transaction.close();
    }
  }

  /**
   * Builds the shapefile schema: {@code the_geom} plus the fixed attribute columns.
   *
   * <p>The attribute add-order here MUST match the add-order in {@link #buildFeature};
   * values are bound positionally by {@code SimpleFeatureBuilder}.
   */
  private SimpleFeatureType createFeatureType(
      CoordinateReferenceSystem crs, Class<?> geometryType) {
    SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
    builder.setName("inference_results");
    builder.setCRS(crs);
    // The geometry field must be registered as the default geometry.
    builder.add("the_geom", geometryType);
    builder.setDefaultGeometry("the_geom");
    // Attribute columns (DBF field names are limited to 10 characters).
    builder.add("uid", String.class);
    builder.add("map_id", String.class);
    builder.add("chn_dtct_p", String.class);
    builder.add("cprs_yr", Long.class);
    builder.add("crtr_yr", Long.class);
    builder.add("bf_cls_cd", String.class);
    builder.add("bf_cls_pro", String.class);
    builder.add("af_cls_cd", String.class);
    builder.add("af_cls_pro", String.class);
    return builder.buildFeatureType();
  }

  /**
   * Builds one feature from a result row; the add-order MUST mirror the schema in
   * {@link #createFeatureType}. Null numeric values fall back to {@code 0} / {@code "0.0"}.
   */
  private SimpleFeature buildFeature(SimpleFeatureBuilder builder, InferenceResult result) {
    Geometry geom = result.getGeometry();
    if (geom == null) {
      log.warn("Null geometry detected for uid: {}", result.getUid());
    } else {
      log.debug(
          "Adding geometry for uid {}: type={}, valid={}, numPoints={}",
          result.getUid(),
          geom.getGeometryType(),
          geom.isValid(),
          geom.getNumPoints());
    }
    // Geometry (the_geom)
    builder.add(geom);
    String uid = result.getUid();
    String mapId = result.getMapId();
    Double probability = result.getProbability();
    Long beforeYear = result.getBeforeYear();
    Long afterYear = result.getAfterYear();
    String beforeC = result.getBeforeC();
    Double beforeP = result.getBeforeP();
    String afterC = result.getAfterC();
    Double afterP = result.getAfterP();
    log.debug(
        "Feature values - uid: {}, mapId: {}, prob: {}, beforeYear: {}, afterYear: {}, beforeC: {}, beforeP: {}, afterC: {}, afterP: {}",
        uid,
        mapId,
        probability,
        beforeYear,
        afterYear,
        beforeC,
        beforeP,
        afterC,
        afterP);
    builder.add(uid);
    builder.add(mapId);
    // Probabilities are stored as strings to match the String-typed schema columns.
    builder.add(probability != null ? String.valueOf(probability) : "0.0");
    builder.add(beforeYear != null ? beforeYear : 0L);
    builder.add(afterYear != null ? afterYear : 0L);
    builder.add(beforeC);
    builder.add(beforeP != null ? String.valueOf(beforeP) : "0.0");
    builder.add(afterC);
    builder.add(afterP != null ? String.valueOf(afterP) : "0.0");
    return builder.buildFeature(null);
  }

  /**
   * Returns the concrete class of the first non-null geometry in {@code results}.
   *
   * @throws ShapefileConversionException when every geometry is null
   */
  private Class<?> determineGeometryType(List<InferenceResult> results) {
    Geometry firstGeometry = null;
    for (InferenceResult result : results) {
      if (result.getGeometry() != null) {
        firstGeometry = result.getGeometry();
        break;
      }
    }
    if (firstGeometry == null) {
      throw new ShapefileConversionException("No valid geometries found in results");
    }
    return firstGeometry.getClass();
  }
}

View File

@@ -0,0 +1,51 @@
spring:
datasource:
url: jdbc:postgresql://192.168.2.127:15432/kamco_cds
username: kamco_cds
password: kamco_cds_Q!W@E#R$
driver-class-name: org.postgresql.Driver
hikari:
maximum-pool-size: 5
connection-timeout: 30000
idle-timeout: 600000
max-lifetime: 1800000
application:
name: make-shapefile-service
main:
web-application-type: none # Disable web server for CLI application
converter:
inference-id: D5E46F60FC40B1A8BE0CD1F3547AA6
# Required: results from every listed batch id are included in the export
batch-ids:
- 252
- 253
- 257
output-base-dir: '/kamco-nfs/model_output/export/'
crs: 'EPSG:5186'
geoserver:
base-url: 'https://kamco.geo-dev.gs.dabeeo.com/geoserver'
workspace: 'cd'
overwrite-existing: true
connection-timeout: 30000
read-timeout: 60000
# Credentials (optional - environment variables take precedence)
# The plain-text values below are for development convenience only
# For production, use GEOSERVER_USERNAME and GEOSERVER_PASSWORD environment variables
username: 'admin'
password: 'geoserver'
logging:
level:
com.kamco.makesample: DEBUG
org.springframework: WARN
pattern:
console: '%d{yyyy-MM-dd HH:mm:ss} - %msg%n'
layer:
geoserver-url: http://label-tile.gs.dabeeo.com
workspace: cd

View File

@@ -0,0 +1,52 @@
spring:
datasource:
url: jdbc:postgresql://192.168.2.127:15432/kamco_cds
username: kamco_cds
password: kamco_cds_Q!W@E#R$
driver-class-name: org.postgresql.Driver
hikari:
maximum-pool-size: 5
connection-timeout: 30000
idle-timeout: 600000
max-lifetime: 1800000
application:
name: make-shapefile-service
main:
web-application-type: none # Disable web server for CLI application
converter:
inference-id: D5E46F60FC40B1A8BE0CD1F3547AA6
# Optional: omit or set empty to create merged shapefile for all batch-ids
batch-ids: # Required
- 252
- 253
- 257
output-base-dir: '/kamco-nfs/model_output/export/'
#output-base-dir: '/Users/bokmin/export/'
crs: 'EPSG:5186'
geoserver:
base-url: 'https://kamco.geo-dev.gs.dabeeo.com/geoserver'
workspace: 'cd'
overwrite-existing: true
connection-timeout: 30000
read-timeout: 60000
# Credentials (optional - environment variables take precedence)
# Uncomment and set values for development convenience
# For production, use GEOSERVER_USERNAME and GEOSERVER_PASSWORD environment variables
username: 'admin'
password: 'geoserver'
logging:
level:
com.kamco.makesample: DEBUG
org.springframework: WARN
pattern:
console: '%d{yyyy-MM-dd HH:mm:ss} - %msg%n'
layer:
geoserver-url: http://label-tile.gs.dabeeo.com
workspace: cd

View File

@@ -0,0 +1,51 @@
spring:
datasource:
url: jdbc:postgresql://192.168.2.127:15432/kamco_cds
username: kamco_cds
password: kamco_cds_Q!W@E#R$
driver-class-name: org.postgresql.Driver
hikari:
maximum-pool-size: 5
connection-timeout: 30000
idle-timeout: 600000
max-lifetime: 1800000
application:
name: make-shapefile-service
main:
web-application-type: none # Disable web server for CLI application
converter:
inference-id: D5E46F60FC40B1A8BE0CD1F3547AA6
# Optional: omit or set empty to create merged shapefile for all batch-ids
batch-ids: # Required
- 252
- 253
- 257
output-base-dir: '/kamco-nfs/model_output/export/'
crs: 'EPSG:5186'
geoserver:
base-url: 'https://kamco.geo-dev.gs.dabeeo.com/geoserver'
workspace: 'cd'
overwrite-existing: true
connection-timeout: 30000
read-timeout: 60000
# Credentials (optional - environment variables take precedence)
# Uncomment and set values for development convenience
# For production, use GEOSERVER_USERNAME and GEOSERVER_PASSWORD environment variables
username: 'admin'
password: 'geoserver'
logging:
level:
com.kamco.makesample: DEBUG
org.springframework: WARN
pattern:
console: '%d{yyyy-MM-dd HH:mm:ss} - %msg%n'
layer:
geoserver-url: http://label-tile.gs.dabeeo.com
workspace: cd

View File

@@ -0,0 +1,3 @@
spring:
application:
name: make-shapefile-service