This commit is contained in:
dean
2026-04-15 12:36:58 +09:00
parent b23c3e2689
commit e358d9def5
19 changed files with 1266 additions and 1 deletions

View File

@@ -0,0 +1,50 @@
package com.kamco.shpexporter;
import com.kamco.shpexporter.config.ExporterProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
@Component
public class ExporterRunner implements CommandLineRunner {

    private static final Logger log = LoggerFactory.getLogger(ExporterRunner.class);

    private final JobLauncher jobLauncher;
    private final Job shpExporterJob;
    private final ExporterProperties properties;

    public ExporterRunner(JobLauncher jobLauncher, Job shpExporterJob,
            ExporterProperties properties) {
        this.jobLauncher = jobLauncher;
        this.shpExporterJob = shpExporterJob;
        this.properties = properties;
    }

    /**
     * Validates the exporter configuration and launches {@code shpExporterJob}.
     * A current-time "timestamp" parameter is added so every run produces a
     * distinct JobParameters set (and therefore a fresh JobInstance).
     *
     * @throws IllegalStateException when {@code exporter.batch-ids} is missing or empty
     * @throws Exception             propagated from the JobLauncher
     */
    @Override
    public void run(String... args) throws Exception {
        if (properties.getBatchIds() == null || properties.getBatchIds().isEmpty()) {
            log.error("exporter.batch-ids 가 설정되지 않았습니다.");
            // Fix: fail by throwing instead of System.exit(1). Calling System.exit
            // from a CommandLineRunner triggers shutdown hooks while the context is
            // still starting up, which can deadlock; throwing aborts startup and the
            // JVM still terminates with a non-zero exit code via main().
            throw new IllegalStateException("exporter.batch-ids is not configured");
        }
        log.info("=== shp-exporter-v2 시작 ===");
        log.info("inference-id : {}", properties.getInferenceId());
        log.info("batch-ids : {}", properties.getBatchIds());
        log.info("output : {}", properties.getOutputBaseDir());

        JobParameters params = new JobParametersBuilder()
            .addString("inferenceId", properties.getInferenceId())
            .addString("batchIds", properties.getBatchIds().toString())
            // unique per run → new JobInstance even with identical config
            .addLong("timestamp", System.currentTimeMillis())
            .toJobParameters();
        var result = jobLauncher.run(shpExporterJob, params);
        log.info("Job 완료: {}", result.getExitStatus());
    }
}

View File

@@ -0,0 +1,14 @@
package com.kamco.shpexporter;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
@SpringBootApplication
@EnableConfigurationProperties
public class ShpExporterApplication {

    /**
     * Boots the Spring context, lets the CommandLineRunner execute the batch job,
     * then terminates the JVM with the exit code computed by SpringApplication.exit.
     */
    public static void main(String[] args) {
        var context = SpringApplication.run(ShpExporterApplication.class, args);
        int exitCode = SpringApplication.exit(context);
        System.exit(exitCode);
    }
}

View File

@@ -0,0 +1,124 @@
package com.kamco.shpexporter.batch;
import com.kamco.shpexporter.batch.reader.GeometryConvertingRowMapper;
import com.kamco.shpexporter.batch.tasklet.GeomTypeTasklet;
import com.kamco.shpexporter.batch.writer.MapIdSwitchingWriter;
import com.kamco.shpexporter.config.ExporterProperties;
import com.kamco.shpexporter.model.InferenceResult;
import java.util.List;
import javax.sql.DataSource;
import org.geotools.api.referencing.FactoryException;
import org.geotools.api.referencing.crs.CoordinateReferenceSystem;
import org.geotools.referencing.CRS;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.database.JdbcCursorItemReader;
import org.springframework.batch.item.database.builder.JdbcCursorItemReaderBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.PreparedStatementSetter;
import org.springframework.transaction.PlatformTransactionManager;
@Configuration
public class BatchJobConfig {

    private final ExporterProperties properties;

    public BatchJobConfig(ExporterProperties properties) {
        this.properties = properties;
    }

    // ─── CRS bean ──────────────────────────────────────────────────

    /**
     * Decodes the configured CRS code (default "EPSG:5186") once and exposes it
     * as a shared bean (used by MapIdSwitchingWriter for the shapefile schema).
     */
    @Bean
    public CoordinateReferenceSystem coordinateReferenceSystem() throws FactoryException {
        return CRS.decode(properties.getCrs());
    }

    // ─── Job ───────────────────────────────────────────────────────

    /**
     * Two-step job: (1) resolve/validate the geometry type, (2) stream all rows
     * and write one shapefile per map_id.
     */
    @Bean
    public Job shpExporterJob(
            JobRepository jobRepository,
            Step geomTypeStep,
            Step generateMapIdFilesStep) {
        return new JobBuilder("shpExporterJob", jobRepository)
            .start(geomTypeStep)
            .next(generateMapIdFilesStep)
            .build();
    }

    // ─── Step 1: check geometry type ───────────────────────────────

    @Bean
    public Step geomTypeStep(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            GeomTypeTasklet geomTypeTasklet) {
        return new StepBuilder("geomTypeStep", jobRepository)
            .tasklet(geomTypeTasklet, transactionManager)
            .build();
    }

    // ─── Step 2: generate a shapefile per map_id ───────────────────

    /**
     * Chunk-oriented, fault-tolerant step: up to skip-limit failing items are
     * skipped instead of failing the whole export.
     */
    @Bean
    public Step generateMapIdFilesStep(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            JdbcCursorItemReader<InferenceResult> inferenceResultReader,
            MapIdSwitchingWriter mapIdSwitchingWriter) {
        return new StepBuilder("generateMapIdFilesStep", jobRepository)
            .<InferenceResult, InferenceResult>chunk(properties.getChunkSize(), transactionManager)
            .reader(inferenceResultReader)
            .writer(mapIdSwitchingWriter)
            .faultTolerant()
            .skipLimit(properties.getSkipLimit())
            .skip(Exception.class)
            .listener(mapIdSwitchingWriter) // registers its @BeforeStep / @AfterStep callbacks
            .build();
    }

    // ─── Reader: ORDER BY map_id, uid (full table scan) ────────────

    /**
     * Single streaming JDBC cursor over all rows for the configured batch ids,
     * ordered by (map_id, uid) so the writer can switch files on map_id change.
     * Rows with invalid, wrongly-typed, or out-of-bounds geometries are filtered
     * out in SQL.
     *
     * <p>NOTE(review): SRID 5186 and the coordinate bounding box are hard-coded
     * here even though exporter.crs is configurable — confirm whether they should
     * track the crs property.
     */
    @Bean
    public JdbcCursorItemReader<InferenceResult> inferenceResultReader(
            DataSource dataSource,
            GeometryConvertingRowMapper rowMapper) {
        List<Long> batchIds = properties.getBatchIds();
        String sql =
            "SELECT uid, map_id, probability, before_year, after_year, "
                + " before_c, before_p, after_c, after_p, "
                + " ST_AsText(geometry) as geometry_wkt "
                + "FROM inference_results_testing "
                + "WHERE batch_id = ANY(?) "
                + " AND ST_GeometryType(geometry) IN ('ST_Polygon', 'ST_MultiPolygon') "
                + " AND ST_SRID(geometry) = 5186 "
                + " AND ST_IsValid(geometry) = true "
                + " AND ST_XMin(geometry) >= 125000 AND ST_XMax(geometry) <= 530000 "
                + " AND ST_YMin(geometry) >= -600000 AND ST_YMax(geometry) <= 988000 "
                + "ORDER BY map_id, uid";
        // Bind the batch ids as a SQL bigint[] for the "= ANY(?)" predicate
        PreparedStatementSetter pss = ps -> {
            var arr = ps.getConnection().createArrayOf("bigint", batchIds.toArray());
            ps.setArray(1, arr);
        };
        return new JdbcCursorItemReaderBuilder<InferenceResult>()
            .name("inferenceResultReader")
            .dataSource(dataSource)
            .sql(sql)
            .preparedStatementSetter(pss)
            .rowMapper(rowMapper)
            .fetchSize(properties.getFetchSize())
            .build();
    }
}

View File

@@ -0,0 +1,47 @@
package com.kamco.shpexporter.batch.reader;
import com.kamco.shpexporter.model.InferenceResult;
import com.kamco.shpexporter.service.GeometryConverter;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;
@Component
public class GeometryConvertingRowMapper implements RowMapper<InferenceResult> {

    private final GeometryConverter geometryConverter;

    public GeometryConvertingRowMapper(GeometryConverter geometryConverter) {
        this.geometryConverter = geometryConverter;
    }

    /**
     * Maps one JDBC row onto an InferenceResult, converting the geometry_wkt
     * column into a JTS geometry via GeometryConverter.
     */
    @Override
    public InferenceResult mapRow(ResultSet rs, int rowNum) throws SQLException {
        InferenceResult result = new InferenceResult();
        result.setUid(rs.getString("uid"));
        result.setMapId(rs.getString("map_id"));
        result.setProbability(nullableDouble(rs, "probability"));
        result.setBeforeYear(nullableLong(rs, "before_year"));
        result.setAfterYear(nullableLong(rs, "after_year"));
        result.setBeforeC(rs.getString("before_c"));
        result.setBeforeP(nullableDouble(rs, "before_p"));
        result.setAfterC(rs.getString("after_c"));
        result.setAfterP(nullableDouble(rs, "after_p"));
        String wkt = rs.getString("geometry_wkt");
        if (wkt != null) {
            result.setGeometry(geometryConverter.convertWKTToJTS(wkt));
        }
        return result;
    }

    // Reads a BIGINT column preserving SQL NULL as Java null
    // (rs.getLong alone would silently return 0 for NULL).
    private Long nullableLong(ResultSet rs, String column) throws SQLException {
        long value = rs.getLong(column);
        return rs.wasNull() ? null : value;
    }

    // Same as nullableLong, for DOUBLE PRECISION columns.
    private Double nullableDouble(ResultSet rs, String column) throws SQLException {
        double value = rs.getDouble(column);
        return rs.wasNull() ? null : value;
    }
}

View File

@@ -0,0 +1,65 @@
package com.kamco.shpexporter.batch.tasklet;
import com.kamco.shpexporter.config.ExporterProperties;
import java.sql.Array;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
/**
 * Step 1: queries the distinct geometry types present for the configured batch
 * ids and stores the resolved type in the JobExecutionContext, where
 * MapIdSwitchingWriter reads it to build the SimpleFeatureType.
 */
@Component
public class GeomTypeTasklet implements Tasklet {

    private static final Logger log = LoggerFactory.getLogger(GeomTypeTasklet.class);

    private final JdbcTemplate jdbcTemplate;
    private final ExporterProperties properties;

    public GeomTypeTasklet(JdbcTemplate jdbcTemplate, ExporterProperties properties) {
        this.jdbcTemplate = jdbcTemplate;
        this.properties = properties;
    }

    @Override
    public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext)
            throws Exception {
        List<Long> ids = properties.getBatchIds();
        log.info("Validating geometry types for batch_ids: {}", ids);

        // The detected types are logged for visibility only; the resolved type
        // below is fixed because MultiPolygons are reduced to Polygons downstream.
        List<String> detectedTypes = jdbcTemplate.query(
            connection -> {
                var statement = connection.prepareStatement(
                    "SELECT DISTINCT ST_GeometryType(geometry) "
                        + "FROM inference_results_testing "
                        + "WHERE batch_id = ANY(?) AND geometry IS NOT NULL");
                Array idArray = connection.createArrayOf("bigint", ids.toArray());
                statement.setArray(1, idArray);
                return statement;
            },
            (rs, rowNum) -> rs.getString(1));
        log.info("Detected geometry types: {}", detectedTypes);

        String resolvedType = "Polygon";
        log.info("Using geometry type: {}", resolvedType);

        chunkContext.getStepContext()
            .getStepExecution()
            .getJobExecution()
            .getExecutionContext()
            .putString("geometryType", resolvedType);
        return RepeatStatus.FINISHED;
    }
}

View File

@@ -0,0 +1,244 @@
package com.kamco.shpexporter.batch.writer;
import com.kamco.shpexporter.config.ExporterProperties;
import com.kamco.shpexporter.model.InferenceResult;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.geotools.api.data.SimpleFeatureStore;
import org.geotools.api.data.Transaction;
import org.geotools.api.feature.simple.SimpleFeature;
import org.geotools.api.feature.simple.SimpleFeatureType;
import org.geotools.api.referencing.crs.CoordinateReferenceSystem;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.collection.ListFeatureCollection;
import org.geotools.data.shapefile.ShapefileDataStore;
import org.geotools.data.shapefile.ShapefileDataStoreFactory;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.locationtech.jts.geom.Polygon;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.AfterStep;
import org.springframework.batch.core.annotation.BeforeStep;
import org.springframework.batch.item.Chunk;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.batch.item.ItemStreamWriter;
import org.springframework.stereotype.Component;
/**
 * Writer that produces one Shapefile per map_id.
 *
 * <p>Rows arrive ordered by (map_id, uid); whenever map_id changes, the current
 * file is committed and closed and a new one is opened. The whole result set is
 * streamed through a single JDBC cursor (no partitioning), so memory holds only
 * one chunk (default 1000 rows) at a time.
 *
 * <p>Output path: {output-base-dir}/{inference-id}/{map_id}/{map_id}.shp
 *
 * <p>NOTE(review): the enclosing step is fault-tolerant with skip(Exception.class);
 * on a skip, Spring Batch re-processes the chunk item by item while this writer's
 * mutable file state persists — confirm a skipped chunk cannot duplicate features
 * in the currently open shapefile.
 */
@Component
public class MapIdSwitchingWriter implements ItemStreamWriter<InferenceResult> {

    private static final Logger log = LoggerFactory.getLogger(MapIdSwitchingWriter.class);
    private static final int LOG_INTERVAL = 500; // emit a progress log every 500 map_ids

    private final ExporterProperties properties;
    private final CoordinateReferenceSystem crs;
    private final ShapefileDataStoreFactory dsFactory = new ShapefileDataStoreFactory();

    // Schema and feature builder, created once in beforeStep() from the geometry
    // type that GeomTypeTasklet stored in the JobExecutionContext.
    private SimpleFeatureType featureType;
    private SimpleFeatureBuilder featureBuilder;

    // State of the currently open output file
    private String currentMapId = null;
    private ShapefileDataStore currentDataStore;
    private Transaction currentTransaction;
    private SimpleFeatureStore currentFeatureStore;

    // Running statistics
    private int totalFiles = 0;
    private long totalRecords = 0;
    private long startTimeMs;

    public MapIdSwitchingWriter(ExporterProperties properties, CoordinateReferenceSystem crs) {
        this.properties = properties;
        this.crs = crs;
    }

    /**
     * Builds the shapefile schema before the step runs, using the geometry type
     * stored in the JobExecutionContext (defaults to "Polygon").
     */
    @BeforeStep
    public void beforeStep(StepExecution stepExecution) {
        String geomTypeStr = stepExecution.getJobExecution()
            .getExecutionContext()
            .getString("geometryType", "Polygon");
        Class<?> geomClass = resolveGeometryClass(geomTypeStr);
        this.featureType = buildFeatureType(geomClass);
        this.featureBuilder = new SimpleFeatureBuilder(featureType);
        this.startTimeMs = System.currentTimeMillis();
        log.info("MapIdSwitchingWriter initialized. geometryType={}, outputBase={}/{}",
            geomTypeStr, properties.getOutputBaseDir(), properties.getInferenceId());
    }

    @Override
    public void open(ExecutionContext executionContext) throws ItemStreamException {
        // Initialization is done in beforeStep(); nothing to do here.
    }

    /**
     * Writes one chunk. Items with a null geometry or null map_id are skipped.
     * A change of map_id flushes the buffered features, commits and closes the
     * previous file, and opens a new one.
     */
    @Override
    public void write(Chunk<? extends InferenceResult> chunk) throws Exception {
        List<SimpleFeature> buffer = new ArrayList<>();
        for (InferenceResult result : chunk) {
            if (result.getGeometry() == null) continue;
            String mapId = result.getMapId();
            if (mapId == null) continue;
            // When map_id changes, flush the buffer and switch output files
            if (!mapId.equals(currentMapId)) {
                if (!buffer.isEmpty()) {
                    currentFeatureStore.addFeatures(new ListFeatureCollection(featureType, buffer));
                    buffer.clear();
                }
                if (currentMapId != null) {
                    commitAndClose(currentMapId);
                }
                openForMapId(mapId);
                currentMapId = mapId;
                totalFiles++;
                if (totalFiles % LOG_INTERVAL == 0) {
                    long elapsed = Math.max((System.currentTimeMillis() - startTimeMs) / 1000, 1);
                    log.info("진행: map_id {}개 완료 | 총 {}건 | 경과 {}s | 속도 {}건/s",
                        totalFiles, totalRecords, elapsed, totalRecords / elapsed);
                }
            }
            buffer.add(buildFeature(result));
            totalRecords++;
        }
        // End of chunk — flush the trailing features of the last map_id seen
        if (!buffer.isEmpty()) {
            currentFeatureStore.addFeatures(new ListFeatureCollection(featureType, buffer));
        }
    }

    /** Commits and closes the last open file, then logs the final statistics. */
    @AfterStep
    public ExitStatus afterStep(StepExecution stepExecution) {
        if (currentMapId != null) {
            commitAndClose(currentMapId);
            currentMapId = null;
        }
        long elapsed = Math.max((System.currentTimeMillis() - startTimeMs) / 1000, 1);
        log.info("=== 완료: map_id {}개, 총 {}건, 소요 {}s, 평균 {}건/s ===",
            totalFiles, totalRecords, elapsed, totalRecords / elapsed);
        return ExitStatus.COMPLETED;
    }

    @Override
    public void close() throws ItemStreamException {
        // Normally handled in afterStep(); safety net for abnormal termination.
        if (currentMapId != null) {
            commitAndClose(currentMapId);
            currentMapId = null;
        }
    }

    @Override
    public void update(ExecutionContext executionContext) throws ItemStreamException {
        // Persist running totals into the step's execution context at each commit
        executionContext.putLong("totalRecords", totalRecords);
        executionContext.putInt("totalFiles", totalFiles);
    }

    // ─── private helpers ───────────────────────────────────────────

    /**
     * Creates {output-base-dir}/{inference-id}/{mapId}/{mapId}.shp, creates its
     * schema, and opens a write transaction on it.
     */
    private void openForMapId(String mapId) throws IOException {
        Path dir = Paths.get(properties.getOutputBaseDir(), properties.getInferenceId(), mapId);
        Files.createDirectories(dir);
        File shpFile = dir.resolve(mapId + ".shp").toFile();
        Map<String, Serializable> params = new HashMap<>();
        params.put("url", shpFile.toURI().toURL());
        params.put("create spatial index", Boolean.FALSE); // no .qix index needed for export-only output
        currentDataStore = (ShapefileDataStore) dsFactory.createNewDataStore(params);
        currentDataStore.createSchema(featureType);
        currentTransaction = new DefaultTransaction("create-" + mapId);
        String typeName = currentDataStore.getTypeNames()[0];
        currentFeatureStore = (SimpleFeatureStore) currentDataStore.getFeatureSource(typeName);
        currentFeatureStore.setTransaction(currentTransaction);
    }

    /**
     * Commits the current file's transaction (rolling back on commit failure),
     * then always closes the transaction and disposes the data store.
     */
    private void commitAndClose(String mapId) {
        try {
            currentTransaction.commit();
        } catch (IOException e) {
            log.error("[{}] commit 실패, rollback 시도", mapId, e);
            try {
                currentTransaction.rollback();
            } catch (IOException ignored) {}
        } finally {
            try { currentTransaction.close(); } catch (IOException ignored) {}
            currentDataStore.dispose();
            currentTransaction = null;
            currentDataStore = null;
            currentFeatureStore = null;
        }
    }

    /**
     * Builds one feature from a row. The add() order must match the attribute
     * order declared in buildFeatureType(). Null numeric values are written as
     * "0.0" (probability columns are stored as strings) or 0L (year columns).
     */
    private SimpleFeature buildFeature(InferenceResult r) {
        featureBuilder.add(r.getGeometry());
        featureBuilder.add(r.getUid());
        featureBuilder.add(r.getMapId());
        featureBuilder.add(r.getProbability() != null ? String.valueOf(r.getProbability()) : "0.0");
        featureBuilder.add(r.getBeforeYear() != null ? r.getBeforeYear() : 0L);
        featureBuilder.add(r.getAfterYear() != null ? r.getAfterYear() : 0L);
        featureBuilder.add(r.getBeforeC());
        featureBuilder.add(r.getBeforeP() != null ? String.valueOf(r.getBeforeP()) : "0.0");
        featureBuilder.add(r.getAfterC());
        featureBuilder.add(r.getAfterP() != null ? String.valueOf(r.getAfterP()) : "0.0");
        return featureBuilder.buildFeature(null); // null → feature id auto-generated
    }

    /**
     * Shapefile schema: geometry attribute first, then the DBF columns
     * (names kept short to stay within the DBF format's name-length limit).
     */
    private SimpleFeatureType buildFeatureType(Class<?> geomClass) {
        SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
        builder.setName("inference_results");
        builder.setCRS(crs);
        builder.add("the_geom", geomClass);
        builder.setDefaultGeometry("the_geom");
        builder.add("uid", String.class);
        builder.add("map_id", String.class);
        builder.add("chn_dtct_p", String.class);
        builder.add("cprs_yr", Long.class);
        builder.add("crtr_yr", Long.class);
        builder.add("bf_cls_cd", String.class);
        builder.add("bf_cls_pro", String.class);
        builder.add("af_cls_cd", String.class);
        builder.add("af_cls_pro", String.class);
        return builder.buildFeatureType();
    }

    /**
     * Maps an "ST_*" type name (e.g. "ST_Polygon") to the corresponding
     * org.locationtech.jts.geom class; falls back to Polygon for unknown names.
     */
    private Class<?> resolveGeometryClass(String typeStr) {
        try {
            String name = typeStr.replace("ST_", "");
            return Class.forName("org.locationtech.jts.geom." + name);
        } catch (ClassNotFoundException e) {
            log.warn("알 수 없는 geometry type '{}', Polygon 사용", typeStr);
            return Polygon.class;
        }
    }
}

View File

@@ -0,0 +1,39 @@
package com.kamco.shpexporter.config;
import java.util.List;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
 * Binds all {@code exporter.*} settings from the application configuration.
 */
@Component
@ConfigurationProperties(prefix = "exporter")
public class ExporterProperties {

    // Run identifier; also used as the per-run output sub-directory name.
    private String inferenceId;
    // batch_id values selected for export (required).
    private List<Long> batchIds;
    // Root directory under which the per-run output is written.
    private String outputBaseDir;
    // Coordinate reference system code for the exported shapefiles.
    private String crs = "EPSG:5186";
    // Spring Batch chunk size for the writer step.
    private int chunkSize = 1000;
    // JDBC fetch size for the cursor reader.
    private int fetchSize = 1000;
    // Maximum number of skippable item failures before the step fails.
    private int skipLimit = 100;

    public String getInferenceId() {
        return inferenceId;
    }

    public void setInferenceId(String inferenceId) {
        this.inferenceId = inferenceId;
    }

    public List<Long> getBatchIds() {
        return batchIds;
    }

    public void setBatchIds(List<Long> batchIds) {
        this.batchIds = batchIds;
    }

    public String getOutputBaseDir() {
        return outputBaseDir;
    }

    public void setOutputBaseDir(String outputBaseDir) {
        this.outputBaseDir = outputBaseDir;
    }

    public String getCrs() {
        return crs;
    }

    public void setCrs(String crs) {
        this.crs = crs;
    }

    public int getChunkSize() {
        return chunkSize;
    }

    public void setChunkSize(int chunkSize) {
        this.chunkSize = chunkSize;
    }

    public int getFetchSize() {
        return fetchSize;
    }

    public void setFetchSize(int fetchSize) {
        this.fetchSize = fetchSize;
    }

    public int getSkipLimit() {
        return skipLimit;
    }

    public void setSkipLimit(int skipLimit) {
        this.skipLimit = skipLimit;
    }
}

View File

@@ -0,0 +1,47 @@
package com.kamco.shpexporter.model;
import org.locationtech.jts.geom.Geometry;
/**
 * One row of the inference_results_testing table, with the geometry column
 * already parsed from WKT into a JTS Geometry.
 */
public class InferenceResult {

    private String uid;
    private String mapId;
    private Double probability;
    private Long beforeYear;
    private Long afterYear;
    private String beforeC;
    private Double beforeP;
    private String afterC;
    private Double afterP;
    private Geometry geometry;

    public String getUid() {
        return uid;
    }

    public void setUid(String uid) {
        this.uid = uid;
    }

    public String getMapId() {
        return mapId;
    }

    public void setMapId(String mapId) {
        this.mapId = mapId;
    }

    public Double getProbability() {
        return probability;
    }

    public void setProbability(Double probability) {
        this.probability = probability;
    }

    public Long getBeforeYear() {
        return beforeYear;
    }

    public void setBeforeYear(Long beforeYear) {
        this.beforeYear = beforeYear;
    }

    public Long getAfterYear() {
        return afterYear;
    }

    public void setAfterYear(Long afterYear) {
        this.afterYear = afterYear;
    }

    public String getBeforeC() {
        return beforeC;
    }

    public void setBeforeC(String beforeC) {
        this.beforeC = beforeC;
    }

    public Double getBeforeP() {
        return beforeP;
    }

    public void setBeforeP(Double beforeP) {
        this.beforeP = beforeP;
    }

    public String getAfterC() {
        return afterC;
    }

    public void setAfterC(String afterC) {
        this.afterC = afterC;
    }

    public Double getAfterP() {
        return afterP;
    }

    public void setAfterP(Double afterP) {
        this.afterP = afterP;
    }

    public Geometry getGeometry() {
        return geometry;
    }

    public void setGeometry(Geometry geometry) {
        this.geometry = geometry;
    }
}

View File

@@ -0,0 +1,37 @@
package com.kamco.shpexporter.service;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.MultiPolygon;
import org.locationtech.jts.geom.Polygon;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
@Component
public class GeometryConverter {

    private static final Logger log = LoggerFactory.getLogger(GeometryConverter.class);

    private final WKTReader wktReader = new WKTReader();

    /**
     * Parses a WKT string into a JTS geometry.
     *
     * <p>MultiPolygons are reduced to their first Polygon (the shapefile schema
     * is Polygon). Returns {@code null} for blank input, an empty MultiPolygon,
     * or unparseable WKT (logged as a warning).
     *
     * @param wkt the WKT text, may be null or blank
     * @return the parsed geometry, or null when it cannot be produced
     */
    public Geometry convertWKTToJTS(String wkt) {
        if (wkt == null || wkt.isBlank()) return null;
        try {
            Geometry geom = wktReader.read(wkt);
            // MultiPolygon → keep only the first Polygon
            if (geom instanceof MultiPolygon mp) {
                if (mp.getNumGeometries() == 0) return null;
                if (mp.getNumGeometries() > 1) {
                    // Fix: the extra parts used to be dropped silently — surface the
                    // data loss so affected rows can be audited.
                    log.warn("MultiPolygon with {} parts reduced to its first polygon; "
                        + "remaining parts are discarded", mp.getNumGeometries());
                }
                geom = (Polygon) mp.getGeometryN(0);
            }
            return geom;
        } catch (ParseException e) {
            log.warn("WKT parse failed: {}", e.getMessage());
            return null;
        }
    }
}

View File

@@ -0,0 +1,30 @@
spring:
datasource:
url: jdbc:postgresql://172.16.4.56:15432/kamco_cds
username: kamco_cds
# NOTE(review): plaintext DB credentials are committed to source control — move the
# password to an environment variable or a secret store (e.g. SPRING_DATASOURCE_PASSWORD).
password: kamco_cds_Q!W@E#R$
driver-class-name: org.postgresql.Driver
hikari:
maximum-pool-size: 5 # cursor 1개 + 여유분. 단일 스텝이므로 많이 필요 없음
connection-timeout: 30000
idle-timeout: 600000
max-lifetime: 1800000
exporter:
inference-id: 'D5E46F60FC40B1A8BE0CD1F3547AA6'
batch-ids:
- 252
- 253
- 257
output-base-dir: '/data/model_output/export/'
crs: 'EPSG:5186'
chunk-size: 1000
fetch-size: 1000
skip-limit: 100
logging:
level:
com.kamco.shpexporter: INFO
org.springframework: WARN
pattern:
console: '%d{yyyy-MM-dd HH:mm:ss} - %msg%n'

View File

@@ -0,0 +1,13 @@
spring:
application:
name: shp-exporter-v2
profiles:
active: prod
main:
web-application-type: none
batch:
job:
enabled: false
jdbc:
initialize-schema: always
table-prefix: BATCH_