Add shp file creation

2025-12-26 15:42:59 +09:00
parent ef4cd48827
commit 064e4897d5
9 changed files with 381 additions and 152 deletions

View File

@@ -18,9 +18,16 @@ public class InferenceResultShpApiController {
private final InferenceResultShpService inferenceResultShpService;
@Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장")
@PostMapping
public ApiResponseDto<Void> saveInferenceResultData() {
@PostMapping("/save")
public ApiResponseDto<String> saveInferenceData() {
inferenceResultShpService.saveInferenceResultData();
return ApiResponseDto.createOK(null);
return ApiResponseDto.createOK("OK");
}
@Operation(summary = "shp 파일 생성", description = "shp 파일 생성")
@PostMapping("/create")
public ApiResponseDto<String> createShpFile() {
inferenceResultShpService.createShpFile();
return ApiResponseDto.createOK("OK");
}
}
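
For reference, a minimal MockMvc sketch exercising the two endpoints above. The controller's class-level request mapping is not shown in this diff, so the "/api/inference-result-shp" prefix is an assumption, as is the test class itself.

// Sketch only: the "/api/inference-result-shp" prefix and this test class are assumptions,
// not part of this commit.
import static org.mockito.Mockito.verify;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.web.servlet.MockMvc;

@WebMvcTest(InferenceResultShpApiController.class)
class InferenceResultShpApiControllerTest {

    @Autowired private MockMvc mockMvc;

    @MockBean private InferenceResultShpService inferenceResultShpService;

    @Test
    void saveInferenceData_delegatesToService() throws Exception {
        // POST /save persists inference results and returns ApiResponseDto.createOK("OK")
        mockMvc.perform(post("/api/inference-result-shp/save")).andExpect(status().isOk());
        verify(inferenceResultShpService).saveInferenceResultData();
    }

    @Test
    void createShpFile_delegatesToService() throws Exception {
        // POST /create triggers shp/geojson file generation
        mockMvc.perform(post("/api/inference-result-shp/create")).andExpect(status().isOk());
        verify(inferenceResultShpService).createShpFile();
    }
}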

View File

@@ -1,7 +1,6 @@
package com.kamco.cd.kamcoback.inference.dto;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import java.time.ZonedDateTime;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
@@ -13,72 +12,55 @@ public class InferenceResultShpDto {
@Setter
public static class Basic {
private Long id;
// ===== Identifiers =====
private Long geoUid;
private UUID uuid;
// ===== Group key =====
private Integer stage;
private Float cdProb;
private Integer input1;
private Integer input2;
private Long mapId;
private Integer input1; // compare_yyyy
private Integer input2; // target_yyyy
// ===== Inference results =====
private Float cdProb;
private String beforeClass;
private Float beforeProbability;
private String afterClass;
private Float afterProbability;
private Float area;
private ZonedDateTime createdDttm;
private ZonedDateTime updatedDttm;
// ===== Spatial info =====
private Geometry geometry;
private Float area;
public Basic(
Long id,
UUID uuid,
Integer stage,
Float cdProb,
Integer input1,
Integer input2,
Long mapId,
String beforeClass,
Float beforeProbability,
String afterClass,
Float afterProbability,
Float area,
ZonedDateTime createdDttm,
ZonedDateTime updatedDttm,
Geometry geometry) {
this.id = id;
this.uuid = uuid;
this.stage = stage;
this.cdProb = cdProb;
this.input1 = input1;
this.input2 = input2;
this.mapId = mapId;
this.beforeClass = beforeClass;
this.beforeProbability = beforeProbability;
this.afterClass = afterClass;
this.afterProbability = afterProbability;
this.area = area;
this.createdDttm = createdDttm;
this.updatedDttm = updatedDttm;
this.geometry = geometry;
}
/**
* Entity → DTO conversion
*/
public static Basic from(MapSheetAnalDataInferenceGeomEntity e) {
Basic d = new Basic();
public static Basic from(InferenceResultEntity e) {
return new Basic(
e.getId(),
e.getUuid(),
e.getStage(),
e.getCdProb(),
e.getInput1(),
e.getInput2(),
e.getMapId(),
e.getBeforeClass(),
e.getBeforeProbability(),
e.getAfterClass(),
e.getAfterProbability(),
e.getArea(),
e.getCreatedDttm(),
e.getUpdatedDttm(),
e.getGeometry());
d.geoUid = e.getGeoUid();
d.uuid = e.getUuid();
d.stage = e.getStage();
d.mapId = e.getMapSheetNum();
d.input1 = e.getCompareYyyy();
d.input2 = e.getTargetYyyy();
d.cdProb = e.getCdProb();
d.beforeClass = e.getClassBeforeCd();
d.beforeProbability = e.getClassBeforeProb();
d.afterClass = e.getClassAfterCd();
d.afterProbability = e.getClassAfterProb();
d.geometry = e.getGeom();
d.area = e.getArea();
return d;
}
}
}

View File

@@ -3,7 +3,7 @@ package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.inference.ShpWriter;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService.ShpKey;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
@@ -17,42 +17,66 @@ public class InferenceResultShpService {
private final InferenceResultShpCoreService coreService;
private final ShpWriter shpWriter;
/**
* Upsert inference_results -> tb_map_sheet_anal_data_inference / geom
*/
@Transactional
public void saveInferenceResultData() {
coreService.streamGrouped(
1000,
(key, entities) -> {
// Entity -> DTO
List<InferenceResultShpDto.Basic> dtoList =
entities.stream().map(InferenceResultShpDto.Basic::from).toList();
flushGroup(key, dtoList);
});
coreService.buildInferenceData();
}
/**
* File name and file path
*
* @param key
* @param dtoList
* Regenerate (overwrite) per dataUid: reset (inference false + all geom false) - load geom entities -> convert to DTOs - write shp/geojson - set only successful geo_uids to true - set inference to true
*/
private void flushGroup(ShpKey key, List<InferenceResultShpDto.Basic> dtoList) {
@Transactional
public void createShpFile() {
// TODO: update once the path is decided
// TODO: the path should be moved into configuration
String baseDir = System.getProperty("user.home") + "/export";
// File name: stage_input1_input2_mapId
int batchSize = 100; // number of data_uids processed per run
int geomLimit = 500000; // max geoms loaded per data_uid (tune based on memory/time)
List<Long> dataUids = coreService.findPendingDataUids(batchSize);
for (Long dataUid : dataUids) {
// 1) Start the overwrite: reset flags
coreService.resetForRegenerate(dataUid);
// 2) Load the geom entities to export
List<MapSheetAnalDataInferenceGeomEntity> entities =
coreService.loadGeomEntities(dataUid, geomLimit);
if (entities.isEmpty()) {
// Stays in failed state (false) -> retried as an overwrite in the next batch
continue;
}
// 3) Entity -> DTO
List<InferenceResultShpDto.Basic> dtoList =
entities.stream().map(InferenceResultShpDto.Basic::from).toList();
// 4) File name: stage_mapSheet_compare_target (from the first row)
MapSheetAnalDataInferenceGeomEntity first = entities.get(0);
String baseName =
String.format("%d_%d_%d_%d", key.stage(), key.mapId(), key.input1(), key.input2());
String.format(
"%d_%d_%d_%d",
first.getStage(),
first.getMapSheetNum(),
first.getCompareYyyy(),
first.getTargetYyyy());
String shpBasePath = baseDir + "/shp/" + baseName; // no extension
String geoJsonPath = baseDir + "/geojson/" + baseName + ".geojson";
// shp: writes .shp/.shx/.dbf
// 5) Write files (on exception, success is never marked -> overwrite retried next run)
shpWriter.writeShp(shpBasePath, dtoList);
// geojson: writes .geojson
shpWriter.writeGeoJson(geoJsonPath, dtoList);
// 6) Mark success: only the successful geo_uids are set to true
List<Long> geoUids = entities.stream().map(MapSheetAnalDataInferenceGeomEntity::getGeoUid).toList();
coreService.markSuccess(dataUid, geoUids);
}
}
}
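
The TODOs above note that the export path is hard-coded and should come from configuration. A minimal sketch of one way to do that, assuming a hypothetical shp.export property prefix and a scheduled trigger; neither is part of this commit, and @Scheduled additionally requires @EnableScheduling on a configuration class.

// Sketch only: the "shp.export" properties and the schedule are assumptions, not part of this commit.
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@Component
@ConfigurationProperties(prefix = "shp.export")
@Getter
@Setter
class ShpExportProperties {
    /** Root directory for shp/geojson output (replaces the hard-coded user.home default). */
    private String baseDir = System.getProperty("user.home") + "/export";
    /** Number of data_uids processed per run. */
    private int batchSize = 100;
    /** Max geoms loaded per data_uid. */
    private int geomLimit = 500_000;
}

// Separate class: drives the retry loop periodically; groups that failed stay
// file_created_yn = false and are picked up again on the next run.
@Component
@RequiredArgsConstructor
class ShpExportJob {
    private final InferenceResultShpService inferenceResultShpService;

    @Scheduled(fixedDelayString = "${shp.export.fixed-delay:PT10M}")
    public void run() {
        inferenceResultShpService.createShpFile();
    }
}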

View File

@@ -1,12 +1,9 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;
import lombok.RequiredArgsConstructor;
import org.hibernate.ScrollableResults;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -14,43 +11,48 @@ import org.springframework.transaction.annotation.Transactional;
@RequiredArgsConstructor
public class InferenceResultShpCoreService {
private final InferenceResultRepository inferenceResultRepository;
private final InferenceResultRepository repo;
public record ShpKey(Integer stage, Long mapId, Integer input1, Integer input2) {}
/**
* inference_results -> (inference, geom) upsert
*/
@Transactional
public void buildInferenceData() {
repo.upsertGroupsFromInferenceResults();
repo.upsertGeomsFromInferenceResults();
}
/** Streams the DB and hands each completed group to the handler. handler: (key, groupRows) */
/**
* data_uids with file_created_yn = false/null
*/
@Transactional(readOnly = true)
public void streamGrouped(
int fetchSize, BiConsumer<ShpKey, List<InferenceResultEntity>> handler) {
ScrollableResults cursor = inferenceResultRepository.scrollAllOrdered(fetchSize);
ShpKey currentKey = null;
List<InferenceResultEntity> buffer = new ArrayList<>(2000);
try {
while (cursor.next()) {
InferenceResultEntity row = (InferenceResultEntity) cursor.get();
ShpKey key = new ShpKey(row.getStage(), row.getMapId(), row.getInput1(), row.getInput2());
// Key changed -> previous group complete
if (currentKey != null && !currentKey.equals(key)) {
handler.accept(currentKey, buffer);
buffer = new ArrayList<>(2000);
public List<Long> findPendingDataUids(int limit) {
return repo.findPendingDataUids(limit);
}
currentKey = key;
buffer.add(row);
/**
* Start regeneration, always overwriting from scratch: - inference.file_created_yn = false, file_created_dttm = null - reset all geom file_created_yn to false
*/
@Transactional
public void resetForRegenerate(Long dataUid) {
repo.resetInferenceCreated(dataUid);
repo.resetGeomCreatedByDataUid(dataUid);
}
// Last group
if (currentKey != null && !buffer.isEmpty()) {
handler.accept(currentKey, buffer);
/**
* Load geom entities to export (file_created_yn=false/null + geom not null)
*/
@Transactional(readOnly = true)
public List<MapSheetAnalDataInferenceGeomEntity> loadGeomEntities(Long dataUid, int limit) {
return repo.findGeomEntitiesByDataUid(dataUid, limit);
}
} finally {
cursor.close();
}
/**
* Mark success: - geom.file_created_yn=true only for successful geo_uids - inference.file_created_yn=true
*/
@Transactional
public void markSuccess(Long dataUid, List<Long> geoUids) {
repo.markGeomCreatedByGeoUids(geoUids);
repo.markInferenceCreated(dataUid);
}
}

View File

@@ -161,4 +161,7 @@ public class MapSheetAnalDataInferenceEntity {
@Size(max = 100)
@Column(name = "m3", length = 100)
private String m3;
@Column(name = "file_created_dttm")
private ZonedDateTime fileCreatedDttm;
}

View File

@@ -7,7 +7,7 @@ import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
@@ -63,14 +63,14 @@ public class MapSheetAnalDataInferenceGeomEntity {
@ColumnDefault("now()")
@Column(name = "created_dttm")
private OffsetDateTime createdDttm;
private ZonedDateTime createdDttm;
@Column(name = "created_uid")
private Long createdUid;
@ColumnDefault("now()")
@Column(name = "updated_dttm")
private OffsetDateTime updatedDttm;
private ZonedDateTime updatedDttm;
@Column(name = "updated_uid")
private Long updatedUid;
@@ -90,7 +90,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
@ColumnDefault("now()")
@Column(name = "fit_state_dttm")
private OffsetDateTime fitStateDttm;
private ZonedDateTime fitStateDttm;
@Column(name = "labeler_uid")
private Long labelerUid;
@@ -102,7 +102,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
@ColumnDefault("now()")
@Column(name = "label_state_dttm")
private OffsetDateTime labelStateDttm;
private ZonedDateTime labelStateDttm;
@Column(name = "tester_uid")
private Long testerUid;
@@ -114,7 +114,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
@ColumnDefault("now()")
@Column(name = "test_state_dttm")
private OffsetDateTime testStateDttm;
private ZonedDateTime testStateDttm;
@Column(name = "fit_state_cmmnt", length = Integer.MAX_VALUE)
private String fitStateCmmnt;
@@ -143,4 +143,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
@Column(name = "before_geom", columnDefinition = "geometry")
private Geometry beforeGeom;
@Column(name = "file_created_dttm")
private ZonedDateTime fileCreatedDttm;
}

View File

@@ -1,7 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface InferenceResultRepository
extends JpaRepository<InferenceResultEntity, Long>, InferenceResultRepositoryCustom {}
extends JpaRepository<com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity, Long>,
InferenceResultRepositoryCustom {}

View File

@@ -1,8 +1,23 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import org.hibernate.ScrollableResults;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import java.util.List;
public interface InferenceResultRepositoryCustom {
ScrollableResults scrollAllOrdered(int fetchSize);
int upsertGroupsFromInferenceResults();
int upsertGeomsFromInferenceResults();
List<Long> findPendingDataUids(int limit);
int resetInferenceCreated(Long dataUid);
int markInferenceCreated(Long dataUid);
int resetGeomCreatedByDataUid(Long dataUid);
int markGeomCreatedByGeoUids(List<Long> geoUids);
List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(Long dataUid, int limit);
}

View File

@@ -1,13 +1,14 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import com.kamco.cd.kamcoback.postgres.entity.QInferenceResultEntity;
import com.querydsl.jpa.impl.JPAQuery;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import java.time.ZonedDateTime;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.query.Query;
import org.springframework.stereotype.Repository;
@Repository
@@ -16,20 +17,212 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
private final JPAQueryFactory queryFactory;
public ScrollableResults scrollAllOrdered(int fetchSize) {
QInferenceResultEntity e = QInferenceResultEntity.inferenceResultEntity;
@PersistenceContext
private final EntityManager em;
JPAQuery<InferenceResultEntity> q =
private final QMapSheetAnalDataInferenceEntity i =
QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
private final QMapSheetAnalDataInferenceGeomEntity g =
QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
// ===============================
// Upsert (Native only)
// ===============================
@Override
public int upsertGroupsFromInferenceResults() {
String sql = """
INSERT INTO tb_map_sheet_anal_data_inference (
stage,
compare_yyyy,
target_yyyy,
map_sheet_num,
created_dttm,
updated_dttm,
file_created_yn,
detecting_cnt
)
SELECT
r.stage,
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS map_sheet_num,
now() AS created_dttm,
now() AS updated_dttm,
false AS file_created_yn,
count(*) AS detecting_cnt
FROM inference_results r
GROUP BY r.stage, r.input1, r.input2, r.map_id
ON CONFLICT (stage, compare_yyyy, target_yyyy, map_sheet_num)
DO UPDATE SET
updated_dttm = now(),
detecting_cnt = EXCLUDED.detecting_cnt
""";
return em.createNativeQuery(sql).executeUpdate();
}
@Override
public int upsertGeomsFromInferenceResults() {
// Watch for class_after_prob column-mapping typos (mapped correctly here)
String sql = """
INSERT INTO tb_map_sheet_anal_data_inference_geom (
uuid, stage, cd_prob, compare_yyyy, target_yyyy, map_sheet_num,
class_before_cd, class_before_prob, class_after_cd, class_after_prob,
geom, area, data_uid, created_dttm, updated_dttm,
file_created_yn
)
SELECT
x.uuid, x.stage, x.cd_prob, x.compare_yyyy, x.target_yyyy, x.map_sheet_num,
x.class_before_cd, x.class_before_prob, x.class_after_cd, x.class_after_prob,
x.geom, x.area, x.data_uid, x.created_dttm, x.updated_dttm,
false AS file_created_yn
FROM (
SELECT DISTINCT ON (r.uuid)
r.uuid,
r.stage,
r.cd_prob,
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS map_sheet_num,
r.before_class AS class_before_cd,
r.before_probability AS class_before_prob,
r.after_class AS class_after_cd,
r.after_probability AS class_after_prob,
CASE
WHEN r.geometry IS NULL THEN NULL
WHEN left(r.geometry, 2) = '01'
THEN ST_SetSRID(ST_GeomFromWKB(decode(r.geometry, 'hex')), 5186)
ELSE ST_GeomFromText(r.geometry, 5186)
END AS geom,
r.area,
di.data_uid,
r.created_dttm,
r.updated_dttm
FROM inference_results r
JOIN tb_map_sheet_anal_data_inference di
ON di.stage = r.stage
AND di.compare_yyyy = r.input1
AND di.target_yyyy = r.input2
AND di.map_sheet_num = r.map_id
ORDER BY r.uuid, r.updated_dttm DESC NULLS LAST, r.uid DESC
) x
ON CONFLICT (uuid)
DO UPDATE SET
stage = EXCLUDED.stage,
cd_prob = EXCLUDED.cd_prob,
compare_yyyy = EXCLUDED.compare_yyyy,
target_yyyy = EXCLUDED.target_yyyy,
map_sheet_num = EXCLUDED.map_sheet_num,
class_before_cd = EXCLUDED.class_before_cd,
class_before_prob = EXCLUDED.class_before_prob,
class_after_cd = EXCLUDED.class_after_cd,
class_after_prob = EXCLUDED.class_after_prob,
geom = EXCLUDED.geom,
area = EXCLUDED.area,
data_uid = EXCLUDED.data_uid,
updated_dttm = now()
""";
return em.createNativeQuery(sql).executeUpdate();
}
// ===============================
// Jobs
// ===============================
@Override
public List<Long> findPendingDataUids(int limit) {
return queryFactory
.select(i.id) // data_uid
.from(i)
.where(i.fileCreatedYn.isFalse().or(i.fileCreatedYn.isNull()))
.orderBy(i.id.asc())
.limit(limit)
.fetch();
}
// ===============================
// Reset / Mark (all ZonedDateTime)
// ===============================
@Override
public int resetInferenceCreated(Long dataUid) {
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.selectFrom(e)
.orderBy(e.stage.asc(), e.mapId.asc(), e.input1.asc(), e.input2.asc(), e.id.asc());
.update(i)
.set(i.fileCreatedYn, false)
.set(i.fileCreatedDttm, (ZonedDateTime) null)
.set(i.updatedDttm, now)
.where(i.id.eq(dataUid))
.execute();
}
// Unwrap the QueryDSL query to a Hibernate Query for cursor streaming
Query<?> hQuery = q.createQuery().unwrap(Query.class);
@Override
public int markInferenceCreated(Long dataUid) {
ZonedDateTime now = ZonedDateTime.now();
return hQuery
.setReadOnly(true)
.setFetchSize(fetchSize) // affects PostgreSQL cursor/streaming behavior
.scroll(ScrollMode.FORWARD_ONLY);
return (int)
queryFactory
.update(i)
.set(i.fileCreatedYn, true)
.set(i.fileCreatedDttm, now)
.set(i.updatedDttm, now)
.where(i.id.eq(dataUid))
.execute();
}
@Override
public int resetGeomCreatedByDataUid(Long dataUid) {
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.update(g)
.set(g.fileCreatedYn, false)
.set(g.fileCreatedDttm, (ZonedDateTime) null)
.set(g.updatedDttm, now) // OK as long as the entity/Q field is ZonedDateTime
.where(g.dataUid.eq(dataUid))
.execute();
}
@Override
public int markGeomCreatedByGeoUids(List<Long> geoUids) {
if (geoUids == null || geoUids.isEmpty()) {
return 0;
}
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.update(g)
.set(g.fileCreatedYn, true)
.set(g.fileCreatedDttm, now)
.set(g.updatedDttm, now)
.where(g.geoUid.in(geoUids))
.execute();
}
// ===============================
// Export source (Entity only)
// ===============================
@Override
public List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(Long dataUid, int limit) {
return queryFactory
.selectFrom(g)
.where(
g.dataUid.eq(dataUid),
g.geom.isNotNull(),
g.fileCreatedYn.isFalse().or(g.fileCreatedYn.isNull()))
.orderBy(g.geoUid.asc())
.limit(limit)
.fetch();
}
}
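
The CASE expression in upsertGeomsFromInferenceResults accepts inference_results.geometry either as hex-encoded WKB (detected by the leading '01' byte-order marker) or as WKT, and stamps SRID 5186 on the result. A rough Java/JTS equivalent, purely as an illustration of that branch; the commit itself performs this conversion in PostGIS, not in application code.

// Illustration only: mirrors the SQL CASE in upsertGeomsFromInferenceResults.
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKBReader;
import org.locationtech.jts.io.WKTReader;

public final class GeometryTextParser {

    private GeometryTextParser() {}

    public static Geometry parse(String raw) throws ParseException {
        if (raw == null) {
            return null; // WHEN r.geometry IS NULL THEN NULL
        }
        Geometry geom;
        if (raw.startsWith("01")) {
            // Hex-encoded little-endian WKB, like left(r.geometry, 2) = '01'
            geom = new WKBReader().read(WKBReader.hexToBytes(raw));
        } else {
            // Plain WKT, like ST_GeomFromText(r.geometry, 5186)
            geom = new WKTReader().read(raw);
        }
        geom.setSRID(5186); // EPSG:5186 (Korea 2000 / Central Belt 2010)
        return geom;
    }
}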