Add SHP file generation

2025-12-26 15:42:59 +09:00
parent ef4cd48827
commit 064e4897d5
9 changed files with 381 additions and 152 deletions

View File

@@ -1,12 +1,9 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;
import lombok.RequiredArgsConstructor;
import org.hibernate.ScrollableResults;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -14,43 +11,48 @@ import org.springframework.transaction.annotation.Transactional;
@RequiredArgsConstructor
public class InferenceResultShpCoreService {
private final InferenceResultRepository inferenceResultRepository;
private final InferenceResultRepository repo;
public record ShpKey(Integer stage, Long mapId, Integer input1, Integer input2) {}
/**
* inference_results -> (inference, geom) upsert
*/
@Transactional
public void buildInferenceData() {
repo.upsertGroupsFromInferenceResults();
repo.upsertGeomsFromInferenceResults();
}
/** Streams the DB and hands each completed group to the handler; handler: (key, groupRows) */
/**
* List of data_uid values whose file_created_yn is false/null
*/
@Transactional(readOnly = true)
public void streamGrouped(
int fetchSize, BiConsumer<ShpKey, List<InferenceResultEntity>> handler) {
public List<Long> findPendingDataUids(int limit) {
return repo.findPendingDataUids(limit);
}
ScrollableResults cursor = inferenceResultRepository.scrollAllOrdered(fetchSize);
/**
* Start regeneration (overwrite semantics, always from scratch):
* - inference.file_created_yn = false, file_created_dttm = null
* - reset every geom file_created_yn to false
*/
@Transactional
public void resetForRegenerate(Long dataUid) {
repo.resetInferenceCreated(dataUid);
repo.resetGeomCreatedByDataUid(dataUid);
}
ShpKey currentKey = null;
List<InferenceResultEntity> buffer = new ArrayList<>(2000);
/**
* Load geom entities pending file creation (file_created_yn=false/null and geom not null)
*/
@Transactional(readOnly = true)
public List<MapSheetAnalDataInferenceGeomEntity> loadGeomEntities(Long dataUid, int limit) {
return repo.findGeomEntitiesByDataUid(dataUid, limit);
}
try {
while (cursor.next()) {
InferenceResultEntity row = (InferenceResultEntity) cursor.get();
ShpKey key = new ShpKey(row.getStage(), row.getMapId(), row.getInput1(), row.getInput2());
// key changed -> previous group is complete
if (currentKey != null && !currentKey.equals(key)) {
handler.accept(currentKey, buffer);
buffer = new ArrayList<>(2000);
}
currentKey = key;
buffer.add(row);
}
// last group
if (currentKey != null && !buffer.isEmpty()) {
handler.accept(currentKey, buffer);
}
} finally {
cursor.close();
}
/**
* Mark success:
* - geom.file_created_yn = true only for the successful geo_uids
* - inference.file_created_yn = true
*/
@Transactional
public void markSuccess(Long dataUid, List<Long> geoUids) {
repo.markGeomCreatedByGeoUids(geoUids);
repo.markInferenceCreated(dataUid);
}
}
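The Javadocs above describe a find-pending -> reset -> load -> write -> mark-success cycle, but the calling job is not part of this file. A minimal sketch of such a caller, assuming a hypothetical ShpFileWriter collaborator and job wiring that are not shown in this commit:

package com.kamco.cd.kamcoback.batch; // hypothetical package, for the sketch only

import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Component;

@Component
@RequiredArgsConstructor
public class InferenceShpGenerateJob { // hypothetical class name

    private static final int GEOM_BATCH_SIZE = 2000;

    private final InferenceResultShpCoreService coreService;
    private final ShpFileWriter shpFileWriter; // hypothetical; the real writer lives in the files not shown here

    public void run() {
        // 1) upsert inference_results into (inference, geom)
        coreService.buildInferenceData();

        // 2) pick up data_uids whose SHP has not been generated yet
        for (Long dataUid : coreService.findPendingDataUids(100)) {
            // 3) overwrite semantics: always regenerate from scratch
            coreService.resetForRegenerate(dataUid);

            // 4) page through pending geoms; markSuccess flips file_created_yn,
            //    so the next loadGeomEntities call skips what was already written.
            //    The sketch assumes the writer either writes the whole batch or throws.
            List<MapSheetAnalDataInferenceGeomEntity> batch =
                    coreService.loadGeomEntities(dataUid, GEOM_BATCH_SIZE);
            while (!batch.isEmpty()) {
                List<Long> writtenGeoUids = shpFileWriter.append(dataUid, batch); // hypothetical API
                coreService.markSuccess(dataUid, writtenGeoUids);
                batch = coreService.loadGeomEntities(dataUid, GEOM_BATCH_SIZE);
            }
        }
    }

    /** Hypothetical collaborator, declared here only to keep the sketch self-contained. */
    public interface ShpFileWriter {
        List<Long> append(Long dataUid, List<MapSheetAnalDataInferenceGeomEntity> batch);
    }
}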

View File

@@ -161,4 +161,7 @@ public class MapSheetAnalDataInferenceEntity {
@Size(max = 100)
@Column(name = "m3", length = 100)
private String m3;
@Column(name = "file_created_dttm")
private ZonedDateTime fileCreatedDttm;
}

View File

@@ -7,7 +7,7 @@ import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
@@ -63,14 +63,14 @@ public class MapSheetAnalDataInferenceGeomEntity {
@ColumnDefault("now()")
@Column(name = "created_dttm")
private OffsetDateTime createdDttm;
private ZonedDateTime createdDttm;
@Column(name = "created_uid")
private Long createdUid;
@ColumnDefault("now()")
@Column(name = "updated_dttm")
private OffsetDateTime updatedDttm;
private ZonedDateTime updatedDttm;
@Column(name = "updated_uid")
private Long updatedUid;
@@ -90,7 +90,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
@ColumnDefault("now()")
@Column(name = "fit_state_dttm")
private OffsetDateTime fitStateDttm;
private ZonedDateTime fitStateDttm;
@Column(name = "labeler_uid")
private Long labelerUid;
@@ -102,7 +102,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
@ColumnDefault("now()")
@Column(name = "label_state_dttm")
private OffsetDateTime labelStateDttm;
private ZonedDateTime labelStateDttm;
@Column(name = "tester_uid")
private Long testerUid;
@@ -114,7 +114,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
@ColumnDefault("now()")
@Column(name = "test_state_dttm")
private OffsetDateTime testStateDttm;
private ZonedDateTime testStateDttm;
@Column(name = "fit_state_cmmnt", length = Integer.MAX_VALUE)
private String fitStateCmmnt;
@@ -143,4 +143,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
@Column(name = "before_geom", columnDefinition = "geometry")
private Geometry beforeGeom;
@Column(name = "file_created_dttm")
private ZonedDateTime fileCreatedDttm;
}

View File

@@ -1,7 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface InferenceResultRepository
extends JpaRepository<InferenceResultEntity, Long>, InferenceResultRepositoryCustom {}
extends JpaRepository<com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity, Long>,
InferenceResultRepositoryCustom {}

View File

@@ -1,8 +1,23 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import org.hibernate.ScrollableResults;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import java.util.List;
public interface InferenceResultRepositoryCustom {
ScrollableResults scrollAllOrdered(int fetchSize);
int upsertGroupsFromInferenceResults();
int upsertGeomsFromInferenceResults();
List<Long> findPendingDataUids(int limit);
int resetInferenceCreated(Long dataUid);
int markInferenceCreated(Long dataUid);
int resetGeomCreatedByDataUid(Long dataUid);
int markGeomCreatedByGeoUids(List<Long> geoUids);
List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(Long dataUid, int limit);
}
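For context on how these fragment methods become callable: Spring Data composes InferenceResultRepository from JpaRepository plus this custom interface, and it locates the implementation by the class-name convention <InterfaceName>Impl, so the InferenceResultRepositoryImpl below is picked up automatically. A minimal sketch of the (default) configuration this relies on; the base package is assumed:

import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;

// Only needed if the defaults are ever changed; shown to make the convention explicit.
@Configuration
@EnableJpaRepositories(
        basePackages = "com.kamco.cd.kamcoback.postgres.repository", // assumed base package
        repositoryImplementationPostfix = "Impl")                    // default value
class InferenceRepositoryConfig {}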

View File

@@ -1,13 +1,14 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import com.kamco.cd.kamcoback.postgres.entity.QInferenceResultEntity;
import com.querydsl.jpa.impl.JPAQuery;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import java.time.ZonedDateTime;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.query.Query;
import org.springframework.stereotype.Repository;
@Repository
@@ -16,20 +17,212 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
private final JPAQueryFactory queryFactory;
public ScrollableResults scrollAllOrdered(int fetchSize) {
QInferenceResultEntity e = QInferenceResultEntity.inferenceResultEntity;
@PersistenceContext
private final EntityManager em;
JPAQuery<InferenceResultEntity> q =
queryFactory
.selectFrom(e)
.orderBy(e.stage.asc(), e.mapId.asc(), e.input1.asc(), e.input2.asc(), e.id.asc());
private final QMapSheetAnalDataInferenceEntity i =
QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
// unwrap the QueryDSL query into a Hibernate Query for cursor streaming
Query<?> hQuery = q.createQuery().unwrap(Query.class);
private final QMapSheetAnalDataInferenceGeomEntity g =
QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
return hQuery
.setReadOnly(true)
.setFetchSize(fetchSize) // affects PostgreSQL cursor/streaming behavior
.scroll(ScrollMode.FORWARD_ONLY);
// ===============================
// Upsert (Native only)
// ===============================
@Override
public int upsertGroupsFromInferenceResults() {
String sql = """
INSERT INTO tb_map_sheet_anal_data_inference (
stage,
compare_yyyy,
target_yyyy,
map_sheet_num,
created_dttm,
updated_dttm,
file_created_yn,
detecting_cnt
)
SELECT
r.stage,
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS map_sheet_num,
now() AS created_dttm,
now() AS updated_dttm,
false AS file_created_yn,
count(*) AS detecting_cnt
FROM inference_results r
GROUP BY r.stage, r.input1, r.input2, r.map_id
ON CONFLICT (stage, compare_yyyy, target_yyyy, map_sheet_num)
DO UPDATE SET
updated_dttm = now(),
detecting_cnt = EXCLUDED.detecting_cnt
""";
return em.createNativeQuery(sql).executeUpdate();
}
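// Note (assumption, not part of this diff): ON CONFLICT (stage, compare_yyyy, target_yyyy, map_sheet_num)
// only resolves against a matching unique constraint/index on exactly those columns, e.g. something like
//   CREATE UNIQUE INDEX ux_map_sheet_anal_data_inference_key
//       ON tb_map_sheet_anal_data_inference (stage, compare_yyyy, target_yyyy, map_sheet_num);
// Without it PostgreSQL rejects the insert with
// "there is no unique or exclusion constraint matching the ON CONFLICT specification".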
@Override
public int upsertGeomsFromInferenceResults() {
// careful with the class_after_prob column mapping typo (mapped correctly here)
String sql = """
INSERT INTO tb_map_sheet_anal_data_inference_geom (
uuid, stage, cd_prob, compare_yyyy, target_yyyy, map_sheet_num,
class_before_cd, class_before_prob, class_after_cd, class_after_prob,
geom, area, data_uid, created_dttm, updated_dttm,
file_created_yn
)
SELECT
x.uuid, x.stage, x.cd_prob, x.compare_yyyy, x.target_yyyy, x.map_sheet_num,
x.class_before_cd, x.class_before_prob, x.class_after_cd, x.class_after_prob,
x.geom, x.area, x.data_uid, x.created_dttm, x.updated_dttm,
false AS file_created_yn
FROM (
SELECT DISTINCT ON (r.uuid)
r.uuid,
r.stage,
r.cd_prob,
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS map_sheet_num,
r.before_class AS class_before_cd,
r.before_probability AS class_before_prob,
r.after_class AS class_after_cd,
r.after_probability AS class_after_prob,
CASE
WHEN r.geometry IS NULL THEN NULL
WHEN left(r.geometry, 2) = '01'
THEN ST_SetSRID(ST_GeomFromWKB(decode(r.geometry, 'hex')), 5186)
ELSE ST_GeomFromText(r.geometry, 5186)
END AS geom,
r.area,
di.data_uid,
r.created_dttm,
r.updated_dttm
FROM inference_results r
JOIN tb_map_sheet_anal_data_inference di
ON di.stage = r.stage
AND di.compare_yyyy = r.input1
AND di.target_yyyy = r.input2
AND di.map_sheet_num = r.map_id
ORDER BY r.uuid, r.updated_dttm DESC NULLS LAST, r.uid DESC
) x
ON CONFLICT (uuid)
DO UPDATE SET
stage = EXCLUDED.stage,
cd_prob = EXCLUDED.cd_prob,
compare_yyyy = EXCLUDED.compare_yyyy,
target_yyyy = EXCLUDED.target_yyyy,
map_sheet_num = EXCLUDED.map_sheet_num,
class_before_cd = EXCLUDED.class_before_cd,
class_before_prob = EXCLUDED.class_before_prob,
class_after_cd = EXCLUDED.class_after_cd,
class_after_prob = EXCLUDED.class_after_prob,
geom = EXCLUDED.geom,
area = EXCLUDED.area,
data_uid = EXCLUDED.data_uid,
updated_dttm = now()
""";
return em.createNativeQuery(sql).executeUpdate();
}
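// How the geometry branch above works (explanatory note, not part of the original diff):
// - inference_results.geometry is a text column. A leading "01" is the little-endian (NDR)
//   byte-order marker of hex-encoded (E)WKB, so those values go through decode(..., 'hex') +
//   ST_GeomFromWKB; anything else is treated as WKT via ST_GeomFromText. Both branches stamp
//   SRID 5186 (Korea 2000 / Central Belt 2010). Big-endian WKB ("00...") would fall through to
//   the WKT branch, which is assumed not to occur in this data.
// - DISTINCT ON (r.uuid) with ORDER BY r.uuid, r.updated_dttm DESC NULLS LAST, r.uid DESC keeps
//   only the most recently updated row per uuid before the upsert, and ON CONFLICT (uuid)
//   assumes a unique constraint/index on tb_map_sheet_anal_data_inference_geom.uuid.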
// ===============================
// Jobs
// ===============================
@Override
public List<Long> findPendingDataUids(int limit) {
return queryFactory
.select(i.id) // data_uid
.from(i)
.where(i.fileCreatedYn.isFalse().or(i.fileCreatedYn.isNull()))
.orderBy(i.id.asc())
.limit(limit)
.fetch();
}
// ===============================
// Reset / Mark (all ZonedDateTime)
// ===============================
@Override
public int resetInferenceCreated(Long dataUid) {
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.update(i)
.set(i.fileCreatedYn, false)
.set(i.fileCreatedDttm, (ZonedDateTime) null)
.set(i.updatedDttm, now)
.where(i.id.eq(dataUid))
.execute();
}
@Override
public int markInferenceCreated(Long dataUid) {
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.update(i)
.set(i.fileCreatedYn, true)
.set(i.fileCreatedDttm, now)
.set(i.updatedDttm, now)
.where(i.id.eq(dataUid))
.execute();
}
@Override
public int resetGeomCreatedByDataUid(Long dataUid) {
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.update(g)
.set(g.fileCreatedYn, false)
.set(g.fileCreatedDttm, (ZonedDateTime) null)
.set(g.updatedDttm, now) // fine as long as the entity/Q field is ZonedDateTime
.where(g.dataUid.eq(dataUid))
.execute();
}
@Override
public int markGeomCreatedByGeoUids(List<Long> geoUids) {
if (geoUids == null || geoUids.isEmpty()) {
return 0;
}
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.update(g)
.set(g.fileCreatedYn, true)
.set(g.fileCreatedDttm, now)
.set(g.updatedDttm, now)
.where(g.geoUid.in(geoUids))
.execute();
}
// ===============================
// Export source (Entity only)
// ===============================
@Override
public List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(Long dataUid, int limit) {
return queryFactory
.selectFrom(g)
.where(
g.dataUid.eq(dataUid),
g.geom.isNotNull(),
g.fileCreatedYn.isFalse().or(g.fileCreatedYn.isNull()))
.orderBy(g.geoUid.asc())
.limit(limit)
.fetch();
}
}
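The shapefile writing itself lives in the remaining files of this commit and is not shown in this section. Purely as an illustration of how the entities returned by findGeomEntitiesByDataUid could be turned into a .shp, here is a minimal sketch using GeoTools (assumptions: a pre-GeoTools-30 dependency where the OpenGIS types still live under org.opengis, the EPSG registry plugin on the classpath, multipolygon geometries, and getter names such as getClassBeforeCd inferred from the column names in this diff):

import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.Transaction;
import org.geotools.data.collection.ListFeatureCollection;
import org.geotools.data.shapefile.ShapefileDataStore;
import org.geotools.data.shapefile.ShapefileDataStoreFactory;
import org.geotools.data.simple.SimpleFeatureStore;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.referencing.CRS;
import org.locationtech.jts.geom.MultiPolygon;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

public class ShpExportSketch {

    public static void writeShapefile(File shpFile, List<MapSheetAnalDataInferenceGeomEntity> rows)
            throws Exception {
        // Schema: geometry in EPSG:5186 plus a couple of DBF attributes (names kept <= 10 chars).
        SimpleFeatureTypeBuilder tb = new SimpleFeatureTypeBuilder();
        tb.setName("inference");
        tb.setCRS(CRS.decode("EPSG:5186"));
        tb.add("the_geom", MultiPolygon.class);
        tb.length(20).add("cls_before", String.class);
        tb.length(20).add("cls_after", String.class);
        SimpleFeatureType type = tb.buildFeatureType();

        // One feature per geom row; getters are assumed from the column names in this diff.
        List<SimpleFeature> features = new ArrayList<>();
        SimpleFeatureBuilder fb = new SimpleFeatureBuilder(type);
        for (MapSheetAnalDataInferenceGeomEntity row : rows) {
            fb.add(row.getGeom());
            fb.add(row.getClassBeforeCd());
            fb.add(row.getClassAfterCd());
            features.add(fb.buildFeature(null));
        }

        // Create the .shp/.dbf/.shx trio and write every feature in one transaction.
        Map<String, Serializable> params = new HashMap<>();
        params.put("url", shpFile.toURI().toURL());
        ShapefileDataStore store =
                (ShapefileDataStore) new ShapefileDataStoreFactory().createNewDataStore(params);
        store.createSchema(type);
        Transaction tx = new DefaultTransaction("shp-export");
        try {
            SimpleFeatureStore featureStore =
                    (SimpleFeatureStore) store.getFeatureSource(store.getTypeNames()[0]);
            featureStore.setTransaction(tx);
            featureStore.addFeatures(new ListFeatureCollection(type, features));
            tx.commit();
        } catch (Exception e) {
            tx.rollback();
            throw e;
        } finally {
            tx.close();
            store.dispose();
        }
    }
}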