Merge remote-tracking branch 'origin/feat/dev_251201' into feat/dev_251201

Moon
2025-12-26 17:31:19 +09:00
26 changed files with 894 additions and 358 deletions

View File

@@ -0,0 +1,33 @@
package com.kamco.cd.kamcoback.common.utils.zip;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class CsvFileProcessor implements ZipEntryProcessor {
@Override
public boolean supports(String fileName) {
return fileName.toLowerCase().endsWith(".csv");
}
@Override
public void process(String fileName, InputStream is) throws IOException {
    // Not closed here on purpose: closing the reader would also close the
    // underlying ZipInputStream and break the remaining zip entries.
    BufferedReader br = new BufferedReader(new InputStreamReader(is));
    br.lines()
        .forEach(
            line -> {
                String[] cols = line.split(",");
                // process the CSV columns
                for (String col : cols) {
                    log.info(col); // TODO: define once the CSV handling requirements are known
                }
            });
}
}

View File

@@ -0,0 +1,73 @@
package com.kamco.cd.kamcoback.common.utils.zip;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.InputStream;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class JsonStreamingFileProcessor implements ZipEntryProcessor {
private final JsonFactory jsonFactory;
public JsonStreamingFileProcessor(ObjectMapper objectMapper) {
// keep Jackson from closing the underlying ZipInputStream
objectMapper.configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, false);
this.jsonFactory = objectMapper.getFactory();
}
@Override
public boolean supports(String fileName) {
return fileName.toLowerCase().endsWith(".json");
}
@Override
public void process(String fileName, InputStream is) throws IOException {
    log.info("JSON process start: {}", fileName);
    // AUTO_CLOSE_SOURCE is disabled, so closing the parser leaves the zip stream open
    try (JsonParser parser = jsonFactory.createParser(is)) {
        // walk the document token by token, regardless of JSON structure
        while (parser.nextToken() != null) {
            handleToken(parser);
        }
    }
    log.info("JSON process end: {}", fileName);
}
private void handleToken(JsonParser parser) throws IOException {
JsonToken token = parser.currentToken();
if (token == JsonToken.FIELD_NAME) {
String fieldName = parser.getCurrentName();
// TODO: finalize these fields once the JSON contents to be read are specified
switch (fieldName) {
case "type" -> {
parser.nextToken();
String type = parser.getValueAsString();
log.info("type: {}", type);
}
case "name" -> {
parser.nextToken();
String name = parser.getValueAsString();
log.info("Name: {}", name);
}
case "features" -> {
parser.nextToken();
String features = parser.readValueAsTree().toString();
log.info("features: {}", features);
}
default -> {
parser.nextToken();
parser.skipChildren();
}
}
}
}
}
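
To make the token walk concrete, here is a minimal in-memory sketch. The sample JSON and the demo harness are hypothetical, not part of this commit; only Jackson (already a dependency via ObjectMapper) is assumed.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.common.utils.zip.JsonStreamingFileProcessor;
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

public class JsonStreamingDemo {
    public static void main(String[] args) throws Exception {
        String json = "{\"type\":\"FeatureCollection\",\"name\":\"demo\",\"features\":[{\"id\":1}]}";
        JsonStreamingFileProcessor p = new JsonStreamingFileProcessor(new ObjectMapper());
        // handleToken logs "type: FeatureCollection", "Name: demo", and the features subtree;
        // readValueAsTree consumes the whole array, so its inner tokens are not revisited.
        p.process("demo.json", new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)));
    }
}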

View File

@@ -0,0 +1,27 @@
package com.kamco.cd.kamcoback.common.utils.zip;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class TextFileProcessor implements ZipEntryProcessor {
@Override
public boolean supports(String fileName) {
return fileName.toLowerCase().endsWith(".txt");
}
@Override
public void process(String fileName, InputStream is) throws IOException {
    // Intentionally not wrapped in try-with-resources: closing the reader
    // would also close the shared ZipInputStream.
    BufferedReader br = new BufferedReader(new InputStreamReader(is));
    String line;
    while ((line = br.readLine()) != null) {
        log.info(line); // TODO: define once the TXT handling requirements are known
    }
}
}

View File

@@ -0,0 +1,11 @@
package com.kamco.cd.kamcoback.common.utils.zip;
import java.io.IOException;
import java.io.InputStream;
public interface ZipEntryProcessor {
boolean supports(String fileName);
void process(String fileName, InputStream is) throws IOException;
}
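
Because ZipUtils (below) receives every ZipEntryProcessor bean through constructor injection, supporting a new entry type needs only one more @Component. A hypothetical sketch (an XmlFileProcessor is not part of this commit):

package com.kamco.cd.kamcoback.common.utils.zip;
import java.io.IOException;
import java.io.InputStream;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;

@Slf4j
@Component
public class XmlFileProcessor implements ZipEntryProcessor {
    @Override
    public boolean supports(String fileName) {
        return fileName.toLowerCase().endsWith(".xml");
    }

    @Override
    public void process(String fileName, InputStream is) throws IOException {
        // Do not close 'is': it is the shared ZipInputStream.
        log.info("XML entry {}: {} bytes", fileName, is.readAllBytes().length);
    }
}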

View File

@@ -0,0 +1,49 @@
package com.kamco.cd.kamcoback.common.utils.zip;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class ZipUtils {
private final List<ZipEntryProcessor> processors;
public ZipUtils(List<ZipEntryProcessor> processors) {
this.processors = processors;
}
public void processZip(InputStream zipStream) throws IOException {
try (ZipInputStream zis = new ZipInputStream(zipStream)) {
ZipEntry entry;
while ((entry = zis.getNextEntry()) != null) {
if (entry.isDirectory()) {
continue;
}
String fileName = entry.getName();
// route the entry to the first processor that supports its file type
processors.stream()
.filter(p -> p.supports(fileName))
.findFirst()
.ifPresent(
processor -> {
try {
processor.process(fileName, zis);
} catch (IOException ioe) {
throw new UncheckedIOException(ioe);
}
});
zis.closeEntry();
}
}
}
}
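
A usage sketch for the pipeline as a whole; the zip path is illustrative, and in the application the ZipUtils bean comes from the Spring context rather than manual construction (see the upload endpoint in ModelMngApiController below):

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.List;

ZipUtils zipUtils = new ZipUtils(List.of(new CsvFileProcessor(), new TextFileProcessor()));
try (InputStream in = new FileInputStream("/tmp/model.zip")) {
    zipUtils.processZip(in); // each entry goes to the first processor whose supports() matches
}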

View File

@@ -1,8 +1,13 @@
package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.PostMapping;
@@ -18,9 +23,39 @@ public class InferenceResultShpApiController {
private final InferenceResultShpService inferenceResultShpService;
@Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장")
@PostMapping
public ApiResponseDto<Void> saveInferenceResultData() {
inferenceResultShpService.saveInferenceResultData();
return ApiResponseDto.createOK(null);
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "데이터 저장 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(implementation = InferenceResultShpDto.InferenceCntDto.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/save")
public ApiResponseDto<InferenceResultShpDto.InferenceCntDto> saveInferenceData() {
return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData());
}
@Operation(summary = "shp 파일 생성", description = "shp 파일 생성")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "파일생성 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = InferenceResultShpDto.FileCntDto.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/create")
public ApiResponseDto<InferenceResultShpDto.FileCntDto> createShpFile() {
return ApiResponseDto.createOK(inferenceResultShpService.createShpFile());
}
}

View File

@@ -1,13 +0,0 @@
package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import java.util.List;
public interface ShpWriter {
// SHP (.shp/.shx/.dbf)
void writeShp(String shpBasePath, List<InferenceResultShpDto.Basic> rows);
// GeoJSON (.geojson)
void writeGeoJson(String geoJsonPath, List<InferenceResultShpDto.Basic> rows);
}

View File

@@ -1,9 +1,11 @@
package com.kamco.cd.kamcoback.inference.dto;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import java.time.ZonedDateTime;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.UUID;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.locationtech.jts.geom.Geometry;
@@ -13,72 +15,88 @@ public class InferenceResultShpDto {
@Setter
public static class Basic {
private Long id;
// ===== Identity =====
private Long geoUid;
private UUID uuid;
// ===== Group key =====
private Integer stage;
private Float cdProb;
private Integer input1;
private Integer input2;
private Long mapId;
private Integer input1; // compare_yyyy
private Integer input2; // target_yyyy
// ===== Inference result =====
private Float cdProb;
private String beforeClass;
private Float beforeProbability;
private String afterClass;
private Float afterProbability;
private Float area;
private ZonedDateTime createdDttm;
private ZonedDateTime updatedDttm;
// ===== Spatial info =====
private Geometry geometry;
private Float area;
public Basic(
Long id,
UUID uuid,
Integer stage,
Float cdProb,
Integer input1,
Integer input2,
Long mapId,
String beforeClass,
Float beforeProbability,
String afterClass,
Float afterProbability,
Float area,
ZonedDateTime createdDttm,
ZonedDateTime updatedDttm,
Geometry geometry) {
this.id = id;
this.uuid = uuid;
this.stage = stage;
this.cdProb = cdProb;
this.input1 = input1;
this.input2 = input2;
this.mapId = mapId;
this.beforeClass = beforeClass;
this.beforeProbability = beforeProbability;
this.afterClass = afterClass;
this.afterProbability = afterProbability;
this.area = area;
this.createdDttm = createdDttm;
this.updatedDttm = updatedDttm;
this.geometry = geometry;
}
/** Entity → DTO conversion */
public static Basic from(MapSheetAnalDataInferenceGeomEntity e) {
Basic d = new Basic();
public static Basic from(InferenceResultEntity e) {
return new Basic(
e.getId(),
e.getUuid(),
e.getStage(),
e.getCdProb(),
e.getInput1(),
e.getInput2(),
e.getMapId(),
e.getBeforeClass(),
e.getBeforeProbability(),
e.getAfterClass(),
e.getAfterProbability(),
e.getArea(),
e.getCreatedDttm(),
e.getUpdatedDttm(),
e.getGeometry());
d.geoUid = e.getGeoUid();
d.uuid = e.getUuid();
d.stage = e.getStage();
d.mapId = e.getMapSheetNum();
d.input1 = e.getCompareYyyy();
d.input2 = e.getTargetYyyy();
d.cdProb = e.getCdProb();
d.beforeClass = e.getClassBeforeCd();
d.beforeProbability = e.getClassBeforeProb();
d.afterClass = e.getClassAfterCd();
d.afterProbability = e.getClassAfterProb();
d.geometry = e.getGeom();
d.area = e.getArea();
return d;
}
}
@Setter
@Getter
@AllArgsConstructor
@NoArgsConstructor
public static class InferenceCntDto {
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 데이터 건수", example = "120")
int inferenceCnt;
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 Geom 데이터 건수", example = "120")
int inferenceGeomCnt;
}
@Setter
@Getter
@AllArgsConstructor
@NoArgsConstructor
public static class FileCntDto {
@Schema(description = "shp 파일 생성 수 (덮어쓰기 포함)", example = "120")
private int shp;
@Schema(description = "shx 파일 생성 수 (덮어쓰기 포함)", example = "120")
private int shx;
@Schema(description = "dbf 파일 생성 수 (덮어쓰기 포함)", example = "120")
private int dbf;
@Schema(description = "prj 파일 생성 수 (덮어쓰기 포함)", example = "120")
private int prj;
@Schema(description = "geojson 파일 생성 수 (덮어쓰기 포함)", example = "120")
private int geojson;
}
}

View File

@@ -0,0 +1,17 @@
package com.kamco.cd.kamcoback.inference.dto;
public record WriteCnt(int shp, int shx, int dbf, int prj, int geojson) {
public static WriteCnt zero() {
return new WriteCnt(0, 0, 0, 0, 0);
}
public WriteCnt plus(WriteCnt o) {
return new WriteCnt(
this.shp + o.shp,
this.shx + o.shx,
this.dbf + o.dbf,
this.prj + o.prj,
this.geojson + o.geojson);
}
}
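
The record acts as an immutable accumulator; a short sketch mirroring how InferenceResultShpService (below) sums the per-group counts returned by the writers:

WriteCnt total = WriteCnt.zero();                 // (0, 0, 0, 0, 0)
total = total.plus(new WriteCnt(1, 1, 1, 1, 0));  // one SHP group: .shp/.shx/.dbf/.prj
total = total.plus(new WriteCnt(0, 0, 0, 0, 1));  // plus its .geojson
// total is now WriteCnt[shp=1, shx=1, dbf=1, prj=1, geojson=1]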

View File

@@ -4,8 +4,8 @@ import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.kamco.cd.kamcoback.inference.ShpWriter;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.dto.WriteCnt;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
@@ -56,12 +56,13 @@ public class GeoToolsShpWriter implements ShpWriter {
*
* @param shpBasePath base path of the SHP files, without extension
* @param rows data rows for one group (stage, mapId, input1, input2)
* @return number of files written (created/overwritten) by this call
*/
@Override
public void writeShp(String shpBasePath, List<InferenceResultShpDto.Basic> rows) {
public WriteCnt writeShp(String shpBasePath, List<InferenceResultShpDto.Basic> rows) {
if (rows == null || rows.isEmpty()) {
return;
return WriteCnt.zero();
}
// SHP does not accept Geometry.class, so
@@ -86,7 +87,7 @@ public class GeoToolsShpWriter implements ShpWriter {
// build the FeatureType (schema)
SimpleFeatureType schema = createSchema(geomType, crs);
// create the ShapefileDataStore
// create the ShapefileDataStore (recreates/overwrites any existing files)
dataStore = createDataStore(shpFile, schema);
// build the FeatureCollection
@@ -100,6 +101,9 @@ public class GeoToolsShpWriter implements ShpWriter {
log.info("SHP 생성 완료: {} ({} features)", shpFile.getAbsolutePath(), collection.size());
// 덮어쓰기 포함: 이번 호출이 정상 종료되면 4개 파일 write가 발생했다고 카운트
return new WriteCnt(1, 1, 1, 1, 0);
} catch (Exception e) {
throw new RuntimeException("SHP 생성 실패: " + shpBasePath, e);
} finally {
@@ -124,12 +128,13 @@ public class GeoToolsShpWriter implements ShpWriter {
*
* @param geoJsonPath full path of the GeoJSON file to create (including .geojson)
* @param rows data rows for one group (stage, mapId, input1, input2)
* @return number of files written (created/overwritten) by this call
*/
@Override
public void writeGeoJson(String geoJsonPath, List<InferenceResultShpDto.Basic> rows) {
public WriteCnt writeGeoJson(String geoJsonPath, List<InferenceResultShpDto.Basic> rows) {
if (rows == null || rows.isEmpty()) {
return;
return WriteCnt.zero();
}
try {
@@ -167,10 +172,6 @@ public class GeoToolsShpWriter implements ShpWriter {
groupProps.put("input1", first.getInput1());
groupProps.put("input2", first.getInput2());
groupProps.put("map_id", first.getMapId());
// training-server model versions to be added later
// groupProps.put("m1", "v1.2222.251223121212");
// groupProps.put("m2", "v2.211.251223121213");
// groupProps.put("m3", "v3.233.251223121214");
root.set("properties", groupProps);
// features 배열
@@ -231,19 +232,14 @@ public class GeoToolsShpWriter implements ShpWriter {
log.info("GeoJSON 생성 완료: {} ({} features)", geoJsonFile.getAbsolutePath(), features.size());
// 덮어쓰기 포함: 이번 호출이 정상 종료되면 geojson 1개 write로 카운트
return new WriteCnt(0, 0, 0, 0, 1);
} catch (Exception e) {
throw new RuntimeException("GeoJSON 생성 실패: " + geoJsonPath, e);
}
}
/**
* Returns the first non-null Geometry found in rows.
*
* <p>- Used to decide the geometry type when building the SHP schema.
*
* @param rows DTO list
* @return first non-null Geometry, or null if none
*/
private Geometry firstNonNullGeometry(List<InferenceResultShpDto.Basic> rows) {
for (InferenceResultShpDto.Basic r : rows) {
if (r != null && r.getGeometry() != null) {
@@ -253,15 +249,6 @@ public class GeoToolsShpWriter implements ShpWriter {
return null;
}
/**
* Builds the SimpleFeatureType (schema) used for the SHP file.
*
* <p>- The geometry column must come first. - DBF columns respect SHP constraints (10-char
* column names, length limits).
*
* @param geomType concrete geometry type (Polygon, MultiPolygon, etc.)
* @param crs coordinate reference system (EPSG:5186)
* @return SimpleFeatureType
*/
private SimpleFeatureType createSchema(
Class<? extends Geometry> geomType, CoordinateReferenceSystem crs) {
SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();
@@ -286,15 +273,6 @@ public class GeoToolsShpWriter implements ShpWriter {
return b.buildFeatureType();
}
/**
* Creates the ShapefileDataStore and registers the schema.
*
* <p>- DBF file encoding is set to EUC-KR. - A spatial index (.qix) is created.
*
* @param shpFile SHP file object
* @param schema SimpleFeatureType
* @return the created ShapefileDataStore
*/
private ShapefileDataStore createDataStore(File shpFile, SimpleFeatureType schema)
throws Exception {
@@ -311,15 +289,6 @@ public class GeoToolsShpWriter implements ShpWriter {
return dataStore;
}
/**
* Converts the DTO list into a SimpleFeatureCollection.
*
* <p>- One Feature per DTO - rows with a null geometry are skipped.
*
* @param schema FeatureType
* @param rows DTO list
* @return DefaultFeatureCollection
*/
private DefaultFeatureCollection buildFeatureCollection(
SimpleFeatureType schema, List<InferenceResultShpDto.Basic> rows) {
DefaultFeatureCollection collection = new DefaultFeatureCollection();
@@ -352,12 +321,6 @@ public class GeoToolsShpWriter implements ShpWriter {
return collection;
}
/**
* Actually writes the FeatureCollection to the SHP file.
*
* @param dataStore ShapefileDataStore
* @param collection FeatureCollection
*/
private void writeFeatures(ShapefileDataStore dataStore, DefaultFeatureCollection collection)
throws Exception {
@@ -373,12 +336,6 @@ public class GeoToolsShpWriter implements ShpWriter {
store.getTransaction().commit();
}
/**
* Creates the .prj file holding the SHP coordinate system information.
*
* @param shpBasePath SHP base path (without extension)
* @param crs coordinate reference system (EPSG:5186)
*/
private void writePrjFile(String shpBasePath, CoordinateReferenceSystem crs) throws Exception {
File prjFile = new File(shpBasePath + ".prj");
@@ -387,11 +344,6 @@ public class GeoToolsShpWriter implements ShpWriter {
Files.writeString(prjFile.toPath(), crs.toWKT(), StandardCharsets.UTF_8);
}
/**
* Creates the target directory if it does not exist.
*
* @param file file to be created
*/
private void createDirectories(File file) throws Exception {
File parent = file.getParentFile();
if (parent != null) {

View File

@@ -1,9 +1,8 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.inference.ShpWriter;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.dto.WriteCnt;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService.ShpKey;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
@@ -17,42 +16,76 @@ public class InferenceResultShpService {
private final InferenceResultShpCoreService coreService;
private final ShpWriter shpWriter;
public void saveInferenceResultData() {
coreService.streamGrouped(
1000,
(key, entities) -> {
// Entity -> DTO
List<InferenceResultShpDto.Basic> dtoList =
entities.stream().map(InferenceResultShpDto.Basic::from).toList();
flushGroup(key, dtoList);
});
/** Brings the analysis-result and geometry tables up to date, based on the inference_results table. */
@Transactional
public InferenceResultShpDto.InferenceCntDto saveInferenceResultData() {
return coreService.buildInferenceData();
}
/**
* File name and file path
* Creates SHP / GeoJSON files per analysis-data unit.
*
* @param key
* @param dtoList
* <p>Processing flow: 1. fetch analysis data whose files are not yet created 2. reset state for
* regeneration 3. load geometry data 4. write SHP / GeoJSON files 5. mark file
* creation complete
*
* <p>If a run fails midway, the next run regenerates everything.
*/
private void flushGroup(ShpKey key, List<InferenceResultShpDto.Basic> dtoList) {
@Transactional
public InferenceResultShpDto.FileCntDto createShpFile() {
// TODO: update once the target path is decided
String baseDir = System.getProperty("user.home") + "/export";
// file name: stage_input1_input2_mapId
String baseName =
String.format("%d_%d_%d_%d", key.stage(), key.mapId(), key.input1(), key.input2());
int batchSize = 100;
int geomLimit = 500_000;
String shpBasePath = baseDir + "/shp/" + baseName; // without extension
String geoJsonPath = baseDir + "/geojson/" + baseName + ".geojson";
WriteCnt total = WriteCnt.zero();
// shp: writes .shp/.shx/.dbf
shpWriter.writeShp(shpBasePath, dtoList);
List<Long> dataUids = coreService.findPendingDataUids(batchSize);
// geojson: writes .geojson
shpWriter.writeGeoJson(geoJsonPath, dtoList);
for (Long dataUid : dataUids) {
// reset creation state for regeneration
coreService.resetForRegenerate(dataUid);
// load geometry data
List<InferenceResultShpDto.Basic> dtoList = coreService.loadGeomDtos(dataUid, geomLimit);
if (dtoList.isEmpty()) {
continue;
}
// build the file name (stage_mapSheet_compare_target)
InferenceResultShpDto.Basic first = dtoList.get(0);
String baseName =
String.format(
"%d_%d_%d_%d",
first.getStage(), first.getMapId(), first.getInput1(), first.getInput2());
String shpBasePath = baseDir + "/shp/" + baseName;
String geoJsonPath = baseDir + "/geojson/" + baseName + ".geojson";
try {
// instead of counting files on disk, the writer reports how many it wrote in this call
total = total.plus(shpWriter.writeShp(shpBasePath, dtoList));
total = total.plus(shpWriter.writeGeoJson(geoJsonPath, dtoList));
// mark file creation as complete
List<Long> geoUids = dtoList.stream().map(InferenceResultShpDto.Basic::getGeoUid).toList();
coreService.markSuccess(dataUid, geoUids);
} catch (Exception e) {
// on failure, markSuccess is skipped -> regenerated on the next run
// log.warn("File creation failed: dataUid={}, baseName={}", dataUid, baseName, e);
continue;
}
}
InferenceResultShpDto.FileCntDto fileCntDto = new InferenceResultShpDto.FileCntDto();
fileCntDto.setShp(total.shp());
fileCntDto.setShx(total.shx());
fileCntDto.setDbf(total.dbf());
fileCntDto.setPrj(total.prj());
fileCntDto.setGeojson(total.geojson());
return fileCntDto;
}
}

View File

@@ -0,0 +1,14 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.dto.WriteCnt;
import java.util.List;
public interface ShpWriter {
// SHP (.shp/.shx/.dbf)
WriteCnt writeShp(String shpBasePath, List<InferenceResultShpDto.Basic> rows);
// GeoJSON (.geojson)
WriteCnt writeGeoJson(String geoJsonPath, List<InferenceResultShpDto.Basic> rows);
}

View File

@@ -185,13 +185,13 @@ public class MapSheetMngApiController {
@Operation(summary = "페어 파일 업로드", description = "TFW/TIF 두 파일을 쌍으로 업로드 및 검증")
@PostMapping(value = "/upload-pair", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public ApiResponseDto<MapSheetMngDto.DmlReturn> uploadPair(
@RequestPart("tfw") MultipartFile tfwFile,
@RequestPart("tif") MultipartFile tifFile,
@RequestParam(value = "hstUid", required = false) Long hstUid,
@RequestParam(value = "overwrite", required = false) boolean overwrite) {
@RequestPart("tfw") MultipartFile tfwFile,
@RequestPart("tif") MultipartFile tifFile,
@RequestParam(value = "hstUid", required = false) Long hstUid,
@RequestParam(value = "overwrite", required = false) boolean overwrite) {
return ApiResponseDto.createOK(
mapSheetMngService.uploadPair(tfwFile, tifFile, hstUid, overwrite));
mapSheetMngService.uploadPair(tfwFile, tifFile, hstUid, overwrite));
}
@Operation(summary = "영상관리 > 파일조회", description = "영상관리 > 파일조회")
@@ -214,42 +214,41 @@ public class MapSheetMngApiController {
return ApiResponseDto.ok(mapSheetMngService.findByHstUidMapSheetFileList(hstUid));
}
@Operation(summary = "영상관리 > 파일사용설정 및 중복제거", description = "영상관리 >파일사용설정 및 중복제거(중복파일제거 및 선택파일사용설정)")
@Operation(
summary = "영상관리 > 파일사용설정 및 중복제거",
description = "영상관리 >파일사용설정 및 중복제거(중복파일제거 및 선택파일사용설정)")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "파일사용설정 처리 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = Long.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
value = {
@ApiResponse(
responseCode = "201",
description = "파일사용설정 처리 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = Long.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PutMapping("/update-use-mng-files")
public ApiResponseDto<MapSheetMngDto.DmlReturn> updateUseByFileUidMngFile(
@RequestParam @Valid List<Long> fileUids) {
@RequestParam @Valid List<Long> fileUids) {
return ApiResponseDto.ok(mapSheetMngService.setUseByFileUidMngFile(fileUids));
}
@Operation(summary = "폴더 조회", description = "폴더 조회 (ROOT:/app/original-images 이하로 경로입력)")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/folder-list")
public ApiResponseDto<FoldersDto> getDir(@RequestBody SrchFoldersDto srchDto) {
@@ -258,22 +257,20 @@ public class MapSheetMngApiController {
@Operation(summary = "지정폴더내 파일목록 조회", description = "지정폴더내 파일목록 조회")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/file-list")
public ApiResponseDto<FilesDto> getFiles(@RequestBody SrchFilesDto srchDto) {
return ApiResponseDto.createOK(mapSheetMngService.getFilesAll(srchDto));
}
}

View File

@@ -90,8 +90,6 @@ public class MapSheetMngDto {
@JsonFormatDttm private ZonedDateTime rgstStrtDttm;
@JsonFormatDttm private ZonedDateTime rgstEndDttm;
public String getSyncState() {
if (this.syncStateDoneCnt == 0) return "NOTYET";
@@ -130,7 +128,6 @@ public class MapSheetMngDto {
return this.syncNotPaireExecCnt + this.syncDuplicateExecCnt + this.syncFaultExecCnt;
}
public String getMngState() {
String mngState = "DONE";
@@ -153,7 +150,6 @@ public class MapSheetMngDto {
MngStateType type = Enums.fromId(MngStateType.class, enumId);
return type.getText();
}
}
@Schema(name = "ErrorSearchReq", description = "영상관리 오류데이터 검색 요청")

View File

@@ -14,7 +14,6 @@ import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngFilesDto;
import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService;
import jakarta.validation.Valid;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -75,16 +74,16 @@ public class MapSheetMngService {
@Transactional
public DmlReturn mngDataSave(AddReq addReq) {
int execCnt = mapSheetMngCoreService.mngDataSave(addReq);
return new MapSheetMngDto.DmlReturn("success", addReq.getMngYyyy()+"년, "+execCnt+"건 생성");
return new MapSheetMngDto.DmlReturn("success", addReq.getMngYyyy() + "년, " + execCnt + "건 생성");
}
public DmlReturn updateExceptUseInference(@Valid List<Long> hstUidList) {
return mapSheetMngCoreService.updateExceptUseInference(hstUidList);
}
@Transactional
public DmlReturn uploadPair(MultipartFile tfwFile, MultipartFile tifFile, Long hstUid, Boolean overwrite) {
public DmlReturn uploadPair(
MultipartFile tfwFile, MultipartFile tifFile, Long hstUid, Boolean overwrite) {
String rootPath = syncRootDir;
String tmpPath = syncTmpDir;
@@ -102,26 +101,29 @@ public class MapSheetMngService {
return new DmlReturn("fail", "CREATE TEMP FOLDER ERROR");
}
//check uploaded file size and extension
// check uploaded file size and extension
dmlReturn = this.validationFile(tfwFile, tifFile);
if( dmlReturn.getFlag().equals("fail") )return dmlReturn;
if (dmlReturn.getFlag().equals("fail")) return dmlReturn;
MngDto mngDto = mapSheetMngCoreService.findMapSheetMng(errDto.getMngYyyy());
String targetYearDir = mngDto.getMngPath();
// duplicate check
if( !overwrite ) {
dmlReturn = this.duplicateFile(errDto.getMngYyyy(), tfwFile.getOriginalFilename(), tifFile.getOriginalFilename());
if( dmlReturn.getFlag().equals("duplicate") )return dmlReturn;
if (!overwrite) {
dmlReturn =
this.duplicateFile(
errDto.getMngYyyy(), tfwFile.getOriginalFilename(), tifFile.getOriginalFilename());
if (dmlReturn.getFlag().equals("duplicate")) return dmlReturn;
}
//save multipart files to the tmp folder (for file-format checks)
// save multipart files to the tmp folder (for file-format checks)
String tfwTmpPath = tmpPath + tfwFile.getOriginalFilename();
String tifTmpPath = tmpPath + tifFile.getOriginalFilename();
if(!FIleChecker.multipartSaveTo(tfwFile, tfwTmpPath))return new DmlReturn("fail", "UPLOAD ERROR");
if(!FIleChecker.multipartSaveTo(tifFile, tifTmpPath))return new DmlReturn("fail", "UPLOAD ERROR");
if (!FIleChecker.multipartSaveTo(tfwFile, tfwTmpPath))
return new DmlReturn("fail", "UPLOAD ERROR");
if (!FIleChecker.multipartSaveTo(tifFile, tifTmpPath))
return new DmlReturn("fail", "UPLOAD ERROR");
if (!FIleChecker.cmmndGdalInfo(tifTmpPath)) return new DmlReturn("fail", "TIF TYPE ERROR");
if (!FIleChecker.checkTfw(tfwTmpPath)) return new DmlReturn("fail", "TFW TYPE ERROR");
@@ -134,7 +136,6 @@ public class MapSheetMngService {
break;
}
Path tfwTmpSavePath = Paths.get(tfwTmpPath);
Path tifTmpSavePath = Paths.get(tifTmpPath);
Path tfwTargetPath = null;
@@ -201,7 +202,6 @@ public class MapSheetMngService {
return mapSheetMngCoreService.findByHstUidMapSheetFileList(hstUid);
}
@Transactional
public DmlReturn setUseByFileUidMngFile(List<Long> fileUids) {
@@ -209,7 +209,8 @@ public class MapSheetMngService {
DmlReturn dmlReturn = new DmlReturn("success", "정상처리되었습니다.");
MapSheetMngDto.SyncCheckStateReqUpdateDto reqDto = new MapSheetMngDto.SyncCheckStateReqUpdateDto();
MapSheetMngDto.SyncCheckStateReqUpdateDto reqDto =
new MapSheetMngDto.SyncCheckStateReqUpdateDto();
for (Long uid : fileUids) {
MapSheetMngDto.MngFilesDto dto = mapSheetMngCoreService.findByFileUidMapSheetFile(uid);
@@ -224,39 +225,36 @@ public class MapSheetMngService {
mapSheetMngCoreService.updateByFileUidFileState(uid, "DONE");
}
//delete entries not selected
// delete entries not selected
mapSheetMngCoreService.deleteByNotInFileUidMngFile(reqDto.getHstUid(), fileUids);
//update the Hst (history) table state
// update the Hst (history) table state
mapSheetMngCoreService.updateByHstUidSyncCheckState(reqDto);
return new DmlReturn("success", fileUids.size() + "개 파일이 사용설정되었습니다.");
}
public DmlReturn validationFile(MultipartFile tfwFile, MultipartFile tifFile)
{
if( !FIleChecker.validationMultipart(tfwFile) )return new DmlReturn("fail", "TFW SIZE error");
else if( !FIleChecker.validationMultipart(tifFile) )return new DmlReturn("fail", "TFW SIZE error");
else if (!FIleChecker.checkExtensions(tfwFile.getOriginalFilename(), "tfw"))return new DmlReturn("fail", "TFW FILENAME ERROR");
else if (!FIleChecker.checkExtensions(tifFile.getOriginalFilename(), "tif"))return new DmlReturn("fail", "TIF FILENAME ERROR");
public DmlReturn validationFile(MultipartFile tfwFile, MultipartFile tifFile) {
if (!FIleChecker.validationMultipart(tfwFile)) return new DmlReturn("fail", "TFW SIZE error");
else if (!FIleChecker.validationMultipart(tifFile)) return new DmlReturn("fail", "TIF SIZE error");
else if (!FIleChecker.checkExtensions(tfwFile.getOriginalFilename(), "tfw"))
return new DmlReturn("fail", "TFW FILENAME ERROR");
else if (!FIleChecker.checkExtensions(tifFile.getOriginalFilename(), "tif"))
return new DmlReturn("fail", "TIF FILENAME ERROR");
return new DmlReturn("success", "파일체크");
}
public DmlReturn duplicateFile(int mngYyyy, String tfwFileName, String tifFileName)
{
public DmlReturn duplicateFile(int mngYyyy, String tfwFileName, String tifFileName) {
int tfwCnt = mapSheetMngCoreService.findByYearFileNameFileCount(mngYyyy, tfwFileName);
int tifCnt = mapSheetMngCoreService.findByYearFileNameFileCount(mngYyyy, tifFileName);
if (tfwCnt > 0 || tifCnt > 0) {
String resMsg = "";
if (tfwCnt > 0)
resMsg = tfwFileName;
if (tfwCnt > 0) resMsg = tfwFileName;
if (tifCnt > 0) {
if (tfwCnt > 0)
resMsg = resMsg + "," + tifFileName;
else
resMsg = tifFileName;
if (tfwCnt > 0) resMsg = resMsg + "," + tifFileName;
else resMsg = tifFileName;
}
return new DmlReturn("duplicate", resMsg);
}
@@ -274,8 +272,8 @@ public class MapSheetMngService {
int folderTotCnt = folderList.size();
int folderErrTotCnt =
(int)
folderList.stream().filter(dto -> dto.getIsValid().toString().equals("false")).count();
(int)
folderList.stream().filter(dto -> dto.getIsValid().toString().equals("false")).count();
return new FoldersDto(dirPath, folderTotCnt, folderErrTotCnt, folderList);
}
@@ -287,14 +285,14 @@ public class MapSheetMngService {
int endPos = srchDto.getEndPos();
List<FIleChecker.Basic> files =
FIleChecker.getFilesFromAllDepth(
srchDto.getDirPath(),
"*",
srchDto.getExtension(),
1,
srchDto.getSortType(),
startPos,
endPos);
FIleChecker.getFilesFromAllDepth(
srchDto.getDirPath(),
"*",
srchDto.getExtension(),
1,
srchDto.getSortType(),
startPos,
endPos);
int fileListPos = 0;
int fileTotCnt = files.size();

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.model;
import com.kamco.cd.kamcoback.common.utils.zip.ZipUtils;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.model.service.ModelMngService;
@@ -10,15 +11,21 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.transaction.Transactional;
import java.io.IOException;
import java.time.LocalDate;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
@Tag(name = "모델 관리", description = "모델 관리 API")
@RequiredArgsConstructor
@@ -29,6 +36,8 @@ public class ModelMngApiController {
private final ModelMngService modelMngService;
@Autowired private ZipUtils zipUtils;
@Operation(summary = "모델관리 목록")
@GetMapping
public ApiResponseDto<Page<ModelMngDto.ModelList>> findModelMgmtList(
@@ -70,4 +79,10 @@ public class ModelMngApiController {
String modelVer) {
return ApiResponseDto.okObject(modelMngService.removeModel(modelVer));
}
@Operation(summary = "모델 zip 파일 업로드", description = "모델 zip 파일 업로드")
@PostMapping(value = "/upload/zip", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public void upload(@RequestPart MultipartFile zipFilie) throws IOException {
zipUtils.processZip(zipFilie.getInputStream());
}
}

View File

@@ -1,12 +1,10 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;
import lombok.RequiredArgsConstructor;
import org.hibernate.ScrollableResults;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -14,43 +12,63 @@ import org.springframework.transaction.annotation.Transactional;
@RequiredArgsConstructor
public class InferenceResultShpCoreService {
private final InferenceResultRepository inferenceResultRepository;
private final InferenceResultRepository repo;
public record ShpKey(Integer stage, Long mapId, Integer input1, Integer input2) {}
/**
* Based on inference_results, brings the - tb_map_sheet_anal_data_inference -
* tb_map_sheet_anal_data_inference_geom tables up to date.
*/
@Transactional
public InferenceResultShpDto.InferenceCntDto buildInferenceData() {
int inferenceCnt = repo.upsertGroupsFromInferenceResults();
int inferenceGeomCnt = repo.upsertGeomsFromInferenceResults();
/** Streams the DB and hands each completed group to the handler: (key, groupRows) */
InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto();
cntDto.setInferenceCnt(inferenceCnt);
cntDto.setInferenceGeomCnt(inferenceGeomCnt);
return cntDto;
}
/** Fetches analysis data (data_uid) whose files have not yet been created. */
@Transactional(readOnly = true)
public void streamGrouped(
int fetchSize, BiConsumer<ShpKey, List<InferenceResultEntity>> handler) {
public List<Long> findPendingDataUids(int limit) {
return repo.findPendingDataUids(limit);
}
ScrollableResults cursor = inferenceResultRepository.scrollAllOrdered(fetchSize);
/**
* Resets the file-creation state so the analysis data can be regenerated.
*
* <p>- Marks the analysis data (file_created_yn) as not created - Marks every geometry belonging
* to it as not created
*/
@Transactional
public void resetForRegenerate(Long dataUid) {
repo.resetInferenceCreated(dataUid);
repo.resetGeomCreatedByDataUid(dataUid);
}
ShpKey currentKey = null;
List<InferenceResultEntity> buffer = new ArrayList<>(2000);
/**
* Loads the geometries belonging to the given analysis data.
*
* <p>- Only geometries whose files are not yet created - Only geometries with a non-null geometry
*/
@Transactional(readOnly = true)
public List<InferenceResultShpDto.Basic> loadGeomDtos(Long dataUid, int limit) {
List<MapSheetAnalDataInferenceGeomEntity> entities =
repo.findGeomEntitiesByDataUid(dataUid, limit);
try {
while (cursor.next()) {
InferenceResultEntity row = (InferenceResultEntity) cursor.get();
return entities.stream().map(InferenceResultShpDto.Basic::from).toList();
}
ShpKey key = new ShpKey(row.getStage(), row.getMapId(), row.getInput1(), row.getInput2());
// key changed -> previous group complete
if (currentKey != null && !currentKey.equals(key)) {
handler.accept(currentKey, buffer);
buffer = new ArrayList<>(2000);
}
currentKey = key;
buffer.add(row);
}
// last group
if (currentKey != null && !buffer.isEmpty()) {
handler.accept(currentKey, buffer);
}
} finally {
cursor.close();
}
/**
* Marks the geometries and analysis data whose files were created successfully as complete.
*
* @param dataUid analysis data UID
* @param geoUids UIDs of geometries whose files were created
*/
@Transactional
public void markSuccess(Long dataUid, List<Long> geoUids) {
repo.markGeomCreatedByGeoUids(geoUids);
repo.markInferenceCreated(dataUid);
}
}

View File

@@ -6,16 +6,9 @@ import com.kamco.cd.kamcoback.postgres.entity.MapSheetMngHstEntity;
import com.kamco.cd.kamcoback.postgres.repository.mapsheet.MapSheetMngRepository;
import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.Valid;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
@@ -50,7 +43,6 @@ public class MapSheetMngCoreService {
mapSheetMngRepository.deleteByHstUidMngFile(hstUid);
}
public int findByYearFileNameFileCount(int mngYyyy, String fileName) {
return mapSheetMngRepository.findByYearFileNameFileCount(mngYyyy, fileName);
}
@@ -87,8 +79,6 @@ public class MapSheetMngCoreService {
return mapSheetMngRepository.findByFileUidMapSheetFile(fileUid);
}
public MapSheetMngDto.DmlReturn updateExceptUseInference(@Valid List<Long> hstUidList) {
if (!Objects.isNull(hstUidList) && !hstUidList.isEmpty()) {
for (Long hstUid : hstUidList) {
@@ -104,8 +94,6 @@ public class MapSheetMngCoreService {
return new MapSheetMngDto.DmlReturn("success", hstUidList.size() + "개 추론제외 업데이트 하였습니다.");
}
public int mngDataSave(@Valid MapSheetMngDto.AddReq addReq) {
MapSheetMngEntity entity = new MapSheetMngEntity();
@@ -115,7 +103,9 @@ public class MapSheetMngCoreService {
mapSheetMngRepository.deleteByMngYyyyMngAll(addReq.getMngYyyy());
MapSheetMngEntity saved = mapSheetMngRepository.save(entity);
int hstCnt = mapSheetMngRepository.insertMapSheetOrgDataToMapSheetMngHst(saved.getMngYyyy(), saved.getMngPath());
int hstCnt =
mapSheetMngRepository.insertMapSheetOrgDataToMapSheetMngHst(
saved.getMngYyyy(), saved.getMngPath());
mapSheetMngRepository.updateYearState(saved.getMngYyyy(), "DONE");
return hstCnt;
@@ -133,14 +123,11 @@ public class MapSheetMngCoreService {
mapSheetMngRepository.updateMapSheetMngHstSyncCheckState(reqDto);
}
public void updateByFileUidFileState(Long fileUid, String fileState) {
mapSheetMngRepository.updateByFileUidMngFileState(fileUid, fileState);
}
public void deleteByNotInFileUidMngFile(Long hstUid, List<Long> fileUids){
public void deleteByNotInFileUidMngFile(Long hstUid, List<Long> fileUids) {
mapSheetMngRepository.deleteByNotInFileUidMngFile(hstUid, fileUids);
}
}

View File

@@ -161,4 +161,7 @@ public class MapSheetAnalDataInferenceEntity {
@Size(max = 100)
@Column(name = "m3", length = 100)
private String m3;
@Column(name = "file_created_dttm")
private ZonedDateTime fileCreatedDttm;
}

View File

@@ -7,7 +7,7 @@ import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
@@ -63,14 +63,14 @@ public class MapSheetAnalDataInferenceGeomEntity {
@ColumnDefault("now()")
@Column(name = "created_dttm")
private OffsetDateTime createdDttm;
private ZonedDateTime createdDttm;
@Column(name = "created_uid")
private Long createdUid;
@ColumnDefault("now()")
@Column(name = "updated_dttm")
private OffsetDateTime updatedDttm;
private ZonedDateTime updatedDttm;
@Column(name = "updated_uid")
private Long updatedUid;
@@ -90,7 +90,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
@ColumnDefault("now()")
@Column(name = "fit_state_dttm")
private OffsetDateTime fitStateDttm;
private ZonedDateTime fitStateDttm;
@Column(name = "labeler_uid")
private Long labelerUid;
@@ -102,7 +102,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
@ColumnDefault("now()")
@Column(name = "label_state_dttm")
private OffsetDateTime labelStateDttm;
private ZonedDateTime labelStateDttm;
@Column(name = "tester_uid")
private Long testerUid;
@@ -114,7 +114,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
@ColumnDefault("now()")
@Column(name = "test_state_dttm")
private OffsetDateTime testStateDttm;
private ZonedDateTime testStateDttm;
@Column(name = "fit_state_cmmnt", length = Integer.MAX_VALUE)
private String fitStateCmmnt;
@@ -143,4 +143,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
@Column(name = "before_geom", columnDefinition = "geometry")
private Geometry beforeGeom;
@Column(name = "file_created_dttm")
private ZonedDateTime fileCreatedDttm;
}

View File

@@ -60,5 +60,4 @@ public class MapSheetMngFileEntity {
@ColumnDefault("false")
@Column(name = "file_del", nullable = false)
private Boolean fileDel = false;
}

View File

@@ -1,7 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface InferenceResultRepository
extends JpaRepository<InferenceResultEntity, Long>, InferenceResultRepositoryCustom {}
extends JpaRepository<com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity, Long>,
InferenceResultRepositoryCustom {}

View File

@@ -1,8 +1,23 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import org.hibernate.ScrollableResults;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import java.util.List;
public interface InferenceResultRepositoryCustom {
ScrollableResults scrollAllOrdered(int fetchSize);
int upsertGroupsFromInferenceResults();
int upsertGeomsFromInferenceResults();
List<Long> findPendingDataUids(int limit);
int resetInferenceCreated(Long dataUid);
int markInferenceCreated(Long dataUid);
int resetGeomCreatedByDataUid(Long dataUid);
int markGeomCreatedByGeoUids(List<Long> geoUids);
List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(Long dataUid, int limit);
}

View File

@@ -1,13 +1,14 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import com.kamco.cd.kamcoback.postgres.entity.QInferenceResultEntity;
import com.querydsl.jpa.impl.JPAQuery;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import java.time.ZonedDateTime;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.query.Query;
import org.springframework.stereotype.Repository;
@Repository
@@ -16,20 +17,268 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
private final JPAQueryFactory queryFactory;
public ScrollableResults scrollAllOrdered(int fetchSize) {
QInferenceResultEntity e = QInferenceResultEntity.inferenceResultEntity;
@PersistenceContext private final EntityManager em;
JPAQuery<InferenceResultEntity> q =
/** tb_map_sheet_anal_data_inference */
private final QMapSheetAnalDataInferenceEntity inferenceEntity =
QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
/** tb_map_sheet_anal_data_inference_geom */
private final QMapSheetAnalDataInferenceGeomEntity inferenceGeomEntity =
QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
// ===============================
// Upsert (Native only)
// ===============================
/**
* Creates/updates analysis-data units (stage, compare_yyyy, target_yyyy, map_sheet_num) based on
* the inference_results table.
*
* <p>- file_created_yn = false on first insert - detecting_cnt is the inference_results row count
*
* @return number of affected rows
*/
@Override
public int upsertGroupsFromInferenceResults() {
String sql =
"""
INSERT INTO tb_map_sheet_anal_data_inference (
stage,
compare_yyyy,
target_yyyy,
map_sheet_num,
created_dttm,
updated_dttm,
file_created_yn,
detecting_cnt
)
SELECT
r.stage,
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS map_sheet_num,
now() AS created_dttm,
now() AS updated_dttm,
false AS file_created_yn,
count(*) AS detecting_cnt
FROM inference_results r
GROUP BY r.stage, r.input1, r.input2, r.map_id
ON CONFLICT (stage, compare_yyyy, target_yyyy, map_sheet_num)
DO UPDATE SET
updated_dttm = now(),
detecting_cnt = EXCLUDED.detecting_cnt
""";
return em.createNativeQuery(sql).executeUpdate();
}
/**
* Creates/updates per-geometry (uuid) analysis results based on the inference_results table.
*
* <p>- Deduplicates by uuid (DISTINCT ON) - latest updated_dttm wins - geometry accepts both WKB
* and WKT - file_created_yn = false on first insert
*
* @return number of affected rows
*/
@Override
public int upsertGeomsFromInferenceResults() {
String sql =
"""
INSERT INTO tb_map_sheet_anal_data_inference_geom (
uuid, stage, cd_prob, compare_yyyy, target_yyyy, map_sheet_num,
class_before_cd, class_before_prob, class_after_cd, class_after_prob,
geom, area, data_uid, created_dttm, updated_dttm,
file_created_yn
)
SELECT
x.uuid, x.stage, x.cd_prob, x.compare_yyyy, x.target_yyyy, x.map_sheet_num,
x.class_before_cd, x.class_before_prob, x.class_after_cd, x.class_after_prob,
x.geom, x.area, x.data_uid, x.created_dttm, x.updated_dttm,
false AS file_created_yn
FROM (
SELECT DISTINCT ON (r.uuid)
r.uuid,
r.stage,
r.cd_prob,
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS map_sheet_num,
r.before_class AS class_before_cd,
r.before_probability AS class_before_prob,
r.after_class AS class_after_cd,
r.after_probability AS class_after_prob,
CASE
WHEN r.geometry IS NULL THEN NULL
WHEN left(r.geometry, 2) = '01'
THEN ST_SetSRID(ST_GeomFromWKB(decode(r.geometry, 'hex')), 5186)
ELSE ST_GeomFromText(r.geometry, 5186)
END AS geom,
r.area,
di.data_uid,
r.created_dttm,
r.updated_dttm
FROM inference_results r
JOIN tb_map_sheet_anal_data_inference di
ON di.stage = r.stage
AND di.compare_yyyy = r.input1
AND di.target_yyyy = r.input2
AND di.map_sheet_num = r.map_id
ORDER BY r.uuid, r.updated_dttm DESC NULLS LAST, r.uid DESC
) x
ON CONFLICT (uuid)
DO UPDATE SET
stage = EXCLUDED.stage,
cd_prob = EXCLUDED.cd_prob,
compare_yyyy = EXCLUDED.compare_yyyy,
target_yyyy = EXCLUDED.target_yyyy,
map_sheet_num = EXCLUDED.map_sheet_num,
class_before_cd = EXCLUDED.class_before_cd,
class_before_prob = EXCLUDED.class_before_prob,
class_after_cd = EXCLUDED.class_after_cd,
class_after_prob = EXCLUDED.class_after_prob,
geom = EXCLUDED.geom,
area = EXCLUDED.area,
data_uid = EXCLUDED.data_uid,
updated_dttm = now()
""";
return em.createNativeQuery(sql).executeUpdate();
}
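
The CASE expression above branches on the leading characters of the geometry string: hex-encoded WKB is assumed here to start with the little-endian byte-order marker `01` (the `left(r.geometry, 2) = '01'` test), and anything else is treated as WKT. A minimal Java sketch of the same decision using JTS, whose Geometry type the DTOs already use; this helper is illustrative and not part of the repository:

import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.WKBReader;
import org.locationtech.jts.io.WKTReader;

final class GeometryTextParser {
    /** Parses a column that may hold hex-WKB or WKT, tagging the result with SRID 5186. */
    static Geometry parse(String value) throws Exception {
        if (value == null) {
            return null;
        }
        Geometry g =
            value.startsWith("01") // little-endian WKB marker, as in the SQL above
                ? new WKBReader().read(WKBReader.hexToBytes(value))
                : new WKTReader().read(value);
        g.setSRID(5186); // matches ST_SetSRID(..., 5186) / ST_GeomFromText(..., 5186)
        return g;
    }
}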
// ===============================
// Jobs
// ===============================
/**
* Fetches analysis data (data_uid) whose files have not yet been created.
*
* @param limit maximum number of rows
* @return list of data_uid values
*/
@Override
public List<Long> findPendingDataUids(int limit) {
return queryFactory
.select(inferenceEntity.id)
.from(inferenceEntity)
.where(inferenceEntity.fileCreatedYn.isFalse().or(inferenceEntity.fileCreatedYn.isNull()))
.orderBy(inferenceEntity.id.asc())
.limit(limit)
.fetch();
}
// ===============================
// Reset / Mark
// ===============================
/**
* Resets the analysis data's file-creation state so it can be regenerated.
*
* <p>- file_created_yn = false - file_created_dttm = null
*
* @return number of updated rows
*/
@Override
public int resetInferenceCreated(Long dataUid) {
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.selectFrom(e)
.orderBy(e.stage.asc(), e.mapId.asc(), e.input1.asc(), e.input2.asc(), e.id.asc());
.update(inferenceEntity)
.set(inferenceEntity.fileCreatedYn, false)
.set(inferenceEntity.fileCreatedDttm, (ZonedDateTime) null)
.set(inferenceEntity.updatedDttm, now)
.where(inferenceEntity.id.eq(dataUid))
.execute();
}
// unwrap QueryDSL -> Hibernate Query for cursor streaming
Query<?> hQuery = q.createQuery().unwrap(Query.class);
/**
* Marks the analysis data's file creation as complete.
*
* @return number of updated rows
*/
@Override
public int markInferenceCreated(Long dataUid) {
ZonedDateTime now = ZonedDateTime.now();
return hQuery
.setReadOnly(true)
.setFetchSize(fetchSize) // affects PostgreSQL cursor/streaming
.scroll(ScrollMode.FORWARD_ONLY);
return (int)
queryFactory
.update(inferenceEntity)
.set(inferenceEntity.fileCreatedYn, true)
.set(inferenceEntity.fileCreatedDttm, now)
.set(inferenceEntity.updatedDttm, now)
.where(inferenceEntity.id.eq(dataUid))
.execute();
}
/**
* Resets the file-creation state of every geometry belonging to the analysis data.
*
* @return number of updated rows
*/
@Override
public int resetGeomCreatedByDataUid(Long dataUid) {
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.update(inferenceGeomEntity)
.set(inferenceGeomEntity.fileCreatedYn, false)
.set(inferenceGeomEntity.fileCreatedDttm, (ZonedDateTime) null)
.set(inferenceGeomEntity.updatedDttm, now)
.where(inferenceGeomEntity.dataUid.eq(dataUid))
.execute();
}
/**
* Marks geometries (geo_uid) whose files were created as complete.
*
* @param geoUids UIDs of geometries whose files were created
* @return number of updated rows
*/
@Override
public int markGeomCreatedByGeoUids(List<Long> geoUids) {
if (geoUids == null || geoUids.isEmpty()) {
return 0;
}
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.update(inferenceGeomEntity)
.set(inferenceGeomEntity.fileCreatedYn, true)
.set(inferenceGeomEntity.fileCreatedDttm, now)
.set(inferenceGeomEntity.updatedDttm, now)
.where(inferenceGeomEntity.geoUid.in(geoUids))
.execute();
}
// ===============================
// Export source (Entity only)
// ===============================
/**
* Loads geometry data for SHP / GeoJSON file generation.
*
* <p>- Geometries belonging to the given analysis data (data_uid) - geometry present - only rows
* whose files are not yet created
*/
@Override
public List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(
Long dataUid, int limit) {
return queryFactory
.selectFrom(inferenceGeomEntity)
.where(
inferenceGeomEntity.dataUid.eq(dataUid),
inferenceGeomEntity.geom.isNotNull(),
inferenceGeomEntity
.fileCreatedYn
.isFalse()
.or(inferenceGeomEntity.fileCreatedYn.isNull()))
.orderBy(inferenceGeomEntity.geoUid.asc())
.limit(limit)
.fetch();
}
}

View File

@@ -648,8 +648,8 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
public void deleteByFileUidMngFile(Long fileUid) {
long fileCount =
queryFactory
.update(mapSheetMngFileEntity)
.set(mapSheetMngFileEntity.fileDel, true)
.update(mapSheetMngFileEntity)
.set(mapSheetMngFileEntity.fileDel, true)
.where(mapSheetMngFileEntity.fileUid.eq(fileUid))
.execute();
}
@@ -665,34 +665,41 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
}
@Override
public void deleteByNotInFileUidMngFile(Long hstUid, List<Long> fileUids){
public void deleteByNotInFileUidMngFile(Long hstUid, List<Long> fileUids) {
long execCount =
queryFactory
.update(mapSheetMngFileEntity)
.set(mapSheetMngFileEntity.fileDel, true)
.where(mapSheetMngFileEntity.hstUid.eq(hstUid)
.and(mapSheetMngFileEntity.fileUid.notIn(fileUids)))
.execute();
queryFactory
.update(mapSheetMngFileEntity)
.set(mapSheetMngFileEntity.fileDel, true)
.where(
mapSheetMngFileEntity
.hstUid
.eq(hstUid)
.and(mapSheetMngFileEntity.fileUid.notIn(fileUids)))
.execute();
}
@Override
public void updateByFileUidMngFileState(Long fileUid, String fileState) {
long execCount =
queryFactory
.update(mapSheetMngFileEntity)
.set(mapSheetMngFileEntity.fileState, fileState)
.where(mapSheetMngFileEntity.fileUid.eq(fileUid))
.execute();
queryFactory
.update(mapSheetMngFileEntity)
.set(mapSheetMngFileEntity.fileState, fileState)
.where(mapSheetMngFileEntity.fileUid.eq(fileUid))
.execute();
}
@Override
public int findByYearFileNameFileCount(int mngYyyy, String fileName){
Long execCount = queryFactory
.select(mapSheetMngFileEntity.count())
.from(mapSheetMngFileEntity)
.where(mapSheetMngFileEntity.mngYyyy.eq(mngYyyy)
.and(mapSheetMngFileEntity.fileName.eq(fileName)))
.fetchOne();
public int findByYearFileNameFileCount(int mngYyyy, String fileName) {
Long execCount =
queryFactory
.select(mapSheetMngFileEntity.count())
.from(mapSheetMngFileEntity)
.where(
mapSheetMngFileEntity
.mngYyyy
.eq(mngYyyy)
.and(mapSheetMngFileEntity.fileName.eq(fileName)))
.fetchOne();
return Math.toIntExact(execCount);
}