merge develop

This commit is contained in:
2025-12-29 21:03:58 +09:00
63 changed files with 3101 additions and 1389 deletions

View File

@@ -38,6 +38,9 @@ dependencies {
//geometry
implementation 'com.fasterxml.jackson.core:jackson-databind'
implementation "org.geotools:gt-shapefile:30.0"
implementation "org.geotools:gt-referencing:30.0"
implementation "org.geotools:gt-geojson:30.0"
implementation 'org.locationtech.jts.io:jts-io-common:1.20.0'
implementation 'org.locationtech.jts:jts-core:1.19.0'
implementation 'org.hibernate:hibernate-spatial:6.2.7.Final'

View File

@@ -1,22 +0,0 @@
package com.kamco.cd.kamcoback.auth;
import java.security.SecureRandom;
import java.util.Base64;
public class BCryptSaltGenerator {
public static String generateSaltWithEmployeeNo(String employeeNo) {
// A bcrypt salt requires 16 bytes (128 bits)
byte[] randomBytes = new byte[16];
new SecureRandom().nextBytes(randomBytes);
String base64 = Base64.getEncoder().encodeToString(randomBytes);
// Include the employee number (22-char limit, so truncate)
String mixedSalt = (employeeNo + base64).substring(0, 22);
// Assemble into the bcrypt format
return "$2a$10$" + mixedSalt;
}
}

View File

@@ -9,10 +9,10 @@ import lombok.Getter;
@Getter
@AllArgsConstructor
public enum MngStateType implements EnumType {
NOTYET("미처리"),
PROCESSING("진행중"),
DONE("싱크완"),
COMPLETE("작업완료");
NOTYET("동기화 시작"),
PROCESSING("데이터 체크"),
DONE("동기화 작업 종"),
TAKINGERROR("오류 데이터 처리중");
private final String desc;

View File

@@ -1,6 +1,7 @@
package com.kamco.cd.kamcoback.common.enums;
import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose;
import com.kamco.cd.kamcoback.common.utils.enums.CodeHidden;
import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
import lombok.AllArgsConstructor;
import lombok.Getter;
@@ -9,12 +10,13 @@ import lombok.Getter;
@Getter
@AllArgsConstructor
public enum SyncStateType implements EnumType {
@CodeHidden
NOTYET("미처리"),
NOFILE("파일없음"),
NOTPAIR("페어파일누락"),
DUPLICATE("파일중복"),
SIZEERROR("파일용량오류"),
TYPEERROR("파일형식오류"),
TYPEERROR("손상파일"),
@CodeHidden
DONE("완료");
private final String desc;

View File

@@ -1,11 +1,12 @@
package com.kamco.cd.kamcoback.common.utils;
import com.kamco.cd.kamcoback.auth.BCryptSaltGenerator;
import java.util.regex.Pattern;
import org.mindrot.jbcrypt.BCrypt;
public class CommonStringUtils {
private static final int BCRYPT_COST = 10;
/**
* A password of 8~20 characters that includes letters, digits, and special characters
*
@@ -22,11 +23,12 @@ public class CommonStringUtils {
* Hashes a password
*
* @param password the password to hash
* @param employeeNo employee number used to derive the salt
* @return the bcrypt hash string
*/
public static String hashPassword(String password, String employeeNo) {
String salt = BCryptSaltGenerator.generateSaltWithEmployeeNo(employeeNo.trim());
return BCrypt.hashpw(password.trim(), salt);
public static String hashPassword(String password) {
if (password == null) {
throw new IllegalArgumentException("password must not be null");
}
return BCrypt.hashpw(password.trim(), BCrypt.gensalt(BCRYPT_COST));
}
}
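A minimal verification sketch (values are hypothetical; assumes the same org.mindrot.jbcrypt.BCrypt import used above). Because bcrypt embeds the generated salt in the stored hash, verification no longer needs the employee number:
String hash = CommonStringUtils.hashPassword("Passw0rd!");
// checkpw re-derives the salt from the stored hash prefix
boolean ok = BCrypt.checkpw("Passw0rd!", hash); // true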

View File

@@ -29,6 +29,7 @@ import lombok.Getter;
import org.apache.commons.io.FilenameUtils;
import org.geotools.coverage.grid.GridCoverage2D;
import org.geotools.gce.geotiff.GeoTiffReader;
import org.springframework.web.multipart.MultipartFile;
public class FIleChecker {
@@ -455,6 +456,15 @@ public class FIleChecker {
return FIleChecker.getFilesFromAllDepth(dir, targetFileNm, extension, 100, "name", 0, 100);
}
public static int getFileCountFromAllDepth(String dir, String targetFileNm, String extension) {
List<FIleChecker.Basic> basicList =
FIleChecker.getFilesFromAllDepth(dir, targetFileNm, extension);
return (int)
basicList.stream().filter(dto -> dto.getExtension().toString().equals(extension)).count();
}
public static Long getFileTotSize(List<FIleChecker.Basic> files) {
Long fileTotSize = 0L;
@@ -465,6 +475,29 @@ public class FIleChecker {
return fileTotSize;
}
public static boolean multipartSaveTo(MultipartFile mfile, String targetPath) {
Path tmpSavePath = Paths.get(targetPath);
boolean fileUpload = true;
try {
mfile.transferTo(tmpSavePath);
} catch (IOException e) {
// throw new RuntimeException(e);
return false;
}
return true;
}
public static boolean validationMultipart(MultipartFile mfile) {
// Validate the uploaded file
if (mfile == null || mfile.isEmpty() || mfile.getSize() == 0) {
return false;
}
return true;
}
public static boolean checkExtensions(String fileName, String ext) {
if (fileName == null) return false;

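A small usage sketch for the new multipart helpers (the wrapper method and target path are hypothetical, not part of this commit):
// Hypothetical caller: validate first, then persist via the helpers above.
public static boolean storeUpload(MultipartFile mfile, String targetPath) {
if (!FIleChecker.validationMultipart(mfile)) {
return false; // null or empty upload
}
return FIleChecker.multipartSaveTo(mfile, targetPath); // false on IOException
}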
View File

@@ -0,0 +1,10 @@
package com.kamco.cd.kamcoback.common.utils.enums;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface CodeHidden {}

View File

@@ -35,10 +35,19 @@ public class Enums {
return Arrays.stream(enums)
.map(e -> (EnumType) e)
.filter(e -> !isHidden(enumClass, (Enum<?>) e))
.map(e -> new CodeDto(e.getId(), e.getText()))
.toList();
}
private static boolean isHidden(Class<? extends Enum<?>> enumClass, Enum<?> e) {
try {
return enumClass.getField(e.name()).isAnnotationPresent(CodeHidden.class);
} catch (NoSuchFieldException ex) {
return false;
}
}
/** 특정 타입(enum)만 조회 /codes/{type} -> type = RoleType 같은 값 */
public static List<CodeDto> getCodes(String type) {
Class<? extends Enum<?>> enumClass = exposedEnumMap.get(type);

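An effect sketch (assuming SyncStateType is registered in exposedEnumMap under "SyncStateType"): constants marked @CodeHidden are dropped from the exposed code list.
List<CodeDto> codes = Enums.getCodes("SyncStateType");
// -> NOFILE, NOTPAIR, DUPLICATE, SIZEERROR, TYPEERROR (NOTYET and DONE are @CodeHidden)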
View File

@@ -0,0 +1,33 @@
package com.kamco.cd.kamcoback.common.utils.zip;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class CsvFileProcessor implements ZipEntryProcessor {
@Override
public boolean supports(String fileName) {
return fileName.toLowerCase().endsWith(".csv");
}
@Override
public void process(String fileName, InputStream is) throws IOException {
// NOTE: no try-with-resources here -- closing the reader would also close the
// caller's ZipInputStream and break iteration over the remaining zip entries.
BufferedReader br = new BufferedReader(new InputStreamReader(is));
br.lines()
.forEach(
line -> {
String[] cols = line.split(",");
// Process the CSV columns
for (String col : cols) {
log.info(col); // TODO: define real CSV handling once requirements are settled
}
});
}
}

View File

@@ -0,0 +1,73 @@
package com.kamco.cd.kamcoback.common.utils.zip;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.InputStream;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class JsonStreamingFileProcessor implements ZipEntryProcessor {
private final JsonFactory jsonFactory;
public JsonStreamingFileProcessor(ObjectMapper objectMapper) {
// Prevent the parser from closing the underlying ZipInputStream
objectMapper.configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, false);
this.jsonFactory = objectMapper.getFactory();
}
@Override
public boolean supports(String fileName) {
return fileName.toLowerCase().endsWith(".json");
}
@Override
public void process(String fileName, InputStream is) throws IOException {
log.info("JSON process start: {}", fileName);
JsonParser parser = jsonFactory.createParser(is);
// Walk token by token, regardless of the JSON structure
while (parser.nextToken() != null) {
handleToken(parser);
}
log.info("JSON process end: {}", fileName);
}
private void handleToken(JsonParser parser) throws IOException {
JsonToken token = parser.currentToken();
if (token == JsonToken.FIELD_NAME) {
String fieldName = parser.getCurrentName();
// TODO: finalize these fields once the required JSON content is defined
switch (fieldName) {
case "type" -> {
parser.nextToken();
String type = parser.getValueAsString();
log.info("type: {}", type);
}
case "name" -> {
parser.nextToken();
String name = parser.getValueAsString();
log.info("Name: {}", name);
}
case "features" -> {
parser.nextToken();
String features = parser.readValueAsTree().toString();
log.info("features: {}", features);
}
default -> {
parser.nextToken();
parser.skipChildren();
}
}
}
}
}
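A minimal driver sketch for the streaming parser (the inline GeoJSON document is made up):
ObjectMapper om = new ObjectMapper();
JsonStreamingFileProcessor p = new JsonStreamingFileProcessor(om);
byte[] doc = "{\"type\":\"FeatureCollection\",\"name\":\"demo\",\"features\":[]}".getBytes(StandardCharsets.UTF_8);
p.process("sample.json", new ByteArrayInputStream(doc)); // may throw IOException
// logs: type: FeatureCollection / Name: demo / features: []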

View File

@@ -0,0 +1,27 @@
package com.kamco.cd.kamcoback.common.utils.zip;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class TextFileProcessor implements ZipEntryProcessor {
@Override
public boolean supports(String fileName) {
return fileName.toLowerCase().endsWith(".txt");
}
@Override
public void process(String fileName, InputStream is) throws IOException {
// The reader is deliberately not closed: the stream belongs to the caller's
// ZipInputStream and must stay open for the remaining zip entries.
BufferedReader br = new BufferedReader(new InputStreamReader(is));
String line;
while ((line = br.readLine()) != null) {
log.info(line); // TODO: define real text handling when it is needed
}
}
}

View File

@@ -0,0 +1,11 @@
package com.kamco.cd.kamcoback.common.utils.zip;
import java.io.IOException;
import java.io.InputStream;
public interface ZipEntryProcessor {
boolean supports(String fileName);
void process(String fileName, InputStream is) throws IOException;
}

View File

@@ -0,0 +1,49 @@
package com.kamco.cd.kamcoback.common.utils.zip;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class ZipUtils {
private final List<ZipEntryProcessor> processors;
public ZipUtils(List<ZipEntryProcessor> processors) {
this.processors = processors;
}
public void processZip(InputStream zipStream) throws IOException {
try (ZipInputStream zis = new ZipInputStream(zipStream)) {
ZipEntry entry;
while ((entry = zis.getNextEntry()) != null) {
if (entry.isDirectory()) {
continue;
}
String fileName = entry.getName();
processors.stream()
.filter(p -> p.supports(fileName))
.findFirst()
.ifPresent(
processor -> {
try {
processor.process(fileName, zis);
} catch (IOException ioe) {
throw new UncheckedIOException(ioe);
}
});
zis.closeEntry();
}
}
}
}
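A minimal usage sketch (the zip path and the injected zipUtils bean are assumptions): each entry goes to the first processor whose supports() matches, and the shared ZipInputStream stays open across entries.
try (InputStream in = Files.newInputStream(Path.of("/tmp/sample.zip"))) { // java.nio.file
zipUtils.processZip(in); // .csv/.json/.txt entries dispatched by extension
}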

View File

@@ -1,22 +0,0 @@
package com.kamco.cd.kamcoback.config;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/** GeoJSON file monitoring settings */
@Component
@ConfigurationProperties(prefix = "file.config")
@Getter
@Setter
public class FileConfig {
// private String rootSyncDir = "D:\\app\\original-images\\";
// private String tmpSyncDir = rootSyncDir + "tmp\\";
private String rootSyncDir = "/app/original-images/";
private String tmpSyncDir = rootSyncDir + "tmp/";
private String syncFileExt = "tfw,tif";
}

View File

@@ -46,11 +46,18 @@ public class OpenApiConfig {
// Choose server URLs per profile
List<Server> servers = new ArrayList<>();
switch (profile) {
case "prod" -> servers.add(new Server().url(prodUrl).description("운영 서버"));
case "dev" -> servers.add(new Server().url(devUrl).description("개발 서버"));
default ->
servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 개발 서버"));
if ("dev".equals(profile)) {
servers.add(new Server().url(devUrl).description("개발 서버"));
servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버"));
// servers.add(new Server().url(prodUrl).description("운영 서버"));
} else if ("prod".equals(profile)) {
// servers.add(new Server().url(prodUrl).description("운영 서버"));
servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버"));
servers.add(new Server().url(devUrl).description("개발 서버"));
} else {
servers.add(new Server().url("http://localhost:" + serverPort).description("로컬 서버"));
servers.add(new Server().url(devUrl).description("개발 서버"));
// servers.add(new Server().url(prodUrl).description("운영 서버"));
}
return new OpenAPI()

View File

@@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.config;
import com.kamco.cd.kamcoback.auth.CustomAuthenticationProvider;
import com.kamco.cd.kamcoback.auth.JwtAuthenticationFilter;
import com.kamco.cd.kamcoback.auth.MenuAuthorizationManager;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.context.annotation.Bean;
@@ -28,6 +29,7 @@ public class SecurityConfig {
private final JwtAuthenticationFilter jwtAuthenticationFilter;
private final CustomAuthenticationProvider customAuthenticationProvider;
private final MenuAuthorizationManager menuAuthorizationManager;
@Bean
public SecurityFilterChain securityFilterChain(HttpSecurity http) throws Exception {

View File

@@ -0,0 +1,61 @@
package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@Tag(name = "추론결과 데이터 생성", description = "추론결과 데이터 생성 API")
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/inference/shp")
public class InferenceResultShpApiController {
private final InferenceResultShpService inferenceResultShpService;
@Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "데이터 저장 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(implementation = InferenceResultShpDto.InferenceCntDto.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/save")
public ApiResponseDto<InferenceResultShpDto.InferenceCntDto> saveInferenceData() {
return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData());
}
@Operation(summary = "shp 파일 생성", description = "shp 파일 생성")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "파일생성 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = InferenceResultShpDto.FileCntDto.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/create")
public ApiResponseDto<InferenceResultShpDto.FileCntDto> createShpFile() {
return ApiResponseDto.createOK(inferenceResultShpService.createShpFile());
}
}

View File

@@ -0,0 +1,105 @@
package com.kamco.cd.kamcoback.inference.dto;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.UUID;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.locationtech.jts.geom.Geometry;
public class InferenceResultShpDto {
@Getter
@Setter
public static class Basic {
// ===== Identifiers =====
private Long geoUid;
private UUID uuid;
// ===== Group key =====
private Integer stage;
private Long mapId;
private Integer input1; // compare_yyyy
private Integer input2; // target_yyyy
// ===== Inference results =====
private Float cdProb;
private String beforeClass;
private Float beforeProbability;
private String afterClass;
private Float afterProbability;
// ===== Spatial data =====
private Geometry geometry;
private Float area;
/** Entity → DTO conversion */
public static Basic from(MapSheetAnalDataInferenceGeomEntity e) {
Basic d = new Basic();
d.geoUid = e.getGeoUid();
d.uuid = e.getUuid();
d.stage = e.getStage();
d.mapId = e.getMapSheetNum();
d.input1 = e.getCompareYyyy();
d.input2 = e.getTargetYyyy();
d.cdProb = e.getCdProb();
d.beforeClass = e.getClassBeforeCd();
d.beforeProbability = e.getClassBeforeProb();
d.afterClass = e.getClassAfterCd();
d.afterProbability = e.getClassAfterProb();
d.geometry = e.getGeom();
d.area = e.getArea();
return d;
}
}
@Setter
@Getter
@AllArgsConstructor
@NoArgsConstructor
public static class InferenceCntDto {
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 목록 저장 터이터 건수", example = "120")
int sheetAnalDataCnt;
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 데이터 건수", example = "120")
int inferenceCnt;
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 Geom 데이터 건수", example = "120")
int inferenceGeomCnt;
}
@Setter
@Getter
@AllArgsConstructor
@NoArgsConstructor
public static class FileCntDto {
@Schema(description = "shp 파일 생성 수 (덮어쓰기 포함)", example = "120")
private int shp;
@Schema(description = "shx 파일 생성 수 (덮어쓰기 포함)", example = "120")
private int shx;
@Schema(description = "dbf 파일 생성 수 (덮어쓰기 포함)", example = "120")
private int dbf;
@Schema(description = "prj 파일 생성 수 (덮어쓰기 포함)", example = "120")
private int prj;
@Schema(description = "geojson 파일 생성 수 (덮어쓰기 포함)", example = "120")
private int geojson;
}
}

View File

@@ -0,0 +1,17 @@
package com.kamco.cd.kamcoback.inference.dto;
public record WriteCnt(int shp, int shx, int dbf, int prj, int geojson) {
public static WriteCnt zero() {
return new WriteCnt(0, 0, 0, 0, 0);
}
public WriteCnt plus(WriteCnt o) {
return new WriteCnt(
this.shp + o.shp,
this.shx + o.shx,
this.dbf + o.dbf,
this.prj + o.prj,
this.geojson + o.geojson);
}
}
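A quick composition example (hypothetical counts): the record accumulates per-extension write counts immutably.
WriteCnt total = WriteCnt.zero()
.plus(new WriteCnt(1, 1, 1, 1, 0)) // one SHP write: .shp/.shx/.dbf/.prj
.plus(new WriteCnt(0, 0, 0, 0, 1)); // one GeoJSON write
// total -> WriteCnt[shp=1, shx=1, dbf=1, prj=1, geojson=1]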

View File

@@ -0,0 +1,364 @@
package com.kamco.cd.kamcoback.inference.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.dto.WriteCnt;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.geotools.api.data.SimpleFeatureSource;
import org.geotools.api.data.SimpleFeatureStore;
import org.geotools.api.data.Transaction;
import org.geotools.api.feature.simple.SimpleFeature;
import org.geotools.api.feature.simple.SimpleFeatureType;
import org.geotools.api.referencing.crs.CoordinateReferenceSystem;
import org.geotools.data.shapefile.ShapefileDataStore;
import org.geotools.data.shapefile.ShapefileDataStoreFactory;
import org.geotools.feature.DefaultFeatureCollection;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.geojson.geom.GeometryJSON;
import org.geotools.referencing.CRS;
import org.locationtech.jts.geom.Geometry;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class GeoToolsShpWriter implements ShpWriter {
// Use EUC-KR for the DBF file to avoid mangling Korean text
private static final Charset DBF_CHARSET = Charset.forName("EUC-KR");
// GeoJSON output is UTF-8
private static final Charset GEOJSON_CHARSET = StandardCharsets.UTF_8;
// CRS: Korea 2000 / Central Belt 2010
private static final String EPSG_5186 = "EPSG:5186";
/**
* Creates the SHP file set (.shp/.shx/.dbf/.prj).
*
* <p>- Files are derived from shpBasePath, e.g. /Users/kim/export/shp/1_map_2021_2022 → 1_map_2021_2022.shp
* → 1_map_2021_2022.shx → 1_map_2021_2022.dbf → 1_map_2021_2022.prj
*
* <p>- The schema is built from the concrete type of the first valid geometry. - The CRS is set
* to EPSG:5186, and the .prj file is written directly.
*
* @param shpBasePath base path of the SHP files, without extension
* @param rows rows of one group (stage, mapId, input1, input2)
* @return number of files written (created/overwritten) by this call
*/
@Override
public WriteCnt writeShp(String shpBasePath, List<InferenceResultShpDto.Basic> rows) {
if (rows == null || rows.isEmpty()) {
return WriteCnt.zero();
}
// SHP does not allow the abstract Geometry.class, so build the schema
// from the "concrete type" of the first valid geometry.
Geometry firstGeom = firstNonNullGeometry(rows);
if (firstGeom == null) {
throw new IllegalArgumentException("SHP 생성 실패: geometry가 전부 null 입니다. path=" + shpBasePath);
}
@SuppressWarnings("unchecked")
Class<? extends Geometry> geomType = (Class<? extends Geometry>) firstGeom.getClass();
ShapefileDataStore dataStore = null;
try {
File shpFile = new File(shpBasePath + ".shp");
createDirectories(shpFile);
// Load the EPSG:5186 CRS
CoordinateReferenceSystem crs = CRS.decode(EPSG_5186, false);
// Build the FeatureType (schema)
SimpleFeatureType schema = createSchema(geomType, crs);
// Create the ShapefileDataStore (recreates/overwrites existing files)
dataStore = createDataStore(shpFile, schema);
// Build the FeatureCollection
DefaultFeatureCollection collection = buildFeatureCollection(schema, rows);
// Write the features to the SHP file
writeFeatures(dataStore, collection);
// Write the .prj file directly (EPSG:5186)
writePrjFile(shpBasePath, crs);
log.info("SHP 생성 완료: {} ({} features)", shpFile.getAbsolutePath(), collection.size());
// Overwrites included: a normal return counts as 4 file writes
return new WriteCnt(1, 1, 1, 1, 0);
} catch (Exception e) {
throw new RuntimeException("SHP 생성 실패: " + shpBasePath, e);
} finally {
if (dataStore != null) {
try {
dataStore.dispose();
} catch (Exception ignore) {
}
}
}
}
/**
* Creates the GeoJSON file (.geojson).
*
* <p>- Output is a FeatureCollection. - The top level carries name / crs / properties. - One
* Feature is emitted per polygon. - Geometry is serialized with GeoTools GeometryJSON.
*
* <p>Example GeoJSON structure: { "type": "FeatureCollection", "name": "stage_input1_input2_mapId", "crs": {
* "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::5186" } }, "properties": { ...
* }, "features": [ ... ] }
*
* @param geoJsonPath full path of the GeoJSON file to create (including .geojson)
* @param rows rows of one group (stage, mapId, input1, input2)
* @return number of files written (created/overwritten) by this call
*/
@Override
public WriteCnt writeGeoJson(String geoJsonPath, List<InferenceResultShpDto.Basic> rows) {
if (rows == null || rows.isEmpty()) {
return WriteCnt.zero();
}
try {
File geoJsonFile = new File(geoJsonPath);
createDirectories(geoJsonFile);
// Group-level metadata comes from the first row
InferenceResultShpDto.Basic first = rows.get(0);
ObjectMapper om = new ObjectMapper();
GeometryJSON gj = new GeometryJSON(15);
// FeatureCollection root
ObjectNode root = om.createObjectNode();
root.put("type", "FeatureCollection");
// name: stage_input1_input2_mapId
String name =
String.format(
"%d_%d_%d_%d",
first.getStage(), first.getInput1(), first.getInput2(), first.getMapId());
root.put("name", name);
// CRS (EPSG:5186)
ObjectNode crs = om.createObjectNode();
crs.put("type", "name");
ObjectNode crsProps = om.createObjectNode();
crsProps.put("name", "urn:ogc:def:crs:EPSG::5186");
crs.set("properties", crsProps);
root.set("crs", crs);
// Group-level properties
ObjectNode groupProps = om.createObjectNode();
groupProps.put("stage", first.getStage());
groupProps.put("input1", first.getInput1());
groupProps.put("input2", first.getInput2());
groupProps.put("map_id", first.getMapId());
root.set("properties", groupProps);
// features array
ArrayNode features = om.createArrayNode();
for (InferenceResultShpDto.Basic dto : rows) {
if (dto.getGeometry() == null) {
continue;
}
ObjectNode feature = om.createObjectNode();
feature.put("type", "Feature");
// feature properties
ObjectNode p = om.createObjectNode();
p.put("polygon_id", dto.getUuid() != null ? dto.getUuid().toString() : null);
if (dto.getCdProb() != null) {
p.put("cd_prob", dto.getCdProb());
}
if (dto.getInput1() != null) {
p.put("input1", dto.getInput1());
}
if (dto.getInput2() != null) {
p.put("input2", dto.getInput2());
}
if (dto.getMapId() != null) {
p.put("map_id", dto.getMapId());
}
if (dto.getArea() != null) {
p.put("area", dto.getArea());
}
p.put("before_c", dto.getBeforeClass());
if (dto.getBeforeProbability() != null) {
p.put("before_p", dto.getBeforeProbability());
}
p.put("after_c", dto.getAfterClass());
if (dto.getAfterProbability() != null) {
p.put("after_p", dto.getAfterProbability());
}
feature.set("properties", p);
// geometry
String geomJson = gj.toString(dto.getGeometry());
JsonNode geomNode = om.readTree(geomJson);
feature.set("geometry", geomNode);
features.add(feature);
}
root.set("features", features);
// Write the file
try (OutputStreamWriter w =
new OutputStreamWriter(new FileOutputStream(geoJsonFile), GEOJSON_CHARSET)) {
om.writerWithDefaultPrettyPrinter().writeValue(w, root);
}
log.info("GeoJSON 생성 완료: {} ({} features)", geoJsonFile.getAbsolutePath(), features.size());
// Overwrites included: a normal return counts as one geojson write
return new WriteCnt(0, 0, 0, 0, 1);
} catch (Exception e) {
throw new RuntimeException("GeoJSON 생성 실패: " + geoJsonPath, e);
}
}
private Geometry firstNonNullGeometry(List<InferenceResultShpDto.Basic> rows) {
for (InferenceResultShpDto.Basic r : rows) {
if (r != null && r.getGeometry() != null) {
return r.getGeometry();
}
}
return null;
}
private SimpleFeatureType createSchema(
Class<? extends Geometry> geomType, CoordinateReferenceSystem crs) {
SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();
b.setName("inference_result");
b.setCRS(crs);
// Geometry must be the first column
b.add("the_geom", geomType);
// DBF column definitions (mind the 10-character field-name limit)
b.length(36).add("poly_id", String.class);
b.add("cd_prob", Double.class);
b.add("input1", Integer.class);
b.add("input2", Integer.class);
b.add("map_id", Long.class);
b.add("area", Double.class);
b.length(20).add("before_c", String.class);
b.add("before_p", Double.class);
b.length(20).add("after_c", String.class);
b.add("after_p", Double.class);
return b.buildFeatureType();
}
/**
* Creates the .shp/.shx/.dbf/.fix file shells (empty skeletons).
*
* @param shpFile target .shp file
* @param schema feature type describing the columns
* @return the configured ShapefileDataStore
* @throws Exception if the data store cannot be created
*/
private ShapefileDataStore createDataStore(File shpFile, SimpleFeatureType schema)
throws Exception {
Map<String, Serializable> params = new HashMap<>();
params.put("url", shpFile.toURI().toURL());
// Boolean.TRUE creates the .fix index file, Boolean.FALSE skips it
params.put("create spatial index", Boolean.FALSE);
ShapefileDataStoreFactory factory = new ShapefileDataStoreFactory();
ShapefileDataStore dataStore = (ShapefileDataStore) factory.createNewDataStore(params);
dataStore.setCharset(DBF_CHARSET);
dataStore.createSchema(schema);
return dataStore;
}
private DefaultFeatureCollection buildFeatureCollection(
SimpleFeatureType schema, List<InferenceResultShpDto.Basic> rows) {
DefaultFeatureCollection collection = new DefaultFeatureCollection();
SimpleFeatureBuilder builder = new SimpleFeatureBuilder(schema);
for (InferenceResultShpDto.Basic dto : rows) {
if (dto == null || dto.getGeometry() == null) {
continue;
}
builder.add(dto.getGeometry());
builder.add(dto.getUuid() != null ? dto.getUuid().toString() : null);
builder.add(dto.getCdProb() != null ? dto.getCdProb().doubleValue() : null);
builder.add(dto.getInput1());
builder.add(dto.getInput2());
builder.add(dto.getMapId());
builder.add(dto.getArea() != null ? dto.getArea().doubleValue() : null);
builder.add(dto.getBeforeClass());
builder.add(
dto.getBeforeProbability() != null ? dto.getBeforeProbability().doubleValue() : null);
builder.add(dto.getAfterClass());
builder.add(
dto.getAfterProbability() != null ? dto.getAfterProbability().doubleValue() : null);
SimpleFeature feature = builder.buildFeature(null);
collection.add(feature);
builder.reset();
}
return collection;
}
private void writeFeatures(ShapefileDataStore dataStore, DefaultFeatureCollection collection)
throws Exception {
String typeName = dataStore.getTypeNames()[0];
SimpleFeatureSource featureSource = dataStore.getFeatureSource(typeName);
if (!(featureSource instanceof SimpleFeatureStore store)) {
throw new IllegalStateException("FeatureStore 생성 실패");
}
store.setTransaction(Transaction.AUTO_COMMIT);
store.addFeatures(collection);
store.getTransaction().commit();
}
private void writePrjFile(String shpBasePath, CoordinateReferenceSystem crs) throws Exception {
File prjFile = new File(shpBasePath + ".prj");
createDirectories(prjFile);
Files.writeString(prjFile.toPath(), crs.toWKT(), StandardCharsets.UTF_8);
}
private void createDirectories(File file) throws Exception {
File parent = file.getParentFile();
if (parent != null) {
Files.createDirectories(parent.toPath());
}
}
}
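A hedged end-to-end sketch (assumptions: made-up coordinates and paths, direct instantiation instead of Spring injection, and an EPSG database such as gt-epsg-hsql on the classpath so CRS.decode can resolve EPSG:5186):
// JTS geometry for one fake detection polygon (org.locationtech.jts.geom)
GeometryFactory gf = new GeometryFactory();
Geometry square =
gf.createPolygon(
new Coordinate[] {
new Coordinate(0, 0), new Coordinate(0, 10),
new Coordinate(10, 10), new Coordinate(10, 0),
new Coordinate(0, 0) // the ring must close on the first point
});
InferenceResultShpDto.Basic row = new InferenceResultShpDto.Basic();
row.setStage(1);
row.setMapId(34602L);
row.setInput1(2021); // compare_yyyy
row.setInput2(2022); // target_yyyy
row.setGeometry(square);
ShpWriter writer = new GeoToolsShpWriter();
WriteCnt cnt = writer.writeShp("/tmp/shp/1_34602_2021_2022", List.of(row));
cnt = cnt.plus(writer.writeGeoJson("/tmp/geojson/1_34602_2021_2022.geojson", List.of(row)));
// cnt -> WriteCnt[shp=1, shx=1, dbf=1, prj=1, geojson=1]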

View File

@@ -0,0 +1,94 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.dto.WriteCnt;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@RequiredArgsConstructor
@Transactional(readOnly = true)
public class InferenceResultShpService {
private final InferenceResultShpCoreService coreService;
private final ShpWriter shpWriter;
/** Brings the analysis-result and geometry tables up to date, based on the inference_results table. */
@Transactional
public InferenceResultShpDto.InferenceCntDto saveInferenceResultData() {
return coreService.buildInferenceData();
}
/**
* Creates SHP / GeoJSON files per analysis-data unit.
*
* <p>Processing flow: 1. Fetch analysis data whose files are not yet generated 2. Reset state
* for regeneration 3. Load geometry data 4. Write SHP / GeoJSON files 5. Mark file generation
* complete
*
* <p>If a step fails midway, the whole unit is regenerated on the next run.
*/
@Transactional
public InferenceResultShpDto.FileCntDto createShpFile() {
// TODO: adjust once the file path is decided; read it from properties
String baseDir = System.getProperty("user.home") + "/detect/result";
// TODO: switch to batch execution
int batchSize = 100;
int geomLimit = 500_000;
WriteCnt total = WriteCnt.zero();
List<Long> dataUids = coreService.findPendingDataUids(batchSize);
for (Long dataUid : dataUids) {
// Reset generation state for regeneration
coreService.resetForRegenerate(dataUid);
// Load the inference data
List<InferenceResultShpDto.Basic> dtoList = coreService.loadGeomDtos(dataUid, geomLimit);
if (dtoList.isEmpty()) {
continue;
}
// Build the file name (stage_mapSheet_compare_target)
InferenceResultShpDto.Basic first = dtoList.get(0);
String baseName =
String.format(
"%d_%d_%d_%d",
first.getStage(), first.getMapId(), first.getInput1(), first.getInput2());
String baseDir2 = "/" + first.getInput1() + "_" + first.getInput2() + "/" + first.getStage();
String shpBasePath = baseDir + baseDir2 + "/shp/" + baseName;
String geoJsonPath = baseDir + baseDir2 + "/geojson/" + baseName + ".geojson";
try {
// The writer returns the number of writes performed by this call
total = total.plus(shpWriter.writeShp(shpBasePath, dtoList));
total = total.plus(shpWriter.writeGeoJson(geoJsonPath, dtoList));
// Mark file generation complete
List<Long> geoUids = dtoList.stream().map(InferenceResultShpDto.Basic::getGeoUid).toList();
coreService.markSuccess(dataUid, geoUids);
} catch (Exception e) {
// On failure, markSuccess is skipped -> regenerated on the next run
// log.warn("파일 생성 실패: dataUid={}, baseName={}", dataUid, baseName, e);
continue;
}
}
InferenceResultShpDto.FileCntDto fileCntDto = new InferenceResultShpDto.FileCntDto();
fileCntDto.setShp(total.shp());
fileCntDto.setShx(total.shx());
fileCntDto.setDbf(total.dbf());
fileCntDto.setPrj(total.prj());
fileCntDto.setGeojson(total.geojson());
return fileCntDto;
}
}

View File

@@ -0,0 +1,14 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.dto.WriteCnt;
import java.util.List;
public interface ShpWriter {
// SHP (.shp/.shx/.dbf/.prj)
WriteCnt writeShp(String shpBasePath, List<InferenceResultShpDto.Basic> rows);
// GeoJSON (.geojson)
WriteCnt writeGeoJson(String geoJsonPath, List<InferenceResultShpDto.Basic> rows);
}

View File

@@ -3,6 +3,10 @@ package com.kamco.cd.kamcoback.mapsheet;
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.code.service.CommonCodeService;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.FilesDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.FoldersDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.SrchFilesDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.SrchFoldersDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
import com.kamco.cd.kamcoback.mapsheet.service.MapSheetMngService;
import io.swagger.v3.oas.annotations.Operation;
@@ -143,6 +147,7 @@ public class MapSheetMngApiController {
return ApiResponseDto.ok(mapSheetMngService.findMapSheetErrorList(searchReq));
}
/*
@Operation(summary = "오류데이터 팝업 > 업로드 처리", description = "오류데이터 팝업 > 업로드 처리")
@ApiResponses(
value = {
@@ -163,6 +168,9 @@ public class MapSheetMngApiController {
return ApiResponseDto.ok(mapSheetMngService.uploadProcess(hstUidList));
}
*/
/*
@Operation(summary = "오류데이터 팝업 > 추론 제외", description = "오류데이터 팝업 > 추론 제외")
@PutMapping("/except-inference")
public ApiResponseDto<MapSheetMngDto.DmlReturn> updateExceptUseInference(
@@ -170,6 +178,10 @@ public class MapSheetMngApiController {
return ApiResponseDto.ok(mapSheetMngService.updateExceptUseInference(hstUidList));
}
*/
@Operation(summary = "페어 파일 업로드", description = "TFW/TIF 두 파일을 쌍으로 업로드 및 검증")
@PostMapping(value = "/upload-pair", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public ApiResponseDto<MapSheetMngDto.DmlReturn> uploadPair(
@@ -177,6 +189,7 @@ public class MapSheetMngApiController {
@RequestPart("tif") MultipartFile tifFile,
@RequestParam(value = "hstUid", required = false) Long hstUid,
@RequestParam(value = "overwrite", required = false) boolean overwrite) {
return ApiResponseDto.createOK(
mapSheetMngService.uploadPair(tfwFile, tifFile, hstUid, overwrite));
}
@@ -186,7 +199,7 @@ public class MapSheetMngApiController {
value = {
@ApiResponse(
responseCode = "201",
description = "파일삭제 처리 성공",
description = "파일조회 성공",
content =
@Content(
mediaType = "application/json",
@@ -196,17 +209,19 @@ public class MapSheetMngApiController {
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@GetMapping("/mng-file-list")
public ApiResponseDto<List<MapSheetMngDto.MngFilesDto>> findHstUidToMapSheetFileList(
public ApiResponseDto<List<MapSheetMngDto.MngFilesDto>> findByHstUidMapSheetFileList(
@RequestParam @Valid Long hstUid) {
return ApiResponseDto.ok(mapSheetMngService.findHstUidToMapSheetFileList(hstUid));
return ApiResponseDto.ok(mapSheetMngService.findByHstUidMapSheetFileList(hstUid));
}
@Operation(summary = "영상관리 > 파일삭제", description = "영상관리 > 파일삭제")
@Operation(
summary = "영상관리 > 파일사용설정 및 중복제거",
description = "영상관리 >파일사용설정 및 중복제거(중복파일제거 및 선택파일사용설정)")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "파일삭제 처리 성공",
description = "파일사용설정 처리 성공",
content =
@Content(
mediaType = "application/json",
@@ -215,9 +230,47 @@ public class MapSheetMngApiController {
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PutMapping("/del-mng-files")
public ApiResponseDto<MapSheetMngDto.DmlReturn> deleteByFileUidMngFile(
@PutMapping("/update-use-mng-files")
public ApiResponseDto<MapSheetMngDto.DmlReturn> updateUseByFileUidMngFile(
@RequestParam @Valid List<Long> fileUids) {
return ApiResponseDto.ok(mapSheetMngService.deleteByFileUidMngFile(fileUids));
return ApiResponseDto.ok(mapSheetMngService.setUseByFileUidMngFile(fileUids));
}
@Operation(summary = "폴더 조회", description = "폴더 조회 (ROOT:/app/original-images 이하로 경로입력)")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/folder-list")
public ApiResponseDto<FoldersDto> getDir(@RequestBody SrchFoldersDto srchDto) {
return ApiResponseDto.createOK(mapSheetMngService.getFolderAll(srchDto));
}
@Operation(summary = "지정폴더내 파일목록 조회", description = "지정폴더내 파일목록 조회")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/file-list")
public ApiResponseDto<FilesDto> getFiles(@RequestBody SrchFilesDto srchDto) {
return ApiResponseDto.createOK(mapSheetMngService.getFilesAll(srchDto));
}
}

View File

@@ -1,143 +0,0 @@
package com.kamco.cd.kamcoback.mapsheet;
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.code.service.CommonCodeService;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.FilesDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.FoldersDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.SrchFilesDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.SrchFoldersDto;
import com.kamco.cd.kamcoback.mapsheet.service.MapSheetMngFileCheckerService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@Tag(name = "영상 관리", description = "영상 관리 API")
@RestController
@RequiredArgsConstructor
@RequestMapping({"/api/mapsheet"})
public class MapSheetMngFileCheckerApiController {
private final CommonCodeService commonCodeService;
private final MapSheetMngFileCheckerService mapSheetMngFileCheckerService;
@Operation(summary = "폴더 조회", description = "폴더 조회 (ROOT:/app/original-images 이하로 경로입력)")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/folder-list")
public ApiResponseDto<FoldersDto> getDir(@RequestBody SrchFoldersDto srchDto) {
return ApiResponseDto.createOK(mapSheetMngFileCheckerService.getFolderAll(srchDto));
}
@Operation(summary = "지정폴더내 파일목록 조회", description = "지정폴더내 파일목록 조회")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/file-list")
public ApiResponseDto<FilesDto> getFiles(@RequestBody SrchFilesDto srchDto) {
return ApiResponseDto.createOK(mapSheetMngFileCheckerService.getFilesAll(srchDto));
}
/*
@Operation(summary = "파일 업로드", description = "파일 업로드 및 TIF 검증")
@PostMapping(value = "/upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public ApiResponseDto<String> uploadFile(
@RequestPart("file") MultipartFile file,
@RequestParam("targetPath") String targetPath,
@RequestParam(value = "overwrite", required = false, defaultValue = "false")
boolean overwrite,
@RequestParam(value = "hstUid", required = false) Long hstUid) {
return ApiResponseDto.createOK(
mapSheetMngFileCheckerService.uploadFile(file, targetPath, overwrite, hstUid));
}
*/
/*
@Operation(summary = "파일 삭제", description = "중복 파일 등 파일 삭제")
@PostMapping("/delete")
public ApiResponseDto<Boolean> deleteFile(@RequestBody SrchFoldersDto dto) {
return ApiResponseDto.createOK(mapSheetMngFileCheckerService.deleteFile(dto.getDirPath()));
}
@Operation(summary = "중복 파일 삭제", description = "중복 데이터 발견 시 기존 데이터를 삭제")
@PostMapping(value = "/delete-file")
public ApiResponseDto<String> deleteDuplicateFile(
@RequestParam("filePath") String filePath, @RequestParam("fileName") String fileName) {
return ApiResponseDto.createOK(
mapSheetMngFileCheckerService.deleteDuplicate(filePath, fileName));
}
*/
/*
@Operation(summary = "지정폴더(하위폴더포함) 파일목록 조회", description = "지정폴더(하위폴더포함) 파일목록 조회")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/file-all-list")
public ApiResponseDto<FilesDto> getAllFiles(@RequestBody SrchFilesDepthDto srchDto) {
return ApiResponseDto.createOK(mapSheetMngFileCheckerService.getFilesDepthAll(srchDto));
}
@Operation(summary = "영상데이터관리 > 영상파일 동기화", description = "영상파일 동기화")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "동기화 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CommonCodeDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "동기화 할수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/sync-process")
public ApiResponseDto<ImageryDto.SyncReturn> syncProcess(
@RequestBody @Valid ImageryDto.searchReq searchReq) {
return ApiResponseDto.ok(mapSheetMngFileCheckerService.syncProcess(searchReq));
}
*/
}

View File

@@ -88,9 +88,27 @@ public class MapSheetMngDto {
private Long syncDuplicateExecCnt;
private Long syncFaultCnt;
private Long syncFaultExecCnt;
private Long syncNoFileCnt;
private Long syncNoFileExecCnt;
@JsonFormatDttm private ZonedDateTime rgstStrtDttm;
@JsonFormatDttm private ZonedDateTime rgstEndDttm;
public String getSyncState() {
if (this.syncStateDoneCnt == 0) return "NOTYET";
else if (this.syncStateDoneCnt < this.syncTotCnt) return "PROCESSING";
return "DONE";
}
public String getDataCheckState() {
if (this.syncDataCheckDoneCnt == 0) return "NOTYET";
else if (this.syncDataCheckDoneCnt < this.syncTotCnt) return "PROCESSING";
return "DONE";
}
public double getSyncStateDoneRate() {
if (this.syncTotCnt == null || this.syncTotCnt == 0) {
return 0.0;
@@ -113,8 +131,21 @@ public class MapSheetMngDto {
return this.syncNotPaireExecCnt + this.syncDuplicateExecCnt + this.syncFaultExecCnt;
}
public String getMngState() {
String mngState = "DONE";
if (this.syncStateDoneCnt == 0) mngState = "NOTYET";
else if (this.syncStateDoneCnt < this.syncTotCnt) mngState = "PROCESSING";
if ((this.syncNotPaireExecCnt + this.syncDuplicateExecCnt + this.syncFaultExecCnt) > 0)
mngState = "TAKINGERROR";
return mngState;
}
public String getMngStateName() {
String enumId = this.mngState;
String enumId = this.getMngState();
if (enumId == null || enumId.isEmpty()) {
enumId = "NOTYET";
}

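A worked example of the derived states (hypothetical counts):
// syncTotCnt=10, syncStateDoneCnt=10, syncNotPaireExecCnt=1, all other counts 0
// getSyncState() -> "DONE" (every sheet is synced)
// getMngState() -> "TAKINGERROR" (pending error-handling counts override DONE)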
View File

@@ -1,530 +0,0 @@
package com.kamco.cd.kamcoback.mapsheet.service;
import static java.lang.String.CASE_INSENSITIVE_ORDER;
import com.kamco.cd.kamcoback.common.exception.DuplicateFileException;
import com.kamco.cd.kamcoback.common.exception.ValidationException;
import com.kamco.cd.kamcoback.common.utils.FIleChecker;
import com.kamco.cd.kamcoback.config.FileConfig;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.FilesDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.FoldersDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.SrchFilesDepthDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.SrchFilesDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.SrchFoldersDto;
import com.kamco.cd.kamcoback.mapsheet.dto.ImageryDto;
import com.kamco.cd.kamcoback.postgres.core.MapSheetMngFileCheckerCoreService;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetMngFileEntity;
import com.kamco.cd.kamcoback.postgres.repository.mapsheet.MapSheetMngFileRepository;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FilenameUtils;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
@Slf4j
@Service
@RequiredArgsConstructor
@Transactional(readOnly = true)
public class MapSheetMngFileCheckerService {
private final MapSheetMngFileCheckerCoreService mapSheetMngFileCheckerCoreService;
private final FileConfig fileConfig;
private final MapSheetMngFileRepository mapSheetMngFileRepository;
// @Value("${mapsheet.upload.skipGdalValidation:false}")
// private boolean skipGdalValidation;
public FoldersDto getFolderAll(SrchFoldersDto srchDto) {
Path startPath = Paths.get(fileConfig.getRootSyncDir() + srchDto.getDirPath());
String dirPath = fileConfig.getRootSyncDir() + srchDto.getDirPath();
String sortType = "name desc";
List<FIleChecker.Folder> folderList = FIleChecker.getFolderAll(dirPath);
int folderTotCnt = folderList.size();
int folderErrTotCnt =
(int)
folderList.stream().filter(dto -> dto.getIsValid().toString().equals("false")).count();
return new FoldersDto(dirPath, folderTotCnt, folderErrTotCnt, folderList);
}
public FilesDto getFilesAll(SrchFilesDto srchDto) {
String dirPath = srchDto.getDirPath();
int startPos = srchDto.getStartPos();
int endPos = srchDto.getEndPos();
List<FIleChecker.Basic> files =
FIleChecker.getFilesFromAllDepth(
srchDto.getDirPath(),
"*",
srchDto.getExtension(),
1,
srchDto.getSortType(),
startPos,
endPos);
int fileListPos = 0;
int fileTotCnt = files.size();
long fileTotSize = FIleChecker.getFileTotSize(files);
return new FilesDto(dirPath, fileTotCnt, fileTotSize, files);
}
public FilesDto getFilesDepthAll(SrchFilesDepthDto srchDto) {
String dirPath = srchDto.getDirPath();
int startPos = srchDto.getStartPos();
int endPos = srchDto.getEndPos();
List<FIleChecker.Basic> files =
FIleChecker.getFilesFromAllDepth(
srchDto.getDirPath(),
"*",
srchDto.getExtension(),
srchDto.getMaxDepth(),
srchDto.getSortType(),
startPos,
endPos);
int fileListPos = 0;
int fileTotCnt = files.size();
long fileTotSize = FIleChecker.getFileTotSize(files);
return new FilesDto(dirPath, fileTotCnt, fileTotSize, files);
}
public Set<String> createExtensionSet(String extensionString) {
if (extensionString == null || extensionString.isBlank()) {
return Set.of();
}
// "java, class" -> ["java", " class"] -> [".java", ".class"]
return Arrays.stream(extensionString.split(","))
.map(ext -> ext.trim())
.filter(ext -> !ext.isEmpty())
.map(ext -> "." + ext.toLowerCase())
.collect(Collectors.toSet());
}
public String extractExtension(Path path) {
String filename = path.getFileName().toString();
int lastDotIndex = filename.lastIndexOf('.');
// No extension, or the file name ends with '.'
if (lastDotIndex == -1 || lastDotIndex == filename.length() - 1) {
return ""; // return an empty string
}
// Extract the extension and lowercase it
return filename.substring(lastDotIndex).toLowerCase();
}
public Comparator<Path> getFileComparator(String sortType) {
// Default comparator on file name (case-insensitive)
Comparator<Path> nameComparator =
Comparator.comparing(path -> path.getFileName().toString(), CASE_INSENSITIVE_ORDER);
Comparator<Path> dateComparator =
Comparator.comparing(
path -> {
try {
return Files.getLastModifiedTime(path);
} catch (IOException e) {
return FileTime.fromMillis(0);
}
});
if ("name desc".equalsIgnoreCase(sortType)) {
return nameComparator.reversed();
} else if ("date".equalsIgnoreCase(sortType)) {
return dateComparator;
} else if ("date desc".equalsIgnoreCase(sortType)) {
return dateComparator.reversed();
} else {
return nameComparator;
}
}
public ImageryDto.SyncReturn syncProcess(ImageryDto.searchReq searchReq) {
return mapSheetMngFileCheckerCoreService.syncProcess(searchReq);
}
@Transactional
public String uploadFile(MultipartFile file, String targetPath, boolean overwrite, Long hstUid) {
try {
// Validate the file
if (file == null || file.isEmpty()) {
throw new ValidationException("업로드 파일이 비어있습니다.");
}
if (file.getOriginalFilename() == null || file.getOriginalFilename().isEmpty()) {
throw new ValidationException("파일명이 유효하지 않습니다.");
}
Path path = Paths.get(targetPath);
// If targetPath does not exist, assume it is a file path and create the parent directory
if (!Files.exists(path)) {
// Treat the path as a file if it ends with an extension
if (targetPath.matches(".*\\.[a-zA-Z]{3,4}$")) {
if (path.getParent() != null) {
Files.createDirectories(path.getParent());
}
} else {
// No extension: treat it as a directory
Files.createDirectories(path);
path = path.resolve(file.getOriginalFilename());
}
} else if (Files.isDirectory(path)) {
path = path.resolve(file.getOriginalFilename());
}
// Create the parent directory of the final file
if (path.getParent() != null && !Files.exists(path.getParent())) {
Files.createDirectories(path.getParent());
}
String filename = path.getFileName().toString();
String ext = FilenameUtils.getExtension(filename).toLowerCase();
String baseName = FilenameUtils.getBaseName(filename);
Path tfwPath =
path.getParent() == null
? Paths.get(baseName + ".tfw")
: path.getParent().resolve(baseName + ".tfw");
Path tifPath =
path.getParent() == null
? Paths.get(baseName + ".tif")
: path.getParent().resolve(baseName + ".tif");
// DB duplicate check
String parentPathStr = path.getParent() != null ? path.getParent().toString() : "";
boolean dbExists =
mapSheetMngFileRepository.existsByFileNameAndFilePath(filename, parentPathStr);
// boolean fileExists = Files.exists(path); // file-system existence is not checked (DB is authoritative)
// Handle the already-exists case (checked against the DB only)
if (!overwrite && dbExists) {
throw new DuplicateFileException("동일한 파일이 이미 존재합니다 (DB): " + path.getFileName());
}
// On overwrite, delete the existing DB row so it can be re-saved
if (overwrite && dbExists) {
mapSheetMngFileRepository.deleteByFileNameAndFilePath(filename, parentPathStr);
}
// Save the uploaded file (replaced when overwrite is allowed)
file.transferTo(path.toFile());
if ("tfw".equals(ext)) {
// Validate the TFW
boolean tfwOk = FIleChecker.checkTfw(path.toString());
if (!tfwOk) {
Files.deleteIfExists(path);
throw new ValidationException(
"유효하지 않은 TFW 파일입니다 (6줄 숫자 형식 검증 실패): " + path.getFileName());
}
// Note: if no TIF with the same base name exists, a TIF upload is still needed
if (!Files.exists(tifPath)) {
// Save the DB metadata anyway (for future pair validation)
saveUploadMeta(path, hstUid);
return "TFW 업로드 성공 (매칭되는 TIF가 아직 없습니다).";
}
// A TIF exists, so the pair requirement is met
saveUploadMeta(path, hstUid);
return "TFW 업로드 성공";
}
if ("tif".equals(ext) || "tiff".equals(ext)) {
// GDAL validation (always performed)
boolean isValidTif = FIleChecker.cmmndGdalInfo(path.toString());
if (!isValidTif) {
Files.deleteIfExists(path);
throw new ValidationException("유효하지 않은 TIF 파일입니다 (GDAL 검증 실패): " + path.getFileName());
}
// TFW existence/validation
if (!Files.exists(tfwPath)) {
Files.deleteIfExists(path);
throw new ValidationException("TFW 파일이 존재하지 않습니다: " + tfwPath.getFileName());
}
boolean tfwOk = FIleChecker.checkTfw(tfwPath.toString());
if (!tfwOk) {
Files.deleteIfExists(path);
throw new ValidationException(
"유효하지 않은 TFW 파일입니다 (6줄 숫자 형식 검증 실패): " + tfwPath.getFileName());
}
saveUploadMeta(path, hstUid);
return "TIF 업로드 성공";
}
// Other extensions: just save and record metadata
saveUploadMeta(path, hstUid);
return "업로드 성공";
} catch (ValidationException | DuplicateFileException e) {
// Rethrow business exceptions as-is
throw e;
} catch (IOException e) {
throw new IllegalArgumentException("파일 I/O 처리 실패: " + e.getMessage(), e);
} catch (Exception e) {
throw new IllegalArgumentException("파일 업로드 처리 중 오류 발생: " + e.getMessage(), e);
}
}
@Transactional
public String uploadPair(
MultipartFile tfwFile,
MultipartFile tifFile,
String targetPath,
boolean overwrite,
Long hstUid) {
try {
log.info(
"uploadPair 시작 - targetPath: {}, overwrite: {}, hstUid: {}",
targetPath,
overwrite,
hstUid);
// Validate the files
if (tfwFile == null || tfwFile.isEmpty()) {
throw new ValidationException("TFW 파일이 비어있습니다.");
}
if (tifFile == null || tifFile.isEmpty()) {
throw new ValidationException("TIF 파일이 비어있습니다.");
}
if (tfwFile.getOriginalFilename() == null || tfwFile.getOriginalFilename().isEmpty()) {
throw new ValidationException("TFW 파일명이 유효하지 않습니다.");
}
if (tifFile.getOriginalFilename() == null || tifFile.getOriginalFilename().isEmpty()) {
throw new ValidationException("TIF 파일명이 유효하지 않습니다.");
}
log.info(
"파일명 - TFW: {}, TIF: {}", tfwFile.getOriginalFilename(), tifFile.getOriginalFilename());
Path basePath = Paths.get(targetPath);
// If targetPath does not exist, create it as a directory
if (!Files.exists(basePath)) {
log.info("대상 경로가 존재하지 않아 디렉토리 생성: {}", basePath);
Files.createDirectories(basePath);
}
// If it is a regular file, use its parent directory as basePath
if (Files.isRegularFile(basePath)) {
log.info("대상 경로가 파일이므로 부모 디렉토리 사용");
basePath = basePath.getParent();
}
if (Files.isDirectory(basePath)) {
log.info("디렉토리 확인됨: {}", basePath);
// For a directory, build the target paths from the original file names
Path tfwPath = basePath.resolve(tfwFile.getOriginalFilename());
Path tifPath = basePath.resolve(tifFile.getOriginalFilename());
// Require identical base names
String tfwBase = FilenameUtils.getBaseName(tfwPath.getFileName().toString());
String tifBase = FilenameUtils.getBaseName(tifPath.getFileName().toString());
if (!tfwBase.equalsIgnoreCase(tifBase)) {
throw new ValidationException("TFW/TIF 파일명이 동일한 베이스가 아닙니다.");
}
// The directory was already created above, so no further creation is needed
// if (tfwPath.getParent() != null) Files.createDirectories(tfwPath.getParent());
// if (tifPath.getParent() != null) Files.createDirectories(tifPath.getParent());
// DB duplicate check and overwrite handling (per file)
String parentPathStr = basePath.toString();
String tfwName = tfwPath.getFileName().toString();
String tifName = tifPath.getFileName().toString();
boolean tfwDbExists =
mapSheetMngFileRepository.existsByFileNameAndFilePath(tfwName, parentPathStr);
boolean tifDbExists =
mapSheetMngFileRepository.existsByFileNameAndFilePath(tifName, parentPathStr);
if (!overwrite && (tfwDbExists || tifDbExists)) {
throw new DuplicateFileException("동일한 파일이 이미 존재합니다 (DB): " + tfwName + ", " + tifName);
}
if (overwrite) {
if (tfwDbExists)
mapSheetMngFileRepository.deleteByFileNameAndFilePath(tfwName, parentPathStr);
if (tifDbExists)
mapSheetMngFileRepository.deleteByFileNameAndFilePath(tifName, parentPathStr);
}
// Save the files
log.info("파일 저장 시작 - TFW: {}, TIF: {}", tfwPath, tifPath);
tfwFile.transferTo(tfwPath.toFile());
tifFile.transferTo(tifPath.toFile());
log.info("파일 저장 완료");
// Validate
log.info("TFW 파일 검증 시작: {}", tfwPath);
boolean tfwOk = FIleChecker.checkTfw(tfwPath.toString());
if (!tfwOk) {
log.warn("TFW 파일 검증 실패: {}", tfwName);
Files.deleteIfExists(tfwPath);
Files.deleteIfExists(tifPath);
throw new ValidationException("유효하지 않은 TFW 파일입니다 (6줄 숫자 형식 검증 실패): " + tfwName);
}
log.info("TFW 파일 검증 성공");
log.info("TIF 파일 검증 시작: {}", tifPath);
boolean isValidTif = FIleChecker.cmmndGdalInfo(tifPath.toString());
if (!isValidTif) {
log.warn("TIF 파일 검증 실패: {}", tifName);
Files.deleteIfExists(tfwPath);
Files.deleteIfExists(tifPath);
throw new ValidationException("유효하지 않은 TIF 파일입니다 (GDAL 검증 실패): " + tifName);
}
log.info("TIF 파일 검증 성공");
// Save metadata (one row per file)
log.info("메타 데이터 저장 시작");
saveUploadMeta(tfwPath, hstUid);
saveUploadMeta(tifPath, hstUid);
log.info("메타 데이터 저장 완료");
return "TFW/TIF 페어 업로드 성공";
} else {
throw new ValidationException("targetPath는 디렉토리여야 합니다.");
}
} catch (ValidationException | DuplicateFileException e) {
// Rethrow business exceptions as-is
log.warn("업로드 비즈니스 예외 발생: {}", e.getMessage());
throw e;
} catch (IOException e) {
log.error("파일 I/O 처리 실패: {}", e.getMessage(), e);
throw new IllegalArgumentException("파일 I/O 처리 실패: " + e.getMessage(), e);
} catch (Exception e) {
log.error("파일 업로드 처리 중 예상치 못한 오류 발생: {}", e.getMessage(), e);
throw new IllegalArgumentException("파일 업로드 처리 중 오류 발생: " + e.getMessage(), e);
}
}
private void saveUploadMeta(Path savedPath, Long hstUid) {
String fullPath = savedPath.toAbsolutePath().toString();
String fileName = savedPath.getFileName().toString();
String ext = FilenameUtils.getExtension(fileName);
MapSheetMngFileEntity entity = new MapSheetMngFileEntity();
if (hstUid != null) {
// Pull metadata from the history row
var hstOpt = mapSheetMngFileCheckerCoreService.findHstByUid(hstUid);
hstOpt.ifPresent(
hst -> {
entity.setHstUid(hst.getHstUid());
entity.setMngYyyy(hst.getMngYyyy());
entity.setMapSheetNum(hst.getMapSheetNum());
entity.setRefMapSheetNum(hst.getRefMapSheetNum());
});
} else {
// Keep the existing heuristic logic
Integer mngYyyy = extractYearFromPath(fullPath);
String mapSheetNum = extractMapSheetNumFromFileName(fileName);
String refMapSheetNum = null;
if (mapSheetNum != null && !mapSheetNum.isEmpty()) {
try {
long num = Long.parseLong(mapSheetNum);
refMapSheetNum = String.valueOf(num / 1000);
} catch (NumberFormatException ignored) {
}
}
entity.setMngYyyy(mngYyyy);
entity.setMapSheetNum(mapSheetNum);
entity.setRefMapSheetNum(refMapSheetNum);
}
entity.setFilePath(savedPath.getParent() != null ? savedPath.getParent().toString() : "");
entity.setFileName(fileName);
entity.setFileExt(ext);
// Record the file size
try {
long size = Files.size(savedPath);
entity.setFileSize(size);
} catch (IOException e) {
entity.setFileSize(0L);
}
mapSheetMngFileRepository.save(entity);
}
private Integer extractYearFromPath(String fullPath) {
// Scan the path for a 4-digit segment and use the first match as the year
// Example path: /Users/.../original-images/2022/2022_25cm/1/34602
String[] parts = fullPath.split("/");
for (String p : parts) {
if (p.matches("\\d{4}")) {
try {
return Integer.parseInt(p);
} catch (NumberFormatException ignored) {
}
}
}
return null;
}
private String extractMapSheetNumFromFileName(String fileName) {
// Use the run of digits in the file name (e.g. 34602027.tif -> 34602027)
String base = FilenameUtils.getBaseName(fileName);
String digits = base.replaceAll("[^0-9]", "");
if (!digits.isEmpty()) {
return digits;
}
return null;
}
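// Worked example of the fallback inference above (paths and names are hypothetical):
// Integer yyyy = extractYearFromPath("/app/original-images/2022/2022_25cm/1/34602"); // 2022
// String num = extractMapSheetNumFromFileName("34602027.tif"); // "34602027"
// String ref = String.valueOf(Long.parseLong(num) / 1000); // "34602"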
@Transactional
public Boolean deleteFile(String filePath) {
try {
Path path = Paths.get(filePath);
return Files.deleteIfExists(path);
} catch (IOException e) {
throw new RuntimeException("파일 삭제 실패: " + e.getMessage());
}
}
@Transactional(readOnly = true)
public List<MapSheetMngFileEntity> findRecentFiles(int limit) {
// Naive: load everything, then sort/limit in memory (switch to a Page request in production)
List<MapSheetMngFileEntity> all = new ArrayList<>();
mapSheetMngFileRepository.findAll().forEach(all::add);
all.sort(
(a, b) -> {
// Descending by fileUid
long av = a.getFileUid() == null ? 0L : a.getFileUid();
long bv = b.getFileUid() == null ? 0L : b.getFileUid();
return Long.compare(bv, av);
});
if (all.size() > limit) {
return all.subList(0, limit);
}
return all;
}
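// Hedged alternative to the in-memory sort above; a sketch assuming the repository
// extends JpaRepository, so findAll(Pageable) is available (not part of this commit):
@Transactional(readOnly = true)
public List<MapSheetMngFileEntity> findRecentFilesPaged(int limit) {
return mapSheetMngFileRepository
.findAll(
org.springframework.data.domain.PageRequest.of(
0,
limit,
org.springframework.data.domain.Sort.by(
org.springframework.data.domain.Sort.Direction.DESC, "fileUid")))
.getContent();
}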
@Transactional
public String deleteDuplicate(String filePath, String fileName) {
try {
Path path = Paths.get(filePath, fileName);
boolean deleted = Files.deleteIfExists(path);
// Delete the DB record as well
mapSheetMngFileRepository.deleteByFileNameAndFilePath(fileName, filePath);
return deleted ? "파일 및 DB 레코드 삭제 완료" : "DB 레코드 삭제 완료 (파일 미존재)";
} catch (IOException e) {
throw new RuntimeException("중복 파일 삭제 실패: " + e.getMessage());
}
}
}

View File

@@ -1,7 +1,10 @@
package com.kamco.cd.kamcoback.mapsheet.service;
import com.kamco.cd.kamcoback.common.utils.FIleChecker;
import com.kamco.cd.kamcoback.config.FileConfig;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.FilesDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.FoldersDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.SrchFilesDto;
import com.kamco.cd.kamcoback.mapsheet.dto.FileDto.SrchFoldersDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.AddReq;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.DmlReturn;
@@ -13,7 +16,6 @@ import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.YearSearchReq;
import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService;
import com.kamco.cd.kamcoback.postgres.entity.YearEntity;
import jakarta.validation.Valid;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -22,6 +24,7 @@ import java.nio.file.StandardCopyOption;
import java.util.Comparator;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -33,7 +36,15 @@ import org.springframework.web.multipart.MultipartFile;
public class MapSheetMngService {
private final MapSheetMngCoreService mapSheetMngCoreService;
private final FileConfig fileConfig;
@Value("${file.sync-root-dir}")
private String syncRootDir;
@Value("${file.sync-tmp-dir}")
private String syncTmpDir;
@Value("${file.sync-file-extention}")
private String syncFileExtention;
public List<MngDto> findMapSheetMngList() {
return mapSheetMngCoreService.findMapSheetMngList();
@@ -74,12 +85,9 @@ public class MapSheetMngService {
}
@Transactional
public DmlReturn mngDataSave(AddReq AddReq) {
return mapSheetMngCoreService.mngDataSave(AddReq);
}
public DmlReturn uploadProcess(@Valid List<Long> hstUidList) {
return mapSheetMngCoreService.uploadProcess(hstUidList);
public DmlReturn mngDataSave(AddReq addReq) {
int execCnt = mapSheetMngCoreService.mngDataSave(addReq);
return new MapSheetMngDto.DmlReturn("success", addReq.getMngYyyy() + "년, " + execCnt + "건 생성");
}
public DmlReturn updateExceptUseInference(@Valid List<Long> hstUidList) {
@@ -90,8 +98,10 @@ public class MapSheetMngService {
public DmlReturn uploadPair(
MultipartFile tfwFile, MultipartFile tifFile, Long hstUid, Boolean overwrite) {
String rootPath = fileConfig.getRootSyncDir();
String tmpPath = fileConfig.getTmpSyncDir();
String rootPath = syncRootDir;
String tmpPath = syncTmpDir;
DmlReturn dmlReturn = new DmlReturn("success", "UPLOAD COMPLETE");
ErrorDataDto errDto = mapSheetMngCoreService.findMapSheetError(hstUid);
@@ -104,88 +114,43 @@ public class MapSheetMngService {
return new DmlReturn("fail", "CREATE TEMP FOLDER ERROR");
}
// File validity checks
if (tfwFile == null || tfwFile.isEmpty() || tfwFile.getSize() == 0) {
return new DmlReturn("fail", "TFW SIZE 오류");
} else if (tifFile == null || tifFile.isEmpty() || tifFile.getSize() == 0) {
return new DmlReturn("fail", "TIF SIZE 오류");
}
// Extension check
if (!FIleChecker.checkExtensions(tfwFile.getOriginalFilename(), "tfw")) {
return new DmlReturn("fail", "TFW FILENAME ERROR");
} else if (!FIleChecker.checkExtensions(tifFile.getOriginalFilename(), "tif")) {
return new DmlReturn("fail", "TIF FILENAME ERROR");
}
// Check the uploaded files' sizes and extensions
dmlReturn = this.validationFile(tfwFile, tifFile);
if (dmlReturn.getFlag().equals("fail")) return dmlReturn;
MngDto mngDto = mapSheetMngCoreService.findMapSheetMng(errDto.getMngYyyy());
String targetYearDir = mngDto.getMngPath();
// Duplicate check
List<FIleChecker.Basic> basicTfwList =
FIleChecker.getFilesFromAllDepth(targetYearDir, tfwFile.getOriginalFilename(), "tfw");
List<FIleChecker.Basic> basicTifList =
FIleChecker.getFilesFromAllDepth(targetYearDir, tifFile.getOriginalFilename(), "tif");
int tfwCnt =
(int)
basicTfwList.stream()
.filter(dto -> dto.getExtension().toString().equals("tfw"))
.count();
int tifCnt =
(int)
basicTifList.stream()
.filter(dto -> dto.getExtension().toString().equals("tif"))
.count();
if (!overwrite) {
if (tfwCnt > 0 || tifCnt > 0) {
String tfwtifMsg = "";
if (tfwCnt > 0) {
tfwtifMsg = tfwFile.getOriginalFilename();
}
if (tifCnt > 0) {
if (tfwCnt > 0) {
tfwtifMsg = "," + tifFile.getOriginalFilename();
} else {
tfwtifMsg = tifFile.getOriginalFilename();
}
}
return new DmlReturn("duplicate", tfwtifMsg);
}
dmlReturn =
this.duplicateFile(
errDto.getMngYyyy(), tfwFile.getOriginalFilename(), tifFile.getOriginalFilename());
if (dmlReturn.getFlag().equals("duplicate")) return dmlReturn;
}
File directory = new File(tmpPath);
// Save the multipart files to the tmp folder (for file-format checks)
String tfwTmpPath = tmpPath + tfwFile.getOriginalFilename();
Path tfwTmpSavePath = Paths.get(tfwTmpPath);
String tifTmpPath = tmpPath + tifFile.getOriginalFilename();
Path tifTmpSavePath = Paths.get(tifTmpPath);
boolean fileUpload = true;
try {
tfwFile.transferTo(tfwTmpSavePath);
tifFile.transferTo(tifTmpSavePath);
} catch (IOException e) {
// throw new RuntimeException(e);
return new DmlReturn("fail", "UPLOAD ERROR");
}
if (!FIleChecker.cmmndGdalInfo(tifTmpPath)) {
return new DmlReturn("fail", "TIF TYPE ERROR");
}
if (!FIleChecker.checkTfw(tfwTmpPath)) {
return new DmlReturn("fail", "TFW TYPE ERROR");
}
if (!FIleChecker.multipartSaveTo(tfwFile, tfwTmpPath))
return new DmlReturn("fail", "UPLOAD ERROR");
if (!FIleChecker.multipartSaveTo(tifFile, tifTmpPath))
return new DmlReturn("fail", "UPLOAD ERROR");
if (!FIleChecker.cmmndGdalInfo(tifTmpPath)) return new DmlReturn("fail", "TIF TYPE ERROR");
if (!FIleChecker.checkTfw(tfwTmpPath)) return new DmlReturn("fail", "TFW TYPE ERROR");
// Resolve the upload path from the sync file list
List<MngFilesDto> mngFiles = mapSheetMngCoreService.findIdToMapSheetFileList(hstUid);
List<MngFilesDto> mngFiles = mapSheetMngCoreService.findByHstUidMapSheetFileList(hstUid);
// Take the upload path from the first sync-file entry, if any
String uploadPath = mngFiles.isEmpty() ? "" : mngFiles.get(0).getFilePath();
Path tfwTmpSavePath = Paths.get(tfwTmpPath);
Path tifTmpSavePath = Paths.get(tifTmpPath);
Path tfwTargetPath = null;
Path tifTargetPath = null;
@@ -229,6 +194,7 @@ public class MapSheetMngService {
addReq.setMapSheetNum(errDto.getMapSheetNum());
addReq.setRefMapSheetNum(errDto.getRefMapSheetNum());
addReq.setFilePath(uploadPath);
addReq.setFileName(tfwFile.getOriginalFilename());
addReq.setFileExt("tfw");
addReq.setFileSize(tfwFile.getSize());
@@ -237,6 +203,7 @@ public class MapSheetMngService {
mapSheetMngCoreService.mngFileSave(addReq);
addReq.setFileName(tifFile.getOriginalFilename());
addReq.setFileExt("tif");
addReq.setFileSize(tifFile.getSize());
mapSheetMngCoreService.mngFileSave(addReq);
@@ -244,40 +211,106 @@ public class MapSheetMngService {
return new DmlReturn("success", "파일 업로드 완료되었습니다.");
}
public List<MngFilesDto> findHstUidToMapSheetFileList(Long hstUid) {
return mapSheetMngCoreService.findHstUidToMapSheetFileList(hstUid);
public List<MngFilesDto> findByHstUidMapSheetFileList(Long hstUid) {
return mapSheetMngCoreService.findByHstUidMapSheetFileList(hstUid);
}
@Transactional
public DmlReturn deleteByFileUidMngFile(List<Long> fileUids) {
public DmlReturn setUseByFileUidMngFile(List<Long> fileUids) {
long hstUid = 0;
// hstUid = 149049;
DmlReturn dmlReturn = new DmlReturn("success", "정상처리되었습니다.");
MapSheetMngDto.SyncCheckStateReqUpdateDto reqDto =
new MapSheetMngDto.SyncCheckStateReqUpdateDto();
for (Long uid : fileUids) {
MapSheetMngDto.MngFilesDto dto = mapSheetMngCoreService.findIdToMapSheetFile(uid);
hstUid = dto.getHstUid();
MapSheetMngDto.MngFilesDto dto = mapSheetMngCoreService.findByFileUidMapSheetFile(uid);
String filePath = dto.getFilePath() + "/" + dto.getFileName();
Path path = Paths.get(filePath);
try {
boolean isDeleted = Files.deleteIfExists(path);
if (isDeleted) {
System.out.println("파일 삭제 성공: " + filePath);
} else {
System.out.println("삭제 실패: 파일이 존재하지 않습니다.");
}
} catch (IOException e) {
System.err.println("파일 삭제 중 오류 발생: " + e.getMessage());
reqDto.setHstUid(dto.getHstUid());
reqDto.setFilePath(dto.getFilePath());
reqDto.setSyncCheckState("DONE");
if (dto.getFileExt().equals("tif")) reqDto.setSyncCheckTifFileName(dto.getFileName());
else if (dto.getFileExt().equals("tfw")) reqDto.setSyncCheckTfwFileName(dto.getFileName());
mapSheetMngCoreService.updateByFileUidFileState(uid, "DONE");
}
// Delete the files left out of the selection
mapSheetMngCoreService.deleteByNotInFileUidMngFile(reqDto.getHstUid(), fileUids);
// Update the state on the Hst (history) table
mapSheetMngCoreService.updateByHstUidSyncCheckState(reqDto);
return new DmlReturn("success", fileUids.size() + "개 파일이 사용설정되었습니다.");
}
public DmlReturn validationFile(MultipartFile tfwFile, MultipartFile tifFile) {
if (!FIleChecker.validationMultipart(tfwFile)) return new DmlReturn("fail", "TFW SIZE 오류");
else if (!FIleChecker.validationMultipart(tifFile)) return new DmlReturn("fail", "TIF SIZE 오류");
else if (!FIleChecker.checkExtensions(tfwFile.getOriginalFilename(), "tfw"))
return new DmlReturn("fail", "TFW FILENAME ERROR");
else if (!FIleChecker.checkExtensions(tifFile.getOriginalFilename(), "tif"))
return new DmlReturn("fail", "TIF FILENAME ERROR");
return new DmlReturn("success", "파일체크");
}
public DmlReturn duplicateFile(int mngYyyy, String tfwFileName, String tifFileName) {
int tfwCnt = mapSheetMngCoreService.findByYearFileNameFileCount(mngYyyy, tfwFileName);
int tifCnt = mapSheetMngCoreService.findByYearFileNameFileCount(mngYyyy, tifFileName);
if (tfwCnt > 0 || tifCnt > 0) {
String resMsg = "";
if (tfwCnt > 0) resMsg = tfwFileName;
if (tifCnt > 0) {
if (tfwCnt > 0) resMsg = resMsg + "," + tifFileName;
else resMsg = tifFileName;
}
DmlReturn dmlReturn = mapSheetMngCoreService.deleteByFileUidMngFile(uid);
return new DmlReturn("duplicate", resMsg);
}
// After the duplicate check, set the processing state to DONE
if (hstUid > 0) {
mapSheetMngCoreService.updateByHstUidSyncCheckState(hstUid);
}
return new DmlReturn("success", "파일체크");
}
return new DmlReturn("success", fileUids.size() + "개 파일이 삭제되었습니다.");
public FoldersDto getFolderAll(SrchFoldersDto srchDto) {
String dirPath = syncRootDir + srchDto.getDirPath();
List<FIleChecker.Folder> folderList = FIleChecker.getFolderAll(dirPath);
int folderTotCnt = folderList.size();
int folderErrTotCnt =
(int) folderList.stream().filter(dto -> Boolean.FALSE.equals(dto.getIsValid())).count();
return new FoldersDto(dirPath, folderTotCnt, folderErrTotCnt, folderList);
}
public FilesDto getFilesAll(SrchFilesDto srchDto) {
String dirPath = srchDto.getDirPath();
int startPos = srchDto.getStartPos();
int endPos = srchDto.getEndPos();
List<FIleChecker.Basic> files =
FIleChecker.getFilesFromAllDepth(
srchDto.getDirPath(),
"*",
srchDto.getExtension(),
1,
srchDto.getSortType(),
startPos,
endPos);
int fileTotCnt = files.size();
long fileTotSize = FIleChecker.getFileTotSize(files);
return new FilesDto(dirPath, fileTotCnt, fileTotSize, files);
}
}
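// Hedged usage sketch for the folder/file browsing methods above; the DTO setters are
// assumed to mirror the getters this class reads (hypothetical, not in this commit):
// SrchFilesDto srch = new SrchFilesDto();
// srch.setDirPath("/2024/2024_25cm"); // resolved under file.sync-root-dir
// srch.setExtension("tif");
// srch.setSortType("name");
// srch.setStartPos(0);
// srch.setEndPos(100); // first 100 entries at depth 1
// FilesDto page = mapSheetMngService.getFilesAll(srch);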

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.model;
import com.kamco.cd.kamcoback.common.utils.zip.ZipUtils;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.model.service.ModelMngService;
@@ -10,15 +11,21 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.transaction.Transactional;
import java.io.IOException;
import java.time.LocalDate;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
@Tag(name = "모델 관리", description = "모델 관리 API")
@RequiredArgsConstructor
@@ -29,6 +36,8 @@ public class ModelMngApiController {
private final ModelMngService modelMngService;
private final ZipUtils zipUtils;
@Operation(summary = "모델관리 목록")
@GetMapping
public ApiResponseDto<Page<ModelMngDto.ModelList>> findModelMgmtList(
@@ -70,4 +79,10 @@ public class ModelMngApiController {
String modelVer) {
return ApiResponseDto.okObject(modelMngService.removeModel(modelVer));
}
@Operation(summary = "모델 zip 파일 업로드", description = "모델 zip 파일 업로드")
@PostMapping(value = "/upload/zip", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public void upload(@RequestPart MultipartFile zipFile) throws IOException {
zipUtils.processZip(zipFile.getInputStream());
}
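// Hedged test sketch for the endpoint above (not part of this commit). MockMvc wiring
// and the "/api/model" base path are assumptions; the part name follows @RequestPart.
// mockMvc.perform(MockMvcRequestBuilders
// .multipart("/api/model/upload/zip")
// .file(new MockMultipartFile(
// "zipFile", "model.zip", "application/zip", zipBytes)))
// .andExpect(MockMvcResultMatchers.status().isOk());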
}

View File

@@ -6,7 +6,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetAnalDataRepository;
import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx5kRepository;
import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.constraints.NotNull;
@@ -20,7 +20,7 @@ import org.springframework.transaction.annotation.Transactional;
@RequiredArgsConstructor
public class InferenceResultCoreService {
private final InferenceResultRepository inferenceResultRepository;
private final MapSheetAnalDataRepository mapSheetAnalDataRepository;
private final MapInkx5kRepository mapInkx5kRepository;
/**
@@ -31,7 +31,7 @@ public class InferenceResultCoreService {
*/
public Page<InferenceResultDto.AnalResList> getInferenceResultList(
InferenceResultDto.SearchReq searchReq) {
return inferenceResultRepository.getInferenceResultList(searchReq);
return mapSheetAnalDataRepository.getInferenceResultList(searchReq);
}
/**
@@ -42,7 +42,7 @@ public class InferenceResultCoreService {
*/
public InferenceResultDto.AnalResSummary getInferenceResultSummary(Long id) {
InferenceResultDto.AnalResSummary summary =
inferenceResultRepository
mapSheetAnalDataRepository
.getInferenceResultSummary(id)
.orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
return summary;
@@ -55,7 +55,7 @@ public class InferenceResultCoreService {
* @return
*/
public List<Dashboard> getDashboard(Long id) {
return inferenceResultRepository.getDashboard(id);
return mapSheetAnalDataRepository.getDashboard(id);
}
/**
@@ -66,7 +66,7 @@ public class InferenceResultCoreService {
*/
public Page<InferenceResultDto.Geom> getInferenceResultGeomList(
Long id, InferenceResultDto.SearchGeoReq searchGeoReq) {
return inferenceResultRepository.getInferenceGeomList(id, searchGeoReq);
return mapSheetAnalDataRepository.getInferenceGeomList(id, searchGeoReq);
}
/**
@@ -80,13 +80,13 @@ public class InferenceResultCoreService {
@NotNull Long analyId, InferenceResultDto.SearchGeoReq searchReq) {
// Fetch the dataIds for the given analysis ID.
List<Long> dataIds =
inferenceResultRepository.listAnalyGeom(analyId).stream()
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataEntity::getId)
.boxed()
.toList();
// Fetch the polygon data for those rows
Page<MapSheetAnalDataGeomEntity> mapSheetAnalDataGeomEntities =
inferenceResultRepository.listInferenceResultWithGeom(dataIds, searchReq);
mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataGeomEntity::toEntity);
}
@@ -97,13 +97,13 @@ public class InferenceResultCoreService {
* @return
*/
public List<Long> getSheets(Long id) {
return inferenceResultRepository.getSheets(id);
return mapSheetAnalDataRepository.getSheets(id);
}
@Transactional(readOnly = true)
public List<MapSheet> listGetScenes5k(Long analyId) {
List<String> sceneCodes =
inferenceResultRepository.listAnalyGeom(analyId).stream()
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataEntity::getMapSheetNum)
.mapToObj(String::valueOf)
.toList();

View File

@@ -0,0 +1,76 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@RequiredArgsConstructor
public class InferenceResultShpCoreService {
private final InferenceResultRepository repo;
/**
* Brings the tb_map_sheet_anal_data_inference and
* tb_map_sheet_anal_data_inference_geom tables up to date, based on inference_results.
*/
@Transactional
public InferenceResultShpDto.InferenceCntDto buildInferenceData() {
int sheetAnalDataCnt = repo.upsertGroupsFromMapSheetAnal();
int inferenceCnt = repo.upsertGroupsFromInferenceResults();
int inferenceGeomCnt = repo.upsertGeomsFromInferenceResults();
InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto();
cntDto.setSheetAnalDataCnt(sheetAnalDataCnt);
cntDto.setInferenceCnt(inferenceCnt);
cntDto.setInferenceGeomCnt(inferenceGeomCnt);
return cntDto;
}
/** Returns the analysis data (data_uid) whose file generation has not finished. */
@Transactional(readOnly = true)
public List<Long> findPendingDataUids(int limit) {
return repo.findPendingDataUids(limit);
}
/**
* Resets the existing file-generation state so the analysis data can be regenerated.
*
* <p>- marks the analysis data (file_created_yn) as not generated - marks every geometry belonging to it as not generated
*/
@Transactional
public void resetForRegenerate(Long dataUid) {
repo.resetInferenceCreated(dataUid);
repo.resetGeomCreatedByDataUid(dataUid);
}
/**
* Loads the geometry records belonging to the given analysis data.
*
* <p>- only geometries whose file has not been generated - only rows that actually have a geometry
*/
@Transactional(readOnly = true)
public List<InferenceResultShpDto.Basic> loadGeomDtos(Long dataUid, int limit) {
List<MapSheetAnalDataInferenceGeomEntity> entities =
repo.findGeomEntitiesByDataUid(dataUid, limit);
return entities.stream().map(InferenceResultShpDto.Basic::from).toList();
}
/**
* Marks the geometries and their analysis data as generated after successful file creation.
*
* @param dataUid analysis data UID
* @param geoUids UIDs of the geometries whose files were created
*/
@Transactional
public void markSuccess(Long dataUid, List<Long> geoUids) {
repo.markGeomCreatedByGeoUids(geoUids);
repo.markInferenceCreated(dataUid);
}
}
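// Hedged sketch of the intended batch flow; the scheduled caller and writeShapefile
// helper are assumptions, not part of this commit:
// shpCoreService.buildInferenceData(); // upsert groups + geoms
// for (Long dataUid : shpCoreService.findPendingDataUids(100)) {
// List<InferenceResultShpDto.Basic> geoms = shpCoreService.loadGeomDtos(dataUid, 10000);
// List<Long> written = writeShapefile(dataUid, geoms); // hypothetical SHP writer
// shpCoreService.markSuccess(dataUid, written); // flips file_created_yn
// }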

View File

@@ -0,0 +1,66 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto.ApiResponseCode;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto.ResponseObj;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx50kEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx50kRepository;
import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx5kRepository;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto.UseInferReq;
import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.Valid;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.locationtech.jts.geom.Polygon;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
public class MapInkxMngCoreService {
private final MapInkx5kRepository mapInkx5kRepository;
private final MapInkx50kRepository mapInkx50kRepository;
// List
public Page<MapInkxMngDto.MapList> findMapInkxMngList(
MapInkxMngDto.searchReq searchReq, String useInference, String searchVal) {
return mapInkx5kRepository.findMapInkxMngList(searchReq, useInference, searchVal);
}
// Save
public ResponseObj saveMapInkx5k(MapInkxMngDto.AddMapReq req, Polygon mapPolygon) {
Long existsCount = mapInkx5kRepository.findByMapidCdNoExists(req.getMapidcdNo());
if (existsCount > 0) {
return new ResponseObj(ApiResponseCode.DUPLICATE_DATA, "이미 등록된 도엽코드 입니다.");
}
MapInkx50kEntity mapInkx50k = mapInkx50kRepository.findByMapidCdParentNo(req.getMapidcdNo());
if (mapInkx50k == null) {
return new ResponseObj(ApiResponseCode.NOT_FOUND_DATA, "1:50,000 도엽의 정보가 없습니다. 관리자에게 문의하세요.");
}
MapInkx5kEntity entity =
new MapInkx5kEntity(
req.getMapidcdNo(), req.getMapidNm(), mapPolygon, mapInkx50k, "USE" // defaults to USE
);
mapInkx5kRepository.save(entity);
return new ResponseObj(ApiResponseCode.OK, "");
}
public ResponseObj updateUseInference(@Valid UseInferReq useInferReq) {
MapInkx5kEntity entity =
mapInkx5kRepository
.findByMapidCdNoInfo(useInferReq.getMapidcdNo())
.orElseThrow(() -> new EntityNotFoundException("도엽정보를 찾을 수 없습니다."));
entity.updateUseInference(useInferReq.getUseInference());
return new ResponseObj(ApiResponseCode.OK, "");
}
}
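// Hedged caller sketch (coordinates are hypothetical; plain JTS GeometryFactory usage):
// GeometryFactory gf = new GeometryFactory(new PrecisionModel(), 5186);
// Polygon poly = gf.createPolygon(new Coordinate[] {
// new Coordinate(171000, 551000), new Coordinate(173000, 551000),
// new Coordinate(173000, 553000), new Coordinate(171000, 553000),
// new Coordinate(171000, 551000)});
// service.saveMapInkx5k(addMapReq, poly); // DUPLICATE_DATA / NOT_FOUND_DATA / OK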

View File

@@ -8,16 +8,9 @@ import com.kamco.cd.kamcoback.postgres.entity.YearEntity;
import com.kamco.cd.kamcoback.postgres.repository.mapsheet.MapSheetMngRepository;
import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.Valid;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
@@ -30,8 +23,6 @@ public class MapSheetMngCoreService {
private final MapSheetMngRepository mapSheetMngRepository;
private static final String ORIGINAL_IMAGES_PATH = "/app/original-images";
@Value("{spring.profiles.active}")
private String activeEnv;
@@ -67,10 +58,13 @@ public class MapSheetMngCoreService {
mapSheetMngRepository.deleteByHstUidMngFile(hstUid);
}
public int findByYearFileNameFileCount(int mngYyyy, String fileName) {
return mapSheetMngRepository.findByYearFileNameFileCount(mngYyyy, fileName);
}
public MapSheetMngDto.DmlReturn mngFileSave(@Valid MapSheetMngDto.MngFileAddReq addReq) {
mapSheetMngRepository.mngFileSave(addReq);
// int hstCnt = mapSheetMngRepository.insertMapSheetOrgDataToMapSheetMngHst(saved.getMngYyyy());
return new MapSheetMngDto.DmlReturn("success", "파일정보저장되었습니다.");
}
@@ -88,102 +82,16 @@ public class MapSheetMngCoreService {
return mapSheetMngRepository.findMapSheetError(hstUid);
}
public List<MapSheetMngDto.MngFilesDto> findIdToMapSheetFileList(Long hstUid) {
return mapSheetMngRepository.findIdToMapSheetFileList(hstUid);
public List<MapSheetMngDto.MngFilesDto> findByHstUidMapSheetFileList(Long hstUid) {
return mapSheetMngRepository.findByHstUidMapSheetFileList(hstUid);
}
public MapSheetMngDto.MngFilesDto findYyyyToMapSheetFilePathRefer(int mngYyyy) {
return mapSheetMngRepository.findYyyyToMapSheetFilePathRefer(mngYyyy);
}
public MapSheetMngDto.MngFilesDto findIdToMapSheetFile(Long fileUid) {
return mapSheetMngRepository.findIdToMapSheetFile(fileUid);
}
public MapSheetMngDto.DmlReturn uploadProcess(@Valid List<Long> hstUidList) {
int count = 0;
if (!Objects.isNull(hstUidList) && !hstUidList.isEmpty()) {
for (Long hstUid : hstUidList) {
Optional<MapSheetMngHstEntity> entity =
Optional.ofNullable(
mapSheetMngRepository
.findMapSheetMngHstInfo(hstUid)
.orElseThrow(EntityNotFoundException::new));
String localPath = "";
String rootDir = ORIGINAL_IMAGES_PATH + "/" + entity.get().getMngYyyy();
if (activeEnv.equals("local")) {
rootDir = localPath + rootDir;
}
String filename = entity.get().getMapSheetNum();
String[] extensions = {"tif", "tfw"};
boolean flag = allExtensionsExist(rootDir, filename, extensions);
if (flag) {
count += 1;
}
// Compute and store file sizes
try (Stream<Path> paths = Files.walk(Paths.get(rootDir))) {
List<Path> matched =
paths
.filter(Files::isRegularFile)
.filter(
p -> {
String name = p.getFileName().toString();
return name.equals(filename + ".tif") || name.equals(filename + ".tfw");
})
.collect(Collectors.toList());
long tifSize =
matched.stream()
.filter(p -> p.getFileName().toString().endsWith(".tif"))
.mapToLong(
p -> {
try {
return Files.size(p);
} catch (IOException e) {
return 0L;
}
})
.sum();
long tfwSize =
matched.stream()
.filter(p -> p.getFileName().toString().endsWith(".tfw"))
.mapToLong(
p -> {
try {
return Files.size(p);
} catch (IOException e) {
return 0L;
}
})
.sum();
MapSheetMngHstEntity mapSheetMngHst = entity.get();
mapSheetMngHst.updateFileInfos(tifSize, tfwSize);
// Entity save -> switched to a custom update
mapSheetMngRepository.updateHstFileSizes(
entity.get().getHstUid(), tifSize, tfwSize, tifSize + tfwSize);
} catch (IOException e) {
// Store 0 when size computation fails
MapSheetMngHstEntity mapSheetMng = entity.get();
mapSheetMng.updateFileInfos(0L, 0L);
mapSheetMngRepository.updateHstFileSizes(entity.get().getHstUid(), 0L, 0L, 0L);
}
/*
MapSheetMngDto.DataState dataState =
flag ? MapSheetMngDto.DataState.SUCCESS : MapSheetMngDto.DataState.FAIL;
entity.get().updateDataState(dataState);
*/
}
}
return new MapSheetMngDto.DmlReturn("success", count + "개 업로드 성공하였습니다.");
public MapSheetMngDto.MngFilesDto findByFileUidMapSheetFile(Long fileUid) {
return mapSheetMngRepository.findByFileUidMapSheetFile(fileUid);
}
public MapSheetMngDto.DmlReturn updateExceptUseInference(@Valid List<Long> hstUidList) {
@@ -201,40 +109,7 @@ public class MapSheetMngCoreService {
return new MapSheetMngDto.DmlReturn("success", hstUidList.size() + "개 추론제외 업데이트 하였습니다.");
}
/**
* Checks that a given file name exists with every one of several extensions
*
* @param rootDir top-level directory to search
* @param filename file name (without extension)
* @param extensions extensions to check (e.g. {"tif", "tfw"})
* @return true when every extension exists, false when any is missing
*/
public static boolean allExtensionsExist(String rootDir, String filename, String... extensions) {
try (Stream<Path> paths = Files.walk(Paths.get(rootDir))) {
// Collect all file names into a Set
Set<String> fileNames =
paths
.filter(Files::isRegularFile)
.map(p -> p.getFileName().toString())
.collect(Collectors.toSet());
// Check that every extension is present
for (String ext : extensions) {
String target = filename + "." + ext;
if (!fileNames.contains(target)) {
return false; // any missing -> false
}
}
return true; // all present -> true
} catch (IOException e) {
throw new RuntimeException("File search error", e);
}
}
public MapSheetMngDto.DmlReturn mngDataSave(@Valid MapSheetMngDto.AddReq addReq) {
public int mngDataSave(@Valid MapSheetMngDto.AddReq addReq) {
MapSheetMngEntity entity = new MapSheetMngEntity();
entity.setMngYyyy(addReq.getMngYyyy());
@@ -243,50 +118,31 @@ public class MapSheetMngCoreService {
mapSheetMngRepository.deleteByMngYyyyMngAll(addReq.getMngYyyy());
MapSheetMngEntity saved = mapSheetMngRepository.save(entity);
int hstCnt = mapSheetMngRepository.insertMapSheetOrgDataToMapSheetMngHst(saved.getMngYyyy());
int hstCnt =
mapSheetMngRepository.insertMapSheetOrgDataToMapSheetMngHst(
saved.getMngYyyy(), saved.getMngPath());
mapSheetMngRepository.updateYearState(saved.getMngYyyy(), "DONE");
return new MapSheetMngDto.DmlReturn("success", saved.getMngYyyy().toString());
return hstCnt;
}
public List<MapSheetMngDto.MngFilesDto> findHstUidToMapSheetFileList(Long hstUid) {
return mapSheetMngRepository.findHstUidToMapSheetFileList(hstUid);
}
public MapSheetMngDto.DmlReturn deleteByFileUidMngFile(Long fileUid) {
public void deleteByFileUidMngFile(Long fileUid) {
mapSheetMngRepository.deleteByFileUidMngFile(fileUid);
return new MapSheetMngDto.DmlReturn("success", fileUid + " : 삭제되었습니다.");
}
public MapSheetMngDto.DmlReturn updateByHstUidSyncCheckState(Long hstUid) {
MapSheetMngDto.SyncCheckStateReqUpdateDto reqDto =
new MapSheetMngDto.SyncCheckStateReqUpdateDto();
reqDto.setHstUid(hstUid);
List<MapSheetMngDto.MngFilesDto> filesDto =
mapSheetMngRepository.findHstUidToMapSheetFileList(hstUid);
for (MapSheetMngDto.MngFilesDto dto : filesDto) {
if (dto.getFileExt().equals("tif")) {
reqDto.setSyncCheckTifFileName(dto.getFileName());
} else if (dto.getFileExt().equals("tfw")) {
reqDto.setSyncCheckTfwFileName(dto.getFileName());
}
reqDto.setFilePath(dto.getFilePath());
}
String fileState = "DONE";
if (filesDto.size() > 2) {
fileState = "DONE";
}
reqDto.setSyncCheckState(fileState);
public void updateByHstUidSyncCheckState(MapSheetMngDto.SyncCheckStateReqUpdateDto reqDto) {
mapSheetMngRepository.updateMapSheetMngHstSyncCheckState(reqDto);
mapSheetMngRepository.updateByHstUidMngFileState(hstUid, fileState);
}
return new MapSheetMngDto.DmlReturn("success", hstUid + " : 상태변경되었습니다.");
public void updateByFileUidFileState(Long fileUid, String fileState) {
mapSheetMngRepository.updateByFileUidMngFileState(fileUid, fileState);
}
public void deleteByNotInFileUidMngFile(Long hstUid, List<Long> fileUids) {
mapSheetMngRepository.deleteByNotInFileUidMngFile(hstUid, fileUids);
}
}

View File

@@ -1,6 +1,5 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.auth.BCryptSaltGenerator;
import com.kamco.cd.kamcoback.common.enums.StatusType;
import com.kamco.cd.kamcoback.common.enums.error.AuthErrorCode;
import com.kamco.cd.kamcoback.common.exception.CustomApiException;
@@ -43,10 +42,8 @@ public class MembersCoreService {
throw new DuplicateMemberException(Field.EMPLOYEE_NO, addReq.getEmployeeNo());
}
// Salt generation; the employee number is part of the salt
String salt = BCryptSaltGenerator.generateSaltWithEmployeeNo(addReq.getEmployeeNo().trim());
// Password hashing; the initial password is fixed
String hashedPassword = BCrypt.hashpw(addReq.getPassword(), salt);
String hashedPassword = CommonStringUtils.hashPassword(addReq.getPassword());
MemberEntity memberEntity = new MemberEntity();
memberEntity.setUserId(addReq.getEmployeeNo());
@@ -85,8 +82,7 @@ public class MembersCoreService {
throw new CustomApiException("WRONG_PASSWORD", HttpStatus.BAD_REQUEST);
}
String password =
CommonStringUtils.hashPassword(updateReq.getPassword(), memberEntity.getEmployeeNo());
String password = CommonStringUtils.hashPassword(updateReq.getPassword());
memberEntity.setStatus(StatusType.PENDING.getId());
memberEntity.setLoginFailCount(0);
@@ -111,8 +107,7 @@ public class MembersCoreService {
throw new CustomApiException(AuthErrorCode.LOGIN_PASSWORD_MISMATCH);
}
String password =
CommonStringUtils.hashPassword(initReq.getNewPassword(), memberEntity.getEmployeeNo());
String password = CommonStringUtils.hashPassword(initReq.getNewPassword());
memberEntity.setPassword(password);
memberEntity.setStatus(StatusType.ACTIVE.getId());
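// Hedged login-side counterpart (jBCrypt API; a sketch, not part of this commit):
// hashes produced by CommonStringUtils.hashPassword verify with
// BCrypt.checkpw(rawPassword.trim(), memberEntity.getPassword())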

View File

@@ -0,0 +1,79 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import org.locationtech.jts.geom.Geometry;
@Getter
@Setter
@Entity
@Table(name = "inference_results")
public class InferenceResultEntity {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "uid", nullable = false)
private Long id;
@NotNull
@ColumnDefault("uuid_generate_v4()")
@Column(name = "uuid", nullable = false)
private UUID uuid;
@Column(name = "stage")
private Integer stage;
@Column(name = "cd_prob")
private Float cdProb;
@Column(name = "input1")
private Integer input1;
@Column(name = "input2")
private Integer input2;
@Column(name = "map_id")
private Long mapId;
@Size(max = 20)
@Column(name = "before_class", length = 20)
private String beforeClass;
@Column(name = "before_probability")
private Float beforeProbability;
@Size(max = 20)
@Column(name = "after_class", length = 20)
private String afterClass;
@Column(name = "after_probability")
private Float afterProbability;
@ColumnDefault("st_area(geometry)")
@Column(name = "area")
private Float area;
@NotNull
@ColumnDefault("now()")
@Column(name = "created_dttm", nullable = false)
private ZonedDateTime createdDttm;
@NotNull
@ColumnDefault("now()")
@Column(name = "updated_dttm", nullable = false)
private ZonedDateTime updatedDttm;
@Column(name = "geometry", columnDefinition = "geometry not null")
private Geometry geometry;
}

View File

@@ -1,7 +1,15 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.*;
import com.kamco.cd.kamcoback.postgres.CommonDateEntity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.locationtech.jts.geom.Geometry;
@@ -9,7 +17,9 @@ import org.locationtech.jts.geom.Geometry;
@Setter
@Table(name = "tb_map_inkx_50k")
@Entity
public class MapInkx50kEntity {
@NoArgsConstructor
public class MapInkx50kEntity extends CommonDateEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_inkx_50k_fid_seq_gen")
@SequenceGenerator(
@@ -29,4 +39,11 @@ public class MapInkx50kEntity {
@Column(name = "geom")
private Geometry geom;
public MapInkx50kEntity(String mapidcdNo, String mapidNm, String mapidNo, Geometry geom) {
this.mapidcdNo = mapidcdNo;
this.mapidNm = mapidNm;
this.mapidNo = mapidNo;
this.geom = geom;
}
}

View File

@@ -2,14 +2,19 @@ package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
import com.kamco.cd.kamcoback.postgres.CommonDateEntity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.locationtech.jts.geom.Geometry;
@@ -17,7 +22,8 @@ import org.locationtech.jts.geom.Geometry;
@Setter
@Table(name = "tb_map_inkx_5k")
@Entity
public class MapInkx5kEntity {
@NoArgsConstructor
public class MapInkx5kEntity extends CommonDateEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_inkx_5k_fid_seq_gen")
@@ -36,8 +42,29 @@ public class MapInkx5kEntity {
@Column(name = "geom")
private Geometry geom;
@Column(name = "fid_k50")
private Long fidK50;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "fid_k50", referencedColumnName = "fid")
private MapInkx50kEntity mapInkx50k;
@Column(name = "use_inference")
private String useInference;
public MapInkx5kEntity(
String mapidcdNo,
String mapidNm,
Geometry geom,
MapInkx50kEntity mapInkx50k,
String useInference) {
this.mapidcdNo = mapidcdNo;
this.mapidNm = mapidNm;
this.geom = geom;
this.mapInkx50k = mapInkx50k;
this.useInference = useInference;
}
public void updateUseInference(String useInference) {
this.useInference = useInference;
}
public InferenceResultDto.MapSheet toEntity() {
return new MapSheet(mapidcdNo, mapidNm);

View File

@@ -0,0 +1,167 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
@Getter
@Setter
@Entity
@Table(name = "tb_map_sheet_anal_data_inference")
public class MapSheetAnalDataInferenceEntity {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "data_uid", nullable = false)
private Long id;
@Size(max = 128)
@Column(name = "data_name", length = 128)
private String dataName;
@Size(max = 255)
@Column(name = "data_path")
private String dataPath;
@Size(max = 128)
@Column(name = "data_type", length = 128)
private String dataType;
@Size(max = 128)
@Column(name = "data_crs_type", length = 128)
private String dataCrsType;
@Size(max = 255)
@Column(name = "data_crs_type_name")
private String dataCrsTypeName;
@ColumnDefault("now()")
@Column(name = "created_dttm")
private ZonedDateTime createdDttm;
@Column(name = "created_uid")
private Long createdUid;
@ColumnDefault("now()")
@Column(name = "updated_dttm")
private ZonedDateTime updatedDttm;
@Column(name = "updated_uid")
private Long updatedUid;
@Column(name = "compare_yyyy")
private Integer compareYyyy;
@Column(name = "target_yyyy")
private Integer targetYyyy;
@Column(name = "data_json", length = Integer.MAX_VALUE)
private String dataJson;
@Size(max = 20)
@ColumnDefault("'0'")
@Column(name = "data_state", length = 20)
private String dataState;
@ColumnDefault("now()")
@Column(name = "data_state_dttm")
private ZonedDateTime dataStateDttm;
@Column(name = "anal_strt_dttm")
private ZonedDateTime analStrtDttm;
@Column(name = "anal_end_dttm")
private ZonedDateTime analEndDttm;
@ColumnDefault("0")
@Column(name = "anal_sec")
private Long analSec;
@Size(max = 20)
@Column(name = "anal_state", length = 20)
private String analState;
@Column(name = "anal_uid")
private Long analUid;
@Column(name = "map_sheet_num")
private Long mapSheetNum;
@ColumnDefault("0")
@Column(name = "detecting_cnt")
private Long detectingCnt;
@ColumnDefault("0")
@Column(name = "pnu")
private Long pnu;
@Size(max = 20)
@Column(name = "down_state", length = 20)
private String downState;
@Column(name = "down_state_dttm")
private ZonedDateTime downStateDttm;
@Size(max = 20)
@Column(name = "fit_state", length = 20)
private String fitState;
@Column(name = "fit_state_dttm")
private ZonedDateTime fitStateDttm;
@Column(name = "labeler_uid")
private Long labelerUid;
@Size(max = 20)
@ColumnDefault("NULL")
@Column(name = "label_state", length = 20)
private String labelState;
@Column(name = "label_state_dttm")
private ZonedDateTime labelStateDttm;
@Column(name = "tester_uid")
private Long testerUid;
@Size(max = 20)
@Column(name = "test_state", length = 20)
private String testState;
@Column(name = "test_state_dttm")
private ZonedDateTime testStateDttm;
@Column(name = "fit_state_cmmnt", length = Integer.MAX_VALUE)
private String fitStateCmmnt;
@Column(name = "ref_map_sheet_num")
private Long refMapSheetNum;
@Column(name = "stage")
private Integer stage;
@Column(name = "file_created_yn")
private Boolean fileCreatedYn;
@Size(max = 100)
@Column(name = "m1", length = 100)
private String m1;
@Size(max = 100)
@Column(name = "m2", length = 100)
private String m2;
@Size(max = 100)
@Column(name = "m3", length = 100)
private String m3;
@Column(name = "file_created_dttm")
private ZonedDateTime fileCreatedDttm;
}

View File

@@ -0,0 +1,149 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import org.locationtech.jts.geom.Geometry;
@Getter
@Setter
@Entity
@Table(name = "tb_map_sheet_anal_data_inference_geom")
public class MapSheetAnalDataInferenceGeomEntity {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "geo_uid")
private Long geoUid;
@Column(name = "cd_prob")
private Float cdProb;
@Size(max = 40)
@Column(name = "class_before_cd", length = 40)
private String classBeforeCd;
@Column(name = "class_before_prob")
private Float classBeforeProb;
@Size(max = 40)
@Column(name = "class_after_cd", length = 40)
private String classAfterCd;
@Column(name = "class_after_prob")
private Float classAfterProb;
@Column(name = "map_sheet_num")
private Long mapSheetNum;
@Column(name = "compare_yyyy")
private Integer compareYyyy;
@Column(name = "target_yyyy")
private Integer targetYyyy;
@Column(name = "area")
private Float area;
@Size(max = 100)
@Column(name = "geo_type", length = 100)
private String geoType;
@Column(name = "data_uid")
private Long dataUid;
@ColumnDefault("now()")
@Column(name = "created_dttm")
private ZonedDateTime createdDttm;
@Column(name = "created_uid")
private Long createdUid;
@ColumnDefault("now()")
@Column(name = "updated_dttm")
private ZonedDateTime updatedDttm;
@Column(name = "updated_uid")
private Long updatedUid;
@ColumnDefault("0")
@Column(name = "geom_cnt")
private Long geomCnt;
@ColumnDefault("0")
@Column(name = "pnu")
private Long pnu;
@Size(max = 20)
@ColumnDefault("'0'")
@Column(name = "fit_state", length = 20)
private String fitState;
@ColumnDefault("now()")
@Column(name = "fit_state_dttm")
private ZonedDateTime fitStateDttm;
@Column(name = "labeler_uid")
private Long labelerUid;
@Size(max = 20)
@ColumnDefault("'0'")
@Column(name = "label_state", length = 20)
private String labelState;
@ColumnDefault("now()")
@Column(name = "label_state_dttm")
private ZonedDateTime labelStateDttm;
@Column(name = "tester_uid")
private Long testerUid;
@Size(max = 20)
@ColumnDefault("'0'")
@Column(name = "test_state", length = 20)
private String testState;
@ColumnDefault("now()")
@Column(name = "test_state_dttm")
private ZonedDateTime testStateDttm;
@Column(name = "fit_state_cmmnt", length = Integer.MAX_VALUE)
private String fitStateCmmnt;
@Column(name = "ref_map_sheet_num")
private Long refMapSheetNum;
@ColumnDefault("uuid_generate_v4()")
@Column(name = "uuid")
private UUID uuid;
@Column(name = "stage")
private Integer stage;
@Column(name = "map_5k_id")
private Long map5kId;
@Column(name = "file_created_yn")
private Boolean fileCreatedYn;
@Column(name = "geom", columnDefinition = "geometry")
private Geometry geom;
@Column(name = "geom_center", columnDefinition = "geometry")
private Geometry geomCenter;
@Column(name = "before_geom", columnDefinition = "geometry")
private Geometry beforeGeom;
@Column(name = "file_created_dttm")
private ZonedDateTime fileCreatedDttm;
}

View File

@@ -10,6 +10,7 @@ import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
@Getter
@Setter
@@ -54,4 +55,9 @@ public class MapSheetMngFileEntity {
@Size(max = 20)
@Column(name = "file_state", length = 20)
private String fileState;
@NotNull
@ColumnDefault("false")
@Column(name = "file_del", nullable = false)
private Boolean fileDel = false;
}

View File

@@ -1,7 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface InferenceResultRepository
extends JpaRepository<MapSheetAnalEntity, Long>, InferenceResultRepositoryCustom {}
extends JpaRepository<com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity, Long>,
InferenceResultRepositoryCustom {}

View File

@@ -1,31 +1,25 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import jakarta.validation.constraints.NotNull;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import java.util.List;
import java.util.Optional;
import org.springframework.data.domain.Page;
public interface InferenceResultRepositoryCustom {
Page<InferenceResultDto.AnalResList> getInferenceResultList(
InferenceResultDto.SearchReq searchReq);
int upsertGroupsFromMapSheetAnal();
Optional<InferenceResultDto.AnalResSummary> getInferenceResultSummary(Long id);
int upsertGroupsFromInferenceResults();
Page<InferenceResultDto.Geom> getInferenceGeomList(
Long id, InferenceResultDto.SearchGeoReq searchGeoReq);
int upsertGeomsFromInferenceResults();
Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom(
List<Long> dataIds, SearchGeoReq searchReq);
List<Long> findPendingDataUids(int limit);
List<Long> getSheets(Long id);
int resetInferenceCreated(Long dataUid);
List<Dashboard> getDashboard(Long id);
int markInferenceCreated(Long dataUid);
List<MapSheetAnalDataEntity> listAnalyGeom(@NotNull Long id);
int resetGeomCreatedByDataUid(Long dataUid);
int markGeomCreatedByGeoUids(List<Long> geoUids);
List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(Long dataUid, int limit);
}

View File

@@ -1,27 +1,14 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.*;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Order;
import com.querydsl.core.types.OrderSpecifier;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.JPQLQuery;
import com.querydsl.jpa.impl.JPAQuery;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.ArrayList;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Repository;
@Repository
@@ -29,335 +16,296 @@ import org.springframework.stereotype.Repository;
public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {
private final JPAQueryFactory queryFactory;
private final QModelMngBakEntity tmm = QModelMngBakEntity.modelMngBakEntity;
private final QModelVerEntity tmv = QModelVerEntity.modelVerEntity;
private final QMapSheetAnalEntity mapSheetAnalEntity = QMapSheetAnalEntity.mapSheetAnalEntity;
private final QMapSheetAnalDataEntity mapSheetAnalDataEntity =
QMapSheetAnalDataEntity.mapSheetAnalDataEntity;
private final QMapSheetAnalDataGeomEntity mapSheetAnalDataGeomEntity =
QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
private final QMapSheetAnalSttcEntity mapSheetAnalSttcEntity =
QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
/**
* 분석결과 목록 조회
*
* @param searchReq
* @return
*/
@PersistenceContext private final EntityManager em;
/** tb_map_sheet_anal_data_inference */
private final QMapSheetAnalDataInferenceEntity inferenceEntity =
QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
/** tb_map_sheet_anal_data_inference_geom */
private final QMapSheetAnalDataInferenceGeomEntity inferenceGeomEntity =
QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
// ===============================
// Upsert (Native only)
// ===============================
@Override
public Page<InferenceResultDto.AnalResList> getInferenceResultList(
InferenceResultDto.SearchReq searchReq) {
Pageable pageable = searchReq.toPageable();
// "0000" 전체조회
BooleanBuilder builder = new BooleanBuilder();
if (searchReq.getStatCode() != null && !"0000".equals(searchReq.getStatCode())) {
builder.and(mapSheetAnalEntity.analState.eq(searchReq.getStatCode()));
}
public int upsertGroupsFromMapSheetAnal() {
String sql =
"""
INSERT INTO tb_map_sheet_anal_inference (
compare_yyyy,
target_yyyy,
anal_map_sheet,
stage,
anal_title
)
SELECT
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS anal_map_sheet,
r.stage,
CONCAT(r.stage ,'_', r.input1 ,'_', r.input2 ,'_', r.map_id) as anal_title
FROM inference_results r
GROUP BY r.stage, r.input1, r.input2, r.map_id
ON CONFLICT (compare_yyyy, target_yyyy, anal_map_sheet, stage)
DO UPDATE SET
updated_dttm = now()
""";
// Title
if (searchReq.getTitle() != null) {
builder.and(mapSheetAnalEntity.analTitle.like("%" + searchReq.getTitle() + "%"));
}
List<InferenceResultDto.AnalResList> content =
queryFactory
.select(
Projections.constructor(
InferenceResultDto.AnalResList.class,
mapSheetAnalEntity.id,
mapSheetAnalEntity.analTitle,
mapSheetAnalEntity.analMapSheet,
mapSheetAnalEntity.detectingCnt,
mapSheetAnalEntity.analStrtDttm,
mapSheetAnalEntity.analEndDttm,
mapSheetAnalEntity.analSec,
mapSheetAnalEntity.analPredSec,
mapSheetAnalEntity.analState,
Expressions.stringTemplate(
"fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState),
mapSheetAnalEntity.gukyuinUsed))
.from(mapSheetAnalEntity)
.where(builder)
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(mapSheetAnalEntity.id.desc())
.fetch();
long total =
queryFactory
.select(mapSheetAnalEntity.id)
.from(mapSheetAnalEntity)
.where(builder)
.fetchCount();
return new PageImpl<>(content, pageable, total);
return em.createNativeQuery(sql).executeUpdate();
}
/**
* Analysis result summary
* Creates/updates analysis-data units (stage, compare_yyyy, target_yyyy, map_sheet_num)
* based on the inference_results table.
*
* @param id
* @return
* <p>- file_created_yn = false on first insert - detecting_cnt follows the inference_results row count
*
* @return number of affected rows
*/
@Override
public Optional<InferenceResultDto.AnalResSummary> getInferenceResultSummary(Long id) {
public int upsertGroupsFromInferenceResults() {
// 1. Subquery fetching the latest version UID
JPQLQuery<Long> latestVerUidSub =
JPAExpressions.select(tmv.id.max()).from(tmv).where(tmv.modelUid.eq(tmm.id));
String sql =
"""
INSERT INTO tb_map_sheet_anal_data_inference (
stage,
compare_yyyy,
target_yyyy,
map_sheet_num,
created_dttm,
updated_dttm,
file_created_yn,
detecting_cnt
)
SELECT
r.stage,
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS map_sheet_num,
now() AS created_dttm,
now() AS updated_dttm,
false AS file_created_yn,
count(*) AS detecting_cnt
FROM inference_results r
GROUP BY r.stage, r.input1, r.input2, r.map_id
ON CONFLICT (stage, compare_yyyy, target_yyyy, map_sheet_num)
DO UPDATE SET
updated_dttm = now(),
detecting_cnt = EXCLUDED.detecting_cnt
""";
Optional<InferenceResultDto.AnalResSummary> content =
Optional.ofNullable(
queryFactory
.select(
Projections.constructor(
InferenceResultDto.AnalResSummary.class,
mapSheetAnalEntity.id,
mapSheetAnalEntity.analTitle,
tmm.modelNm.concat(" ").concat(tmv.modelVer).as("modelInfo"),
mapSheetAnalEntity.targetYyyy,
mapSheetAnalEntity.compareYyyy,
mapSheetAnalEntity.analMapSheet,
mapSheetAnalEntity.analStrtDttm,
mapSheetAnalEntity.analEndDttm,
mapSheetAnalEntity.analSec,
mapSheetAnalEntity.analPredSec,
mapSheetAnalEntity.resultUrl,
mapSheetAnalEntity.detectingCnt,
mapSheetAnalEntity.accuracy,
mapSheetAnalEntity.analState,
Expressions.stringTemplate(
"fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState)))
.from(mapSheetAnalEntity)
.leftJoin(tmm)
.on(mapSheetAnalEntity.modelUid.eq(tmm.id))
.leftJoin(tmv)
.on(tmv.modelUid.eq(tmm.id).and(tmv.id.eq(latestVerUidSub)))
.where(mapSheetAnalEntity.id.eq(id))
.fetchOne());
return content;
return em.createNativeQuery(sql).executeUpdate();
}
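// Note: the ON CONFLICT targets above assume matching unique indexes, e.g.
// (hypothetical DDL, not part of this commit):
// CREATE UNIQUE INDEX ux_anal_data_inference
// ON tb_map_sheet_anal_data_inference (stage, compare_yyyy, target_yyyy, map_sheet_num);
// CREATE UNIQUE INDEX ux_anal_data_inference_geom
// ON tb_map_sheet_anal_data_inference_geom (uuid);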
/**
* Detection counts per class name for the analysis result detail
* Creates/updates per-geometry (uuid) analysis results based on the inference_results table.
*
* @param id
* @return
* <p>- dedupes by uuid (DISTINCT ON) - latest updated_dttm wins - geometry is handled as either WKB or
* WKT - file_created_yn = false on first insert
*
* @return number of affected rows
*/
@Override
public List<Dashboard> getDashboard(Long id) {
public int upsertGeomsFromInferenceResults() {
String sql =
"""
INSERT INTO tb_map_sheet_anal_data_inference_geom (
uuid, stage, cd_prob, compare_yyyy, target_yyyy, map_sheet_num,
class_before_cd, class_before_prob, class_after_cd, class_after_prob,
geom, area, data_uid, created_dttm, updated_dttm,
file_created_yn
)
SELECT
x.uuid, x.stage, x.cd_prob, x.compare_yyyy, x.target_yyyy, x.map_sheet_num,
x.class_before_cd, x.class_before_prob, x.class_after_cd, x.class_after_prob,
x.geom, x.area, x.data_uid, x.created_dttm, x.updated_dttm,
false AS file_created_yn
FROM (
SELECT DISTINCT ON (r.uuid)
r.uuid,
r.stage,
r.cd_prob,
r.input1 AS compare_yyyy,
r.input2 AS target_yyyy,
r.map_id AS map_sheet_num,
r.before_class AS class_before_cd,
r.before_probability AS class_before_prob,
r.after_class AS class_after_cd,
r.after_probability AS class_after_prob,
CASE
WHEN r.geometry IS NULL THEN NULL
WHEN left(r.geometry, 2) = '01'
THEN ST_SetSRID(ST_GeomFromWKB(decode(r.geometry, 'hex')), 5186)
ELSE ST_GeomFromText(r.geometry, 5186)
END AS geom,
r.area,
di.data_uid,
r.created_dttm,
r.updated_dttm
FROM inference_results r
JOIN tb_map_sheet_anal_data_inference di
ON di.stage = r.stage
AND di.compare_yyyy = r.input1
AND di.target_yyyy = r.input2
AND di.map_sheet_num = r.map_id
ORDER BY r.uuid, r.updated_dttm DESC NULLS LAST, r.uid DESC
) x
ON CONFLICT (uuid)
DO UPDATE SET
stage = EXCLUDED.stage,
cd_prob = EXCLUDED.cd_prob,
compare_yyyy = EXCLUDED.compare_yyyy,
target_yyyy = EXCLUDED.target_yyyy,
map_sheet_num = EXCLUDED.map_sheet_num,
class_before_cd = EXCLUDED.class_before_cd,
class_before_prob = EXCLUDED.class_before_prob,
class_after_cd = EXCLUDED.class_after_cd,
class_after_prob = EXCLUDED.class_after_prob,
geom = EXCLUDED.geom,
area = EXCLUDED.area,
data_uid = EXCLUDED.data_uid,
updated_dttm = now()
""";
return em.createNativeQuery(sql).executeUpdate();
}
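// Hedged helper mirroring the geometry CASE in the SQL above, handy for spot-checking
// inference_results rows on the Java side. It makes the same assumption the SQL does:
// a leading "01" means little-endian WKB hex, anything else is WKT.
private static org.locationtech.jts.geom.Geometry parseFlexibleGeometry(String raw)
throws org.locationtech.jts.io.ParseException {
if (raw == null) return null;
org.locationtech.jts.geom.Geometry geom =
raw.startsWith("01")
? new org.locationtech.jts.io.WKBReader()
.read(org.locationtech.jts.io.WKBReader.hexToBytes(raw))
: new org.locationtech.jts.io.WKTReader().read(raw);
geom.setSRID(5186); // matches ST_SetSRID(..., 5186) in the SQL
return geom;
}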
// ===============================
// Jobs
// ===============================
/**
* Returns the analysis data (data_uid) whose file generation has not finished.
*
* @param limit maximum number of rows to fetch
* @return list of data_uid values
*/
@Override
public List<Long> findPendingDataUids(int limit) {
return queryFactory
.select(inferenceEntity.id)
.from(inferenceEntity)
.where(inferenceEntity.fileCreatedYn.isFalse().or(inferenceEntity.fileCreatedYn.isNull()))
.orderBy(inferenceEntity.id.asc())
.limit(limit)
.fetch();
}
// ===============================
// Reset / Mark
// ===============================
/**
* Resets an analysis data row so its files can be regenerated.
*
* <p>- file_created_yn = false - file_created_dttm = null
*
* @return number of updated rows
*/
@Override
public int resetInferenceCreated(Long dataUid) {
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.update(inferenceEntity)
.set(inferenceEntity.fileCreatedYn, false)
.set(inferenceEntity.fileCreatedDttm, (ZonedDateTime) null)
.set(inferenceEntity.updatedDttm, now)
.where(inferenceEntity.id.eq(dataUid))
.execute();
}
/**
* Marks an analysis data row as having its files generated.
*
* @return number of updated rows
*/
@Override
public int markInferenceCreated(Long dataUid) {
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.update(inferenceEntity)
.set(inferenceEntity.fileCreatedYn, true)
.set(inferenceEntity.fileCreatedDttm, now)
.set(inferenceEntity.updatedDttm, now)
.where(inferenceEntity.id.eq(dataUid))
.execute();
}
/**
* Resets the file-generation state of every geometry belonging to an analysis data row.
*
* @return number of updated rows
*/
@Override
public int resetGeomCreatedByDataUid(Long dataUid) {
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.update(inferenceGeomEntity)
.set(inferenceGeomEntity.fileCreatedYn, false)
.set(inferenceGeomEntity.fileCreatedDttm, (ZonedDateTime) null)
.set(inferenceGeomEntity.updatedDttm, now)
.where(inferenceGeomEntity.dataUid.eq(dataUid))
.execute();
}
/**
* Marks geometries (geo_uid) whose files have been generated as completed.
*
* @param geoUids UIDs of the geometries whose files were generated
* @return number of updated rows
*/
@Override
public int markGeomCreatedByGeoUids(List<Long> geoUids) {
if (geoUids == null || geoUids.isEmpty()) {
return 0;
}
ZonedDateTime now = ZonedDateTime.now();
return (int)
queryFactory
.update(inferenceGeomEntity)
.set(inferenceGeomEntity.fileCreatedYn, true)
.set(inferenceGeomEntity.fileCreatedDttm, now)
.set(inferenceGeomEntity.updatedDttm, now)
.where(inferenceGeomEntity.geoUid.in(geoUids))
.execute();
}
// ===============================
// Export source (Entity only)
// ===============================
/**
* Fetches geometry rows for SHP / GeoJSON file generation.
*
* <p>- Geometries belonging to a given analysis data row (data_uid) - geometry present - only
* rows whose files have not been generated yet
*/
@Override
public List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(
Long dataUid, int limit) {
return queryFactory
.selectFrom(inferenceGeomEntity)
.where(
inferenceGeomEntity.dataUid.eq(dataUid),
inferenceGeomEntity.geom.isNotNull(),
inferenceGeomEntity
.fileCreatedYn
.isFalse()
.or(inferenceGeomEntity.fileCreatedYn.isNull()))
.orderBy(inferenceGeomEntity.geoUid.asc())
.limit(limit)
.fetch();
}
}
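Taken together, the Jobs and Reset/Mark methods above suggest a polling worker. A minimal sketch of how a scheduler might chain them (the job class, the repository interface that exposes these methods, and GeoFileWriter are illustrative assumptions, not part of this commit):
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
// Hypothetical worker: export files for pending analysis data, then mark or reset.
@Component
@RequiredArgsConstructor
public class InferenceFileExportJob {
private final InferenceExportRepository repository; // assumed to expose the methods above
private final GeoFileWriter geoFileWriter; // hypothetical SHP/GeoJSON writer
@Scheduled(fixedDelay = 30_000)
public void exportPendingFiles() {
for (Long dataUid : repository.findPendingDataUids(10)) {
try {
List<MapSheetAnalDataInferenceGeomEntity> geoms =
repository.findGeomEntitiesByDataUid(dataUid, 1_000);
geoFileWriter.write(dataUid, geoms); // hypothetical call
repository.markGeomCreatedByGeoUids(
geoms.stream().map(MapSheetAnalDataInferenceGeomEntity::getGeoUid).toList());
repository.markInferenceCreated(dataUid);
} catch (Exception e) {
repository.resetInferenceCreated(dataUid); // leave it pending for the next run
}
}
}
}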

View File

@@ -0,0 +1,8 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface MapSheetAnalDataInferenceGeomRepository
extends JpaRepository<MapSheetAnalDataInferenceGeomEntity, Long>,
MapSheetAnalDataInferenceGeomRepositoryCustom {}

View File

@@ -0,0 +1,3 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
public interface MapSheetAnalDataInferenceGeomRepositoryCustom {}

View File

@@ -0,0 +1,9 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class MapSheetAnalDataInferenceGeomRepositoryImpl
implements MapSheetAnalDataInferenceGeomRepositoryCustom {}

View File

@@ -0,0 +1,8 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface MapSheetAnalDataInferenceRepository
extends JpaRepository<MapSheetAnalDataInferenceEntity, Long>,
MapSheetAnalDataInferenceRepositoryCustom {}

View File

@@ -0,0 +1,3 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
public interface MapSheetAnalDataInferenceRepositoryCustom {}

View File

@@ -0,0 +1,13 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.querydsl.jpa.impl.JPAQueryFactory;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class MapSheetAnalDataInferenceRepositoryImpl
implements MapSheetAnalDataInferenceRepositoryCustom {
private final JPAQueryFactory queryFactory;
}

View File

@@ -0,0 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface MapSheetAnalDataRepository
extends JpaRepository<MapSheetAnalEntity, Long>, MapSheetAnalDataRepositoryCustom {}

View File

@@ -0,0 +1,32 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResSummary;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import java.util.Optional;
import org.springframework.data.domain.Page;
public interface MapSheetAnalDataRepositoryCustom {
Page<AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq);
Optional<AnalResSummary> getInferenceResultSummary(Long id);
Page<InferenceResultDto.Geom> getInferenceGeomList(
Long id, InferenceResultDto.SearchGeoReq searchGeoReq);
Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom(
List<Long> dataIds, SearchGeoReq searchReq);
List<Long> getSheets(Long id);
List<Dashboard> getDashboard(Long id);
List<MapSheetAnalDataEntity> listAnalyGeom(@NotNull Long id);
}

View File

@@ -0,0 +1,371 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResSummary;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelMngBakEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Order;
import com.querydsl.core.types.OrderSpecifier;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.JPQLQuery;
import com.querydsl.jpa.impl.JPAQuery;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class MapSheetAnalDataRepositoryImpl implements MapSheetAnalDataRepositoryCustom {
private final JPAQueryFactory queryFactory;
private final QModelMngBakEntity tmm = QModelMngBakEntity.modelMngBakEntity;
private final QModelVerEntity tmv = QModelVerEntity.modelVerEntity;
private final QMapSheetAnalEntity mapSheetAnalEntity = QMapSheetAnalEntity.mapSheetAnalEntity;
private final QMapSheetAnalDataEntity mapSheetAnalDataEntity =
QMapSheetAnalDataEntity.mapSheetAnalDataEntity;
private final QMapSheetAnalDataGeomEntity mapSheetAnalDataGeomEntity =
QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
private final QMapSheetAnalSttcEntity mapSheetAnalSttcEntity =
QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
/**
* Retrieves the analysis result list.
*
* @param searchReq search and paging conditions
* @return paged analysis result list
*/
@Override
public Page<AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq) {
Pageable pageable = searchReq.toPageable();
// "0000" means fetch all (no status filter)
BooleanBuilder builder = new BooleanBuilder();
if (searchReq.getStatCode() != null && !"0000".equals(searchReq.getStatCode())) {
builder.and(mapSheetAnalEntity.analState.eq(searchReq.getStatCode()));
}
// Title keyword
if (searchReq.getTitle() != null) {
builder.and(mapSheetAnalEntity.analTitle.like("%" + searchReq.getTitle() + "%"));
}
List<AnalResList> content =
queryFactory
.select(
Projections.constructor(
InferenceResultDto.AnalResList.class,
mapSheetAnalEntity.id,
mapSheetAnalEntity.analTitle,
mapSheetAnalEntity.analMapSheet,
mapSheetAnalEntity.detectingCnt,
mapSheetAnalEntity.analStrtDttm,
mapSheetAnalEntity.analEndDttm,
mapSheetAnalEntity.analSec,
mapSheetAnalEntity.analPredSec,
mapSheetAnalEntity.analState,
Expressions.stringTemplate(
"fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState),
mapSheetAnalEntity.gukyuinUsed))
.from(mapSheetAnalEntity)
.where(builder)
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(mapSheetAnalEntity.id.desc())
.fetch();
long total =
queryFactory
.select(mapSheetAnalEntity.id)
.from(mapSheetAnalEntity)
.where(builder)
.fetchCount();
return new PageImpl<>(content, pageable, total);
}
/**
* Retrieves the analysis result summary.
*
* @param id analysis UID
* @return summary information, if present
*/
@Override
public Optional<AnalResSummary> getInferenceResultSummary(Long id) {
// 1. Subquery that fetches the latest model version UID
JPQLQuery<Long> latestVerUidSub =
JPAExpressions.select(tmv.id.max()).from(tmv).where(tmv.modelUid.eq(tmm.id));
Optional<InferenceResultDto.AnalResSummary> content =
Optional.ofNullable(
queryFactory
.select(
Projections.constructor(
InferenceResultDto.AnalResSummary.class,
mapSheetAnalEntity.id,
mapSheetAnalEntity.analTitle,
tmm.modelNm.concat(" ").concat(tmv.modelVer).as("modelInfo"),
mapSheetAnalEntity.targetYyyy,
mapSheetAnalEntity.compareYyyy,
mapSheetAnalEntity.analMapSheet,
mapSheetAnalEntity.analStrtDttm,
mapSheetAnalEntity.analEndDttm,
mapSheetAnalEntity.analSec,
mapSheetAnalEntity.analPredSec,
mapSheetAnalEntity.resultUrl,
mapSheetAnalEntity.detectingCnt,
mapSheetAnalEntity.accuracy,
mapSheetAnalEntity.analState,
Expressions.stringTemplate(
"fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState)))
.from(mapSheetAnalEntity)
.leftJoin(tmm)
.on(mapSheetAnalEntity.modelUid.eq(tmm.id))
.leftJoin(tmv)
.on(tmv.modelUid.eq(tmm.id).and(tmv.id.eq(latestVerUidSub)))
.where(mapSheetAnalEntity.id.eq(id))
.fetchOne());
return content;
}
/**
* Detection counts per class name for the analysis result detail.
*
* @param id analysis UID
* @return counts grouped by class
*/
@Override
public List<Dashboard> getDashboard(Long id) {
return queryFactory
.select(
Projections.constructor(
Dashboard.class,
mapSheetAnalSttcEntity.id.classAfterCd,
mapSheetAnalSttcEntity.classAfterCnt.sum()))
.from(mapSheetAnalSttcEntity)
.where(mapSheetAnalSttcEntity.id.analUid.eq(id))
.groupBy(mapSheetAnalSttcEntity.id.classAfterCd)
.orderBy(mapSheetAnalSttcEntity.id.classAfterCd.asc())
.fetch();
}
@Override
public List<MapSheetAnalDataEntity> listAnalyGeom(Long id) {
QMapSheetAnalDataEntity analy = QMapSheetAnalDataEntity.mapSheetAnalDataEntity;
return queryFactory.selectFrom(analy).where(analy.analUid.eq(id)).fetch();
}
/**
* Analysis result detail list.
*
* @param searchReq search and paging conditions
* @return paged geometry entities
*/
@Override
public Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom(
List<Long> ids, SearchGeoReq searchReq) {
// Detected-geometry alias
QMapSheetAnalDataGeomEntity detectedEntity =
QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
Pageable pageable = searchReq.toPageable();
// Search conditions
JPAQuery<MapSheetAnalDataGeomEntity> query =
queryFactory
.selectFrom(detectedEntity)
.where(
detectedEntity.dataUid.in(ids),
eqTargetClass(detectedEntity, searchReq.getTargetClass()),
eqCompareClass(detectedEntity, searchReq.getCompareClass()),
containsMapSheetNum(detectedEntity, searchReq.getMapSheetNum()));
// count
long total = query.fetchCount();
// Take the sort from the Pageable; default to createdDttm desc when absent
List<OrderSpecifier<?>> orders = getOrderSpecifiers(pageable.getSort());
if (orders.isEmpty()) {
orders.add(detectedEntity.createdDttm.desc());
}
List<MapSheetAnalDataGeomEntity> content =
query
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(orders.toArray(new OrderSpecifier[0]))
.fetch();
return new PageImpl<>(content, pageable, total);
}
/**
* Analysis result geometry list.
*
* @param searchGeoReq search and paging conditions
* @return paged geometry DTOs
*/
@Override
public Page<InferenceResultDto.Geom> getInferenceGeomList(Long id, SearchGeoReq searchGeoReq) {
Pageable pageable = searchGeoReq.toPageable();
BooleanBuilder builder = new BooleanBuilder();
// Inference result id
builder.and(mapSheetAnalEntity.id.eq(id));
// Target-year classification
if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().isEmpty()) {
builder.and(
mapSheetAnalDataGeomEntity
.classAfterCd
.toLowerCase()
.eq(searchGeoReq.getTargetClass().toLowerCase()));
}
// Compare-year classification
if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().isEmpty()) {
builder.and(
mapSheetAnalDataGeomEntity
.classBeforeCd
.toLowerCase()
.eq(searchGeoReq.getCompareClass().toLowerCase()));
}
// Analysis map sheets
if (searchGeoReq.getMapSheetNum() != null && !searchGeoReq.getMapSheetNum().isEmpty()) {
List<Long> mapSheetNum = searchGeoReq.getMapSheetNum();
builder.and(mapSheetAnalDataGeomEntity.mapSheetNum.in(mapSheetNum));
}
List<InferenceResultDto.Geom> content =
queryFactory
.select(
Projections.constructor(
InferenceResultDto.Geom.class,
mapSheetAnalDataGeomEntity.compareYyyy,
mapSheetAnalDataGeomEntity.targetYyyy,
mapSheetAnalDataGeomEntity.classBeforeCd,
mapSheetAnalDataGeomEntity.classBeforeProb,
mapSheetAnalDataGeomEntity.classAfterCd,
mapSheetAnalDataGeomEntity.classAfterProb,
mapSheetAnalDataGeomEntity.mapSheetNum,
mapSheetAnalDataGeomEntity.geom,
mapSheetAnalDataGeomEntity.geomCenter))
.from(mapSheetAnalEntity)
.join(mapSheetAnalDataEntity)
.on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
.join(mapSheetAnalDataGeomEntity)
.on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id))
.where(builder)
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.fetch();
long total =
queryFactory
.select(mapSheetAnalDataGeomEntity.id)
.from(mapSheetAnalEntity)
.join(mapSheetAnalDataEntity)
.on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
.join(mapSheetAnalDataGeomEntity)
.on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id))
.where(builder)
.fetchCount();
return new PageImpl<>(content, pageable, total);
}
/**
* List of inferred 1:5000 map sheets.
*
* @param id analysis UID
* @return map sheet numbers
*/
@Override
public List<Long> getSheets(Long id) {
return queryFactory
.select(mapSheetAnalDataEntity.mapSheetNum)
.from(mapSheetAnalEntity)
.join(mapSheetAnalDataEntity)
.on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
.where(mapSheetAnalEntity.id.eq(id))
.groupBy(mapSheetAnalDataEntity.mapSheetNum)
.fetch();
}
/** Converts the Pageable Sort into QueryDSL OrderSpecifiers */
@SuppressWarnings({"unchecked", "rawtypes"})
private List<OrderSpecifier<?>> getOrderSpecifiers(Sort sort) {
List<OrderSpecifier<?>> orders = new ArrayList<>();
if (sort.isSorted()) {
QMapSheetAnalDataGeomEntity entity = QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
for (Sort.Order order : sort) {
Order direction = order.isAscending() ? Order.ASC : Order.DESC;
String property = order.getProperty();
// Handle only whitelisted fields
switch (property) {
case "classBeforeCd" -> orders.add(new OrderSpecifier(direction, entity.classBeforeCd));
case "classBeforeProb" ->
orders.add(new OrderSpecifier(direction, entity.classBeforeProb));
case "classAfterCd" -> orders.add(new OrderSpecifier(direction, entity.classAfterCd));
case "classAfterProb" -> orders.add(new OrderSpecifier(direction, entity.classAfterProb));
case "mapSheetNum" -> orders.add(new OrderSpecifier(direction, entity.mapSheetNum));
case "compareYyyy" -> orders.add(new OrderSpecifier(direction, entity.compareYyyy));
case "targetYyyy" -> orders.add(new OrderSpecifier(direction, entity.targetYyyy));
case "area" -> orders.add(new OrderSpecifier(direction, entity.area));
case "createdDttm" -> orders.add(new OrderSpecifier(direction, entity.createdDttm));
case "updatedDttm" -> orders.add(new OrderSpecifier(direction, entity.updatedDttm));
// Ignore any other field
default -> {}
}
}
}
return orders;
}
private BooleanExpression eqTargetClass(
QMapSheetAnalDataGeomEntity detectedEntity, String targetClass) {
return targetClass != null && !targetClass.isEmpty()
? detectedEntity.classAfterCd.toLowerCase().eq(targetClass.toLowerCase())
: null;
}
private BooleanExpression eqCompareClass(
QMapSheetAnalDataGeomEntity detectedEntity, String compareClass) {
return compareClass != null && !compareClass.isEmpty()
? detectedEntity.classBeforeCd.toLowerCase().eq(compareClass.toLowerCase())
: null;
}
private BooleanExpression containsMapSheetNum(
QMapSheetAnalDataGeomEntity detectedEntity, List<Long> mapSheet) {
if (mapSheet == null || mapSheet.isEmpty()) {
return null;
}
return detectedEntity.mapSheetNum.in(mapSheet);
}
}
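For reference, the whitelist in getOrderSpecifiers means only the listed properties are ever applied; a hypothetical request demonstrating it:
// "area" is whitelisted and applied; an unknown property such as "foo" would be silently ignored.
Pageable pageable = PageRequest.of(0, 20, Sort.by(Sort.Direction.DESC, "area"));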

View File

@@ -21,7 +21,7 @@ public interface MapSheetMngRepositoryCustom {
Optional<MapSheetMngHstEntity> findMapSheetMngHstInfo(Long hstUid);
int insertMapSheetOrgDataToMapSheetMngHst(int mngYyyy, String mngPath);
List<MapSheetMngDto.MngFilesDto> findHstUidToMapSheetFileList(Long hstUid);
@@ -45,6 +45,10 @@ public interface MapSheetMngRepositoryCustom {
void updateByHstUidMngFileState(Long hstUid, String fileState);
void updateByFileUidMngFileState(Long fileUid, String fileState);
void deleteByNotInFileUidMngFile(Long hstUid, List<Long> fileUids);
void updateYearState(int yyyy, String status);
Page<MapSheetMngDto.ErrorDataDto> findMapSheetErrorList(
@@ -52,11 +56,13 @@ public interface MapSheetMngRepositoryCustom {
MapSheetMngDto.ErrorDataDto findMapSheetError(Long hstUid);
List<MapSheetMngDto.MngFilesDto> findByHstUidMapSheetFileList(Long hstUid);
MapSheetMngDto.MngFilesDto findByFileUidMapSheetFile(Long fileUid);
void updateHstFileSizes(Long hstUid, long tifSizeBytes, long tfwSizeBytes, long totalSizeBytes);
int findByYearFileNameFileCount(int mngYyyy, String fileName);
Page<YearEntity> getYears(YearSearchReq req);
}

View File

@@ -82,7 +82,7 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
mapSheetMngEntity.mngState,
mapSheetMngEntity.syncState,
mapSheetMngEntity.syncCheckState,
mapSheetMngHstEntity.count().as("syncTotalCnt"),
new CaseBuilder()
.when(mapSheetMngHstEntity.dataState.eq("DONE"))
.then(1L)
@@ -144,6 +144,20 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
.then(1L)
.otherwise(0L)
.sum(),
new CaseBuilder()
.when(mapSheetMngHstEntity.syncState.eq("NOFILE"))
.then(1L)
.otherwise(0L)
.sum(),
new CaseBuilder()
.when(
mapSheetMngHstEntity
.syncCheckState
.eq("DONE")
.and(mapSheetMngHstEntity.syncState.eq("NOFILE")))
.then(1L)
.otherwise(0L)
.sum(),
mapSheetMngEntity.createdDttm,
mapSheetMngHstEntity.syncEndDttm.max()))
.from(mapSheetMngEntity)
@@ -212,7 +226,7 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
mapSheetMngEntity.mngState,
mapSheetMngEntity.syncState,
mapSheetMngEntity.syncCheckState,
mapSheetMngHstEntity.count().as("syncTotalCnt"),
new CaseBuilder()
.when(mapSheetMngHstEntity.dataState.eq("DONE"))
.then(1L)
@@ -274,6 +288,20 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
.then(1L)
.otherwise(0L)
.sum(),
new CaseBuilder()
.when(mapSheetMngHstEntity.syncState.eq("NOFILE"))
.then(1L)
.otherwise(0L)
.sum(),
new CaseBuilder()
.when(
mapSheetMngHstEntity
.syncCheckState
.eq("DONE")
.and(mapSheetMngHstEntity.syncState.eq("NOFILE")))
.then(1L)
.otherwise(0L)
.sum(),
mapSheetMngEntity.createdDttm,
mapSheetMngHstEntity.syncEndDttm.max()))
.from(mapSheetMngEntity)
@@ -298,7 +326,6 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
mapSheetMngHstEntity.syncState.ne("DONE").and(mapSheetMngHstEntity.syncState.ne("NOTYET")));
if (searchReq.getSyncState() != null && !searchReq.getSyncState().isEmpty()) {
whereBuilder.and(mapSheetMngHstEntity.syncState.eq(searchReq.getSyncState()));
}
@@ -366,9 +393,9 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
queryFactory
.select(mapSheetMngHstEntity.hstUid.count())
.from(mapSheetMngHstEntity)
.innerJoin(mapSheetMngHstEntity.mapInkx5kByCode, mapInkx5kEntity)
.leftJoin(mapInkx5kEntity.mapInkx50k, mapInkx50kEntity)
.where(whereBuilder)
.fetchOne();
@@ -423,10 +450,11 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
}
@Override
public List<MapSheetMngDto.MngFilesDto> findByHstUidMapSheetFileList(Long hstUid) {
BooleanBuilder whereBuilder = new BooleanBuilder();
whereBuilder.and(mapSheetMngFileEntity.hstUid.eq(hstUid));
whereBuilder.and(mapSheetMngFileEntity.fileDel.eq(false));
List<MapSheetMngDto.MngFilesDto> foundContent =
queryFactory
@@ -453,6 +481,7 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
public List<MapSheetMngDto.MngFilesDto> findHstUidToMapSheetFileList(Long hstUid) {
BooleanBuilder whereBuilder = new BooleanBuilder();
whereBuilder.and(mapSheetMngFileEntity.hstUid.eq(hstUid));
whereBuilder.and(mapSheetMngFileEntity.fileDel.eq(false));
List<MapSheetMngDto.MngFilesDto> foundContent =
queryFactory
@@ -506,7 +535,7 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
}
@Override
public MapSheetMngDto.MngFilesDto findByFileUidMapSheetFile(Long fileUid) {
MapSheetMngDto.MngFilesDto foundContent =
queryFactory
@@ -611,8 +640,9 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
public void deleteByHstUidMngFile(Long hstUid) {
long deletedFileCount =
queryFactory
.update(mapSheetMngFileEntity)
.set(mapSheetMngFileEntity.fileDel, true)
.where(mapSheetMngFileEntity.hstUid.eq(hstUid))
.execute();
}
@@ -620,7 +650,8 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
public void deleteByFileUidMngFile(Long fileUid) {
long fileCount =
queryFactory
.update(mapSheetMngFileEntity)
.set(mapSheetMngFileEntity.fileDel, true)
.where(mapSheetMngFileEntity.fileUid.eq(fileUid))
.execute();
}
@@ -635,6 +666,46 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
.execute();
}
@Override
public void deleteByNotInFileUidMngFile(Long hstUid, List<Long> fileUids) {
long execCount =
queryFactory
.update(mapSheetMngFileEntity)
.set(mapSheetMngFileEntity.fileDel, true)
.where(
mapSheetMngFileEntity
.hstUid
.eq(hstUid)
.and(mapSheetMngFileEntity.fileUid.notIn(fileUids)))
.execute();
}
@Override
public void updateByFileUidMngFileState(Long fileUid, String fileState) {
long execCount =
queryFactory
.update(mapSheetMngFileEntity)
.set(mapSheetMngFileEntity.fileState, fileState)
.where(mapSheetMngFileEntity.fileUid.eq(fileUid))
.execute();
}
@Override
public int findByYearFileNameFileCount(int mngYyyy, String fileName) {
Long execCount =
queryFactory
.select(mapSheetMngFileEntity.count())
.from(mapSheetMngFileEntity)
.where(
mapSheetMngFileEntity
.mngYyyy
.eq(mngYyyy)
.and(mapSheetMngFileEntity.fileName.eq(fileName)))
.fetchOne();
return Math.toIntExact(execCount);
}
@Override
public void mngFileSave(@Valid MapSheetMngDto.MngFileAddReq addReq) {
long fileCount =
@@ -664,13 +735,14 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
}
@Override
public int insertMapSheetOrgDataToMapSheetMngHst(int mngYyyy, String mngPath) {
String sql =
"""
INSERT INTO tb_map_sheet_mng_hst
(
mng_yyyy
,map_sheet_path
,map_sheet_code
,map_sheet_num
,map_sheet_name
@@ -680,14 +752,15 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
,use_inference
)
select
:mngYyyy as mng_yyyy,
:mngPath as map_sheet_path,
fid as map_sheet_code,
mapidcd_no as map_sheet_num,
mapid_nm as map_sheet_name,
fid as map_sheet_code_src,
5000 as scale_ratio,
((mapidcd_no::INTEGER)/1000) as ref_map_sheet_num,
use_inference
from
tb_map_inkx_5k
""";
@@ -695,6 +768,7 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
// Build and execute the native query
Query query = (Query) em.createNativeQuery(sql);
query.setParameter("mngYyyy", mngYyyy);
query.setParameter("mngPath", mngPath);
int exeCnt = query.executeUpdate(); // execute (returns the number of affected rows)

View File

@@ -0,0 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.scene;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx50kEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface MapInkx50kRepository
extends JpaRepository<MapInkx50kEntity, Long>, MapInkx50kRepositoryCustom {}

View File

@@ -0,0 +1,8 @@
package com.kamco.cd.kamcoback.postgres.repository.scene;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx50kEntity;
public interface MapInkx50kRepositoryCustom {
MapInkx50kEntity findByMapidCdParentNo(String mapidcdNo);
}

View File

@@ -0,0 +1,28 @@
package com.kamco.cd.kamcoback.postgres.repository.scene;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx50kEntity.mapInkx50kEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx50kEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
public class MapInkx50kRepositoryImpl extends QuerydslRepositorySupport
implements MapInkx50kRepositoryCustom {
private final JPAQueryFactory queryFactory;
public MapInkx50kRepositoryImpl(JPAQueryFactory queryFactory) {
super(MapInkx50kEntity.class);
this.queryFactory = queryFactory;
}
@Override
public MapInkx50kEntity findByMapidCdParentNo(String mapidcdNo) {
String parentCd = mapidcdNo.substring(0, 5);
return queryFactory
.selectFrom(mapInkx50kEntity)
.where(mapInkx50kEntity.mapidcdNo.eq(parentCd))
.fetchOne();
}
}
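A quick usage sketch (values illustrative): the first five digits of a 1:5000 sheet code identify its 1:50000 parent, so "31540687" resolves via parent code "31540". The repository bean name below is an assumption.
// Illustrative: look up the 1:50000 parent of a 1:5000 map sheet code.
MapInkx50kEntity parent = mapInkx50kRepository.findByMapidCdParentNo("31540687");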

View File

@@ -1,9 +1,20 @@
package com.kamco.cd.kamcoback.postgres.repository.scene;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto.MapList;
import java.util.List;
import java.util.Optional;
import org.springframework.data.domain.Page;
public interface MapInkx5kRepositoryCustom {
List<MapInkx5kEntity> listGetScenes5k(List<String> codes);
Page<MapList> findMapInkxMngList(
MapInkxMngDto.searchReq searchReq, String useInference, String searchVal);
Long findByMapidCdNoExists(String mapidcdNo);
Optional<MapInkx5kEntity> findByMapidCdNoInfo(String mapidcdNo);
}

View File

@@ -1,9 +1,23 @@
package com.kamco.cd.kamcoback.postgres.repository.scene;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx50kEntity.mapInkx50kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity.mapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto.MapList;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto.searchReq;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
public class MapInkx5kRepositoryImpl extends QuerydslRepositorySupport
@@ -25,4 +39,84 @@ public class MapInkx5kRepositoryImpl extends QuerydslRepositorySupport
.orderBy(map5k.mapidcdNo.asc())
.fetch();
}
@Override
public Page<MapList> findMapInkxMngList(
searchReq searchReq, String useInference, String searchVal) {
Pageable pageable = searchReq.toPageable();
List<MapInkxMngDto.MapList> foundContent =
queryFactory
.select(
Projections.constructor(
MapInkxMngDto.MapList.class,
Expressions.numberTemplate(
Integer.class,
"row_number() over(order by {0} asc)",
mapInkx5kEntity.mapidcdNo),
mapInkx5kEntity.mapidcdNo,
mapInkx50kEntity.mapidcdNo,
mapInkx5kEntity.mapidNm,
mapInkx5kEntity.createdDate,
mapInkx5kEntity.modifiedDate,
// Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')",
// mapInkx5kEntity.createdDate),
// Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')",
// mapInkx5kEntity.modifiedDate),
mapInkx5kEntity.useInference))
.from(mapInkx5kEntity)
.innerJoin(mapInkx5kEntity.mapInkx50k, mapInkx50kEntity)
.where(searchUseInference(useInference), searchValueMapCdNm(searchVal))
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(mapInkx5kEntity.mapidcdNo.asc())
.fetch();
Long countQuery =
queryFactory
.select(mapInkx5kEntity.count())
.from(mapInkx5kEntity)
.innerJoin(mapInkx5kEntity.mapInkx50k, mapInkx50kEntity)
.where(searchUseInference(useInference), searchValueMapCdNm(searchVal))
.fetchOne();
return new PageImpl<>(foundContent, pageable, countQuery);
}
@Override
public Long findByMapidCdNoExists(String mapidcdNo) {
return queryFactory
.select(mapInkx5kEntity.count())
.from(mapInkx5kEntity)
.where(mapInkx5kEntity.mapidcdNo.eq(mapidcdNo))
.fetchOne();
}
@Override
public Optional<MapInkx5kEntity> findByMapidCdNoInfo(String mapidcdNo) {
return Optional.ofNullable(
queryFactory
.selectFrom(mapInkx5kEntity)
.where(mapInkx5kEntity.mapidcdNo.eq(mapidcdNo))
.fetchOne());
}
private BooleanExpression searchUseInference(String useInference) {
if (Objects.isNull(useInference)) {
return null;
}
return mapInkx5kEntity.useInference.eq(useInference);
}
private BooleanExpression searchValueMapCdNm(String searchVal) {
if (Objects.isNull(searchVal)) {
return null;
}
return mapInkx5kEntity
.mapidcdNo
.like("%" + searchVal + "%")
.or(mapInkx5kEntity.mapidNm.like("%" + searchVal + "%"));
}
}

View File

@@ -0,0 +1,113 @@
package com.kamco.cd.kamcoback.scene;
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto;
import com.kamco.cd.kamcoback.scene.service.MapInkxMngService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.validation.Valid;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@Tag(name = "도엽 관리", description = "도엽 관리 API")
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/scene")
public class MapInkxMngApiController {
private final MapInkxMngService mapInkxMngService;
@Operation(summary = "목록 조회", description = "도엽 목록 조회")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = MapInkxMngDto.MapList.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@GetMapping
public ApiResponseDto<Page<MapInkxMngDto.MapList>> findMapInkxMngList(
@RequestParam int page,
@RequestParam(defaultValue = "20") int size,
@RequestParam(required = false) String useInference,
@RequestParam(required = false) String searchVal) {
MapInkxMngDto.searchReq searchReq = new MapInkxMngDto.searchReq(page, size, "");
return ApiResponseDto.ok(
mapInkxMngService.findMapInkxMngList(searchReq, useInference, searchVal));
}
@Operation(summary = "저장", description = "도엽정보를 저장 합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "도엽정보 저장 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = ApiResponseDto.ResponseObj.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping
public ApiResponseDto<ApiResponseDto.ResponseObj> saveMapInkx5k(
@io.swagger.v3.oas.annotations.parameters.RequestBody(
description = "도엽정보 생성 요청 정보",
required = true,
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = MapInkxMngDto.AddMapReq.class)))
@RequestBody
@Valid
MapInkxMngDto.AddMapReq addReq) {
return ApiResponseDto.okObject(mapInkxMngService.saveMapInkx5k(addReq));
}
@Operation(summary = "추론제외 업데이트", description = "추론제외 업데이트 합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "추론제외 업데이트 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = ApiResponseDto.ResponseObj.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PutMapping("/use-inference")
public ApiResponseDto<ApiResponseDto.ResponseObj> updateUseInference(
@io.swagger.v3.oas.annotations.parameters.RequestBody(
description = "추론제외 업데이트 정보",
required = true,
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = MapInkxMngDto.UseInferReq.class)))
@RequestBody
@Valid
MapInkxMngDto.UseInferReq useInferReq) {
return ApiResponseDto.okObject(mapInkxMngService.updateUseInference(useInferReq));
}
}

View File

@@ -0,0 +1,156 @@
package com.kamco.cd.kamcoback.scene.dto;
import com.fasterxml.jackson.databind.JsonNode;
import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose;
import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
import io.swagger.v3.oas.annotations.media.Schema;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
public class MapInkxMngDto {
@CodeExpose
@Getter
@AllArgsConstructor
public enum UseInferenceType implements EnumType {
USE("사용중"),
EXCEPT("영구 추론제외");
private final String desc;
@Override
public String getId() {
return name();
}
@Override
public String getText() {
return desc;
}
}
@Schema(name = "Basic", description = "Basic")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class Basic {
private Integer fid;
private String mapidcdNo;
private String mapidNm;
private JsonNode geom;
private String useInference;
private ZonedDateTime createdDttm;
private ZonedDateTime updatedDttm;
}
@Schema(name = "MapList", description = "목록 항목")
@Getter
@Setter
@NoArgsConstructor
public static class MapList {
private Integer rowNum;
private String mapidcdNo5k;
private String mapidcdNo50k;
private String mapidNm;
private String createdDttm;
private String updatedDttm;
private String useInference;
private ZonedDateTime createdDttmTime;
private ZonedDateTime updatedDttmTime;
// Querydsl maps list rows through this constructor -> take the raw query columns and convert them in Java here
public MapList(
Integer rowNum,
String mapidcdNo5k,
String mapidcdNo50k,
String mapidNm,
ZonedDateTime createdDttmTime,
ZonedDateTime updatedDttmTime,
String useInference) {
this.rowNum = rowNum;
this.mapidcdNo5k = mapidcdNo5k;
this.mapidcdNo50k = mapidcdNo50k;
this.mapidNm = mapidNm;
DateTimeFormatter fmt =
DateTimeFormatter.ofPattern("yyyy-MM-dd").withZone(ZoneId.of("Asia/Seoul"));
this.createdDttm = fmt.format(createdDttmTime);
this.updatedDttm = fmt.format(updatedDttmTime);
this.createdDttmTime = createdDttmTime;
this.updatedDttmTime = updatedDttmTime;
this.useInference =
useInference.equals("USE")
? UseInferenceType.USE.getDesc()
: UseInferenceType.EXCEPT.getDesc();
}
}
@Schema(name = "searchReq", description = "검색 요청")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class searchReq {
// Paging parameters
private int page = 0;
private int size = 20;
private String sort;
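// Expected format: "property,direction" (e.g. "createdDttm,desc"); direction defaults to ASC.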
public Pageable toPageable() {
if (sort != null && !sort.isEmpty()) {
String[] sortParams = sort.split(",");
String property = sortParams[0];
Sort.Direction direction =
sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC;
return PageRequest.of(page, size, Sort.by(direction, property));
}
return PageRequest.of(page, size);
}
}
@Schema(name = "AddMapReq", description = "등록 요청")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class AddMapReq {
@Schema(description = "도엽번호", example = "31540687")
private String mapidcdNo;
@Schema(description = "도엽명", example = "공덕")
private String mapidNm;
@Schema(
description = "좌표 목록 (한 줄에 한 점, '경도 위도' 형식)",
example =
"127.17500001632317 36.17499998262991\n"
+ "127.14999995475043 36.17500002877932\n"
+ "127.15000004313612 36.199999984012415\n"
+ "127.1750000466954 36.20000001863179")
private String coordinates;
}
@Schema(name = "UseInferReq", description = "추론제외 업데이트 요청")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class UseInferReq {
private String mapidcdNo;
private String useInference;
}
}

View File

@@ -0,0 +1,58 @@
package com.kamco.cd.kamcoback.scene.service;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto.ResponseObj;
import com.kamco.cd.kamcoback.postgres.core.MapInkxMngCoreService;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto.MapList;
import com.kamco.cd.kamcoback.scene.dto.MapInkxMngDto.UseInferReq;
import jakarta.validation.Valid;
import java.util.ArrayList;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.PrecisionModel;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
public class MapInkxMngService {
private final MapInkxMngCoreService mapInkxMngCoreService;
public Page<MapList> findMapInkxMngList(
MapInkxMngDto.searchReq searchReq, String useInference, String searchVal) {
return mapInkxMngCoreService.findMapInkxMngList(searchReq, useInference, searchVal);
}
public ResponseObj saveMapInkx5k(@Valid MapInkxMngDto.AddMapReq req) {
String[] coordinates = req.getCoordinates().split("\\r?\\n");
List<Coordinate> coords = new ArrayList<>();
for (String line : coordinates) {
String[] parts = line.trim().split("\\s+");
double lon = Double.parseDouble(parts[0]); // longitude
double lat = Double.parseDouble(parts[1]); // latitude
coords.add(new Coordinate(lon, lat));
}
// A polygon ring must be closed
if (!coords.get(0).equals2D(coords.get(coords.size() - 1))) {
coords.add(coords.get(0));
}
GeometryFactory geometryFactory = new GeometryFactory(new PrecisionModel(), 4326);
LinearRing shell = geometryFactory.createLinearRing(coords.toArray(new Coordinate[0]));
return mapInkxMngCoreService.saveMapInkx5k(req, geometryFactory.createPolygon(shell));
}
public ResponseObj updateUseInference(@Valid UseInferReq useInferReq) {
return mapInkxMngCoreService.updateUseInference(useInferReq);
}
}
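For clarity, a minimal sketch of the input saveMapInkx5k expects, reusing the AddMapReq schema example above (the service bean name is an assumption):
MapInkxMngDto.AddMapReq req =
new MapInkxMngDto.AddMapReq(
"31540687",
"공덕",
"127.17500001632317 36.17499998262991\n"
+ "127.14999995475043 36.17500002877932\n"
+ "127.15000004313612 36.199999984012415\n"
+ "127.1750000466954 36.20000001863179");
mapInkxMngService.saveMapInkx5k(req); // ring is closed automatically, polygon built in SRID 4326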

View File

@@ -64,17 +64,9 @@ management:
include:
- "health"
file:
#sync-root-dir: D:/app/original-images/
sync-root-dir: /app/original-images/
sync-tmp-dir: ${file.sync-root-dir}/tmp
sync-file-extention: tfw,tif