GeoJSON Polygon Data Operating System Build Complete - Daniel C No.5

This commit is contained in:
sanghyeonhd
2025-11-27 17:50:17 +09:00
parent 9d32c85fd0
commit bacd321666
47 changed files with 37945 additions and 39180 deletions

View File

@@ -96,18 +96,16 @@ public class AuthApiController {
})
@GetMapping("/list")
public ApiResponseDto<Page<Basic>> getUserList(
@Parameter(description = "관리자 이름")
@RequestParam(required = false) String userNm,
@Parameter(description = "페이지 번호 (0부터 시작)", example = "0")
@RequestParam(defaultValue = "0") int page,
@Parameter(description = "페이지 크기", example = "20")
@RequestParam(defaultValue = "20") int size,
@Parameter(description = "관리자 이름") @RequestParam(required = false) String userNm,
@Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0")
int page,
@Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20")
int size,
@Parameter(description = "정렬 조건 (형식: 필드명,방향)", example = "name,asc")
@RequestParam(required = false) String sort
) {
@RequestParam(required = false)
String sort) {
AuthDto.SearchReq searchReq = new AuthDto.SearchReq(userNm, page, size, sort);
Page<AuthDto.Basic> userList = authService.getUserList(searchReq);
return ApiResponseDto.ok(userList);
}
}
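For reference, the sort parameter here takes a "field,direction" string (e.g., "name,asc"). The actual parsing lives in AuthDto.SearchReq and the service layer, which this diff does not show, so the helper below is only a minimal hypothetical sketch of turning such a string into a Spring Sort:

import org.springframework.data.domain.Sort;

public final class SortParser {
    private SortParser() {}

    /** Parses "field,direction" (e.g., "name,asc"); direction defaults to ascending, blank input is unsorted. */
    public static Sort parse(String sort) {
        if (sort == null || sort.isBlank()) {
            return Sort.unsorted();
        }
        String[] parts = sort.split(",", 2);
        Sort.Direction direction =
            parts.length > 1 ? Sort.Direction.fromString(parts[1].trim()) : Sort.Direction.ASC;
        return Sort.by(direction, parts[0].trim());
    }
}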

View File

@@ -26,10 +26,16 @@ public class AuthDto {
private String userId;
private String empId;
private String userEmail;
@JsonFormatDttm
private ZonedDateTime createdDttm;
@JsonFormatDttm private ZonedDateTime createdDttm;
public Basic(Long id, String userAuth, String userNm, String userId, String empId, String userEmail, ZonedDateTime createdDttm) {
public Basic(
Long id,
String userAuth,
String userNm,
String userId,
String empId,
String userEmail,
ZonedDateTime createdDttm) {
this.id = id;
this.userAuth = userAuth;
this.userNm = userNm;

View File

@@ -30,15 +30,17 @@ public class AuthService {
/**
* Look up an administrator by sequence id
*
* @param id administrator sequence id
* @return administrator details
*/
public AuthDto.Basic getFindUserById(Long id){
public AuthDto.Basic getFindUserById(Long id) {
return authCoreService.findUserById(id);
}
/**
* Look up the administrator list
*
* @param searchReq search conditions
* @return paged administrator list
*/

View File

@@ -7,13 +7,12 @@ import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import io.swagger.v3.oas.annotations.Hidden;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.transaction.Transactional;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
@Tag(name = "변화탐지", description = "변화탐지 API")
@RequiredArgsConstructor
@RestController
@@ -32,10 +31,11 @@ public class ChangeDetectionApiController {
/**
* Example of converting PolygonData -> JsonNode
*
* @return list of polygon features as JsonNode
*/
@GetMapping("/json-data")
public ApiResponseDto<List<JsonNode>> getPolygonToJson(){
public ApiResponseDto<List<JsonNode>> getPolygonToJson() {
return ApiResponseDto.ok(changeDetectionService.getPolygonToJson());
}
}
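The polygon JSON this endpoint returns is produced from PostGIS ST_AsGeoJSON output (see ChangeDetectionRepositoryImpl later in this commit); a minimal sketch, assuming Jackson, of parsing such a string into the JsonNode values the response carries:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class GeoJsonNodeExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Shape of a typical ST_AsGeoJSON result for a point geometry.
        String geoJson = "{\"type\":\"Point\",\"coordinates\":[127.0,37.5]}";
        JsonNode node = mapper.readTree(geoJson);
        System.out.println(node.get("type").asText()); // Point
    }
}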

View File

@@ -7,7 +7,7 @@ import lombok.NoArgsConstructor;
import lombok.Setter;
import org.locationtech.jts.geom.Geometry;
public class ChangeDetectionDto{
public class ChangeDetectionDto {
@Schema(name = "TestDto", description = "테스트용")
@Getter
@@ -17,7 +17,8 @@ public class ChangeDetectionDto{
public static class TestDto {
private Long id;
private Geometry polygon;
private Double centroidX;;
private Double centroidX;
private Double centroidY;
}
@@ -26,12 +27,12 @@ public class ChangeDetectionDto{
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class PointGeometry{
public static class PointGeometry {
private Long geoUid;
private String type; // "Point"
private Geometry coordinates; //Point value
private String before_class; //baseline classification
private String after_class; //comparison classification
private Geometry coordinates; // Point value
private String before_class; // baseline classification
private String after_class; // comparison classification
}
@Schema(name = "PolygonGeometry", description = "폴리곤 리턴 객체")
@@ -39,12 +40,12 @@ public class ChangeDetectionDto{
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class PolygonGeometry{
public static class PolygonGeometry {
private Long geoUid;
private String type; // "MultiPolygon"
private Geometry coordinates; //Polygon value
private Double center_latitude; //polygon center latitude
private Double center_longitude; //polygon center longitude
private Geometry coordinates; // Polygon value
private Double center_latitude; // polygon center latitude
private Double center_longitude; // polygon center longitude
}
@Schema(name = "CogURL", description = "COG URL")
@@ -52,9 +53,9 @@ public class ChangeDetectionDto{
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class CogURL{
private String before_cog_url; //baseline COG URL
private String after_cog_url; //comparison COG URL
public static class CogURL {
private String before_cog_url; // baseline COG URL
private String after_cog_url; // comparison COG URL
}
@Schema(name = "PolygonProperties", description = "폴리곤 정보")
@@ -62,13 +63,13 @@ public class ChangeDetectionDto{
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class PolygonProperties{
private Double area; //area
private String before_year; //baseline year
private Double before_confidence; //baseline confidence (probability)
private String before_class; //baseline classification
private String after_year; //comparison year
private Double after_confidence; //comparison confidence (probability)
private String after_class; //comparison classification
public static class PolygonProperties {
private Double area; // area
private String before_year; // baseline year
private Double before_confidence; // baseline confidence (probability)
private String before_class; // baseline classification
private String after_year; // comparison year
private Double after_confidence; // comparison confidence (probability)
private String after_class; // comparison classification
}
}

View File

@@ -17,7 +17,7 @@ public class ChangeDetectionService {
return changeDetectionCoreService.getPolygonToPoint();
}
public List<JsonNode> getPolygonToJson(){
public List<JsonNode> getPolygonToJson() {
return changeDetectionCoreService.getPolygonToJson();
}
}

View File

@@ -3,11 +3,15 @@ package com.kamco.cd.kamcoback.geojson.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.time.ZonedDateTime;
import java.util.*;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.geojson.GeoJsonReader;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -18,7 +22,9 @@ import org.springframework.transaction.annotation.Transactional;
public class GeoJsonDataService {
private final MapSheetLearnDataRepository mapSheetLearnDataRepository;
private final MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
private final ObjectMapper objectMapper;
private final GeoJsonReader geoJsonReader = new GeoJsonReader();
/** Save GeoJSON files to the database */
@Transactional
@@ -37,6 +43,11 @@ public class GeoJsonDataService {
if (savedId != null) {
savedIds.add(savedId);
log.debug("GeoJSON 파일 저장 성공: {} (ID: {})", fileName, savedId);
// 학습 모델 결과 파일인지 확인하여 geometry 데이터 처리
if (isLearningModelResult(fileName, geoJsonContent)) {
processLearningModelGeometry(savedId, geoJsonContent, fileName);
}
}
} catch (Exception e) {
log.error("GeoJSON 파일 처리 실패: {}", fileName, e);
@@ -163,7 +174,24 @@ public class GeoJsonDataService {
/** Extract year information */
private void setYearInformation(MapSheetLearnDataEntity entity, String fileName) {
// Try to extract years from the file name (e.g., kamco_2021_2022_35813023.geojson)
// Check for learning-model result files and handle them specially
if (fileName.matches(".*캠코_\\d{4}_\\d{4}_\\d+.*")) {
String[] parts = fileName.split("_");
if (parts.length >= 4) {
String beforeYear = parts[1];
String afterYear = parts[2];
// Store the first year as the comparison-year integer
try {
entity.setCompareYyyy(Integer.parseInt(beforeYear));
log.debug("Set learning-model year info: {}", beforeYear);
} catch (NumberFormatException e) {
log.warn("Failed to parse year: {}", beforeYear, e);
}
return;
}
}
// Original logic: try to extract the year from the file name
String[] parts = fileName.split("_");
for (String part : parts) {
if (part.matches("\\d{4}")) { // 4-digit number (year)
@@ -226,4 +254,177 @@ public class GeoJsonDataService {
return true;
}
/** Check whether the file is a learning-model result */
private boolean isLearningModelResult(String fileName, String geoJsonContent) {
try {
// Check by file name (캠코_YYYY_YYYY_number pattern)
if (fileName.matches(".*캠코_\\d{4}_\\d{4}_\\d+.*\\.geojson")) {
return true;
}
// Check the GeoJSON content (presence of learning-model-specific fields)
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
if (rootNode.has("features")) {
JsonNode features = rootNode.get("features");
if (features.isArray() && features.size() > 0) {
JsonNode firstFeature = features.get(0);
if (firstFeature.has("properties")) {
JsonNode properties = firstFeature.get("properties");
// Check learning-model-specific fields
return properties.has("cd_prob")
|| properties.has("class")
|| (properties.has("before") && properties.has("after"));
}
}
}
} catch (Exception e) {
log.debug("학습 모델 결과 파일 확인 중 오류: {}", fileName, e);
}
return false;
}
/** Process geometry data from a learning-model result */
@Transactional
public void processLearningModelGeometry(Long dataUid, String geoJsonContent, String fileName) {
try {
log.info("학습 모델 geometry 데이터 처리 시작: {} (dataUid: {})", fileName, dataUid);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Extract metadata
String mapSheetName =
rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Extract years and map sheet number from the file name (캠코_2021_2022_35813023)
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1];
afterYear = parts[2];
mapSheetNum = parts[3];
}
if (beforeYear == null || afterYear == null) {
log.warn("연도 정보를 추출할 수 없습니다: {}", fileName);
return;
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
log.warn("features 배열이 없습니다: {}", fileName);
return;
}
List<MapSheetLearnDataGeomEntity> geomEntities = new ArrayList<>();
int processedCount = 0;
for (JsonNode feature : features) {
try {
MapSheetLearnDataGeomEntity geomEntity =
createGeometryEntity(feature, dataUid, beforeYear, afterYear, mapSheetNum);
if (geomEntity != null) {
geomEntities.add(geomEntity);
processedCount++;
}
} catch (Exception e) {
log.warn("Feature geometry 처리 실패 (feature {}): {}", processedCount, e.getMessage());
}
}
// Batch save
if (!geomEntities.isEmpty()) {
mapSheetLearnDataGeomRepository.saveAll(geomEntities);
log.info("학습 모델 geometry 데이터 저장 완료: {} ({}개 feature)", fileName, geomEntities.size());
}
} catch (Exception e) {
log.error("학습 모델 geometry 데이터 처리 실패: {}", fileName, e);
}
}
/** Create a geometry entity from an individual feature */
private MapSheetLearnDataGeomEntity createGeometryEntity(
JsonNode feature, Long dataUid, String beforeYear, String afterYear, String mapSheetNum) {
JsonNode properties = feature.get("properties");
JsonNode geometry = feature.get("geometry");
if (properties == null || geometry == null) {
return null;
}
MapSheetLearnDataGeomEntity entity = new MapSheetLearnDataGeomEntity();
// Basic info
entity.setDataUid(dataUid);
entity.setBeforeYyyy(Integer.parseInt(beforeYear));
entity.setAfterYyyy(Integer.parseInt(afterYear));
if (mapSheetNum != null) {
try {
entity.setMapSheetNum(Long.parseLong(mapSheetNum));
} catch (NumberFormatException e) {
log.warn("지도번호 파싱 실패: {}", mapSheetNum, e);
}
}
// Change detection probability
if (properties.has("cd_prob")) {
entity.setCdProb(properties.get("cd_prob").asDouble());
}
// Area info
if (properties.has("area")) {
entity.setArea(properties.get("area").asDouble());
}
// Process classification info
if (properties.has("class")) {
JsonNode classNode = properties.get("class");
// "before" classification
if (classNode.has("before") && classNode.get("before").isArray()) {
JsonNode beforeArray = classNode.get("before");
if (beforeArray.size() > 0) {
JsonNode firstBefore = beforeArray.get(0);
if (firstBefore.has("class_name")) {
entity.setClassBeforeName(firstBefore.get("class_name").asText());
}
if (firstBefore.has("probability")) {
entity.setClassBeforeProb(firstBefore.get("probability").asDouble());
}
}
}
// "after" classification
if (classNode.has("after") && classNode.get("after").isArray()) {
JsonNode afterArray = classNode.get("after");
if (afterArray.size() > 0) {
JsonNode firstAfter = afterArray.get(0);
if (firstAfter.has("class_name")) {
entity.setClassAfterName(firstAfter.get("class_name").asText());
}
if (firstAfter.has("probability")) {
entity.setClassAfterProb(firstAfter.get("probability").asDouble());
}
}
}
}
// Convert the geometry
try {
Geometry geom = geoJsonReader.read(geometry.toString());
if (geom != null) {
geom.setSRID(5186); // EPSG:5186
entity.setGeom(geom);
}
} catch (Exception e) {
log.warn("Geometry 파싱 실패: {}", e.getMessage());
return null;
}
return entity;
}
}
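The file-name convention 캠코_&lt;beforeYear&gt;_&lt;afterYear&gt;_&lt;mapSheetNum&gt;.geojson drives both isLearningModelResult and the year extraction above. A standalone sketch of the same split-based parsing, using the example name from the code:

public class FileNameParseExample {
    public static void main(String[] args) {
        String fileName = "캠코_2021_2022_35813023.geojson";
        String base = fileName.replace(".geojson", "");
        String[] parts = base.split("_");
        if (parts.length >= 4) {
            int beforeYear = Integer.parseInt(parts[1]); // 2021
            int afterYear = Integer.parseInt(parts[2]); // 2022
            long mapSheetNum = Long.parseLong(parts[3]); // 35813023
            System.out.println(beforeYear + " -> " + afterYear + " (sheet " + mapSheetNum + ")");
        }
    }
}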

View File

@@ -2,7 +2,9 @@ package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.LearningModelResultDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultService;
import com.kamco.cd.kamcoback.inference.service.LearningModelResultProcessor;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
@@ -10,10 +12,17 @@ import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@@ -24,7 +33,10 @@ import org.springframework.web.bind.annotation.RestController;
@RestController
public class InferenceResultApiController {
private static final Logger logger = LoggerFactory.getLogger(InferenceResultApiController.class);
private final InferenceResultService inferenceResultService;
private final LearningModelResultProcessor learningModelResultProcessor;
@Operation(summary = "추론관리 분석결과 목록 조회", description = "분석상태, 제목으로 분석결과를 조회 합니다.")
@ApiResponses(
@@ -132,4 +144,182 @@ public class InferenceResultApiController {
inferenceResultService.getInferenceResultGeomList(searchGeoReq);
return ApiResponseDto.ok(geomList);
}
@Operation(summary = "학습모델 결과 처리", description = "실제 학습모델 GeoJSON 파일을 처리하여 데이터베이스에 저장합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "처리 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(implementation = LearningModelResultDto.ProcessResponse.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/learning-model/process")
public ApiResponseDto<LearningModelResultDto.ProcessResponse> processLearningModelResult(
@RequestBody LearningModelResultDto.ProcessRequest request) {
try {
logger.info("Processing learning model result file: {}", request.getFilePath());
Path filePath = Paths.get(request.getFilePath());
int processedFeatures = learningModelResultProcessor.processLearningModelResult(filePath);
LearningModelResultDto.ProcessResponse response =
LearningModelResultDto.ProcessResponse.builder()
.success(true)
.message("학습모델 결과 처리가 완료되었습니다.")
.processedFeatures(processedFeatures)
.filePath(request.getFilePath())
.build();
logger.info(
"Successfully processed {} features from file: {}",
processedFeatures,
request.getFilePath());
return ApiResponseDto.ok(response);
} catch (Exception e) {
logger.error("Failed to process learning model result: {}", request.getFilePath(), e);
LearningModelResultDto.ProcessResponse response =
LearningModelResultDto.ProcessResponse.builder()
.success(false)
.message("학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
.processedFeatures(0)
.filePath(request.getFilePath())
.build();
return ApiResponseDto.ok(response);
}
}
@Operation(summary = "학습모델 결과 일괄 처리", description = "여러 학습모델 GeoJSON 파일을 일괄 처리하여 데이터베이스에 저장합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "처리 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(
implementation = LearningModelResultDto.BatchProcessResponse.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/learning-model/process-batch")
public ApiResponseDto<LearningModelResultDto.BatchProcessResponse>
processBatchLearningModelResults(
@RequestBody LearningModelResultDto.BatchProcessRequest request) {
try {
logger.info("Processing {} learning model result files", request.getFilePaths().size());
List<Path> filePaths = new ArrayList<>();
for (String filePath : request.getFilePaths()) {
filePaths.add(Paths.get(filePath));
}
int totalProcessedFeatures =
learningModelResultProcessor.processMultipleLearningModelResults(filePaths);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(true)
.message("일괄 학습모델 결과 처리가 완료되었습니다.")
.totalProcessedFeatures(totalProcessedFeatures)
.processedFileCount(request.getFilePaths().size())
.filePaths(request.getFilePaths())
.build();
logger.info(
"Successfully processed {} features from {} files",
totalProcessedFeatures,
request.getFilePaths().size());
return ApiResponseDto.ok(response);
} catch (Exception e) {
logger.error("Failed to process batch learning model results", e);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(false)
.message("일괄 학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
.totalProcessedFeatures(0)
.processedFileCount(0)
.filePaths(request.getFilePaths())
.build();
return ApiResponseDto.ok(response);
}
}
@Operation(summary = "기본 학습모델 파일 처리", description = "미리 준비된 학습모델 파일을 처리합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "처리 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(
implementation = LearningModelResultDto.BatchProcessResponse.class))),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/learning-model/process-default")
public ApiResponseDto<LearningModelResultDto.BatchProcessResponse>
processDefaultLearningModelResults() {
try {
logger.info("Processing default learning model result files");
// Process the two default learning model files from upload directory
List<String> defaultFilePaths =
List.of(
"/Users/deniallee/geojson/upload/캠코_2021_2022_35813023.geojson",
"/Users/deniallee/geojson/upload/캠코_2023_2024_35810049.geojson");
List<Path> filePaths = new ArrayList<>();
for (String filePath : defaultFilePaths) {
filePaths.add(Paths.get(filePath));
}
int totalProcessedFeatures =
learningModelResultProcessor.processMultipleLearningModelResults(filePaths);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(true)
.message("기본 학습모델 결과 파일 처리가 완료되었습니다.")
.totalProcessedFeatures(totalProcessedFeatures)
.processedFileCount(defaultFilePaths.size())
.filePaths(defaultFilePaths)
.build();
logger.info(
"Successfully processed {} features from {} default files",
totalProcessedFeatures,
defaultFilePaths.size());
return ApiResponseDto.ok(response);
} catch (Exception e) {
logger.error("Failed to process default learning model results", e);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(false)
.message("기본 학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
.totalProcessedFeatures(0)
.processedFileCount(0)
.filePaths(List.of())
.build();
return ApiResponseDto.ok(response);
}
}
}
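A minimal client-side sketch of invoking the single-file endpoint with the JDK HTTP client; the base path and port are assumptions (the class-level @RequestMapping is not visible in this diff), and the filePath value is a placeholder:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ProcessRequestExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical host, port, and base path; only "/learning-model/process" comes from this diff.
        String body = "{\"filePath\": \"/data/geojson/sample.geojson\"}";
        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8080/api/inference-results/learning-model/process"))
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(body))
            .build();
        HttpResponse<String> response =
            HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + ": " + response.body());
    }
}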

View File

@@ -0,0 +1,180 @@
package com.kamco.cd.kamcoback.inference.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/** DTO classes for learning model result processing */
public class LearningModelResultDto {
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 결과 처리 요청")
public static class ProcessRequest {
@Schema(
description = "GeoJSON 파일 경로",
example =
"src/main/resources/db/migration/sample-results_updated/캠코_2021_2022_35813023.geojson")
private String filePath;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 결과 처리 응답")
public static class ProcessResponse {
@Schema(description = "처리 성공 여부")
private boolean success;
@Schema(description = "처리 결과 메시지")
private String message;
@Schema(description = "처리된 feature 개수")
private int processedFeatures;
@Schema(description = "처리된 파일 경로")
private String filePath;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 결과 일괄 처리 요청")
public static class BatchProcessRequest {
@Schema(description = "GeoJSON 파일 경로 목록")
private List<String> filePaths;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 결과 일괄 처리 응답")
public static class BatchProcessResponse {
@Schema(description = "처리 성공 여부")
private boolean success;
@Schema(description = "처리 결과 메시지")
private String message;
@Schema(description = "전체 처리된 feature 개수")
private int totalProcessedFeatures;
@Schema(description = "처리된 파일 개수")
private int processedFileCount;
@Schema(description = "처리된 파일 경로 목록")
private List<String> filePaths;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 처리 상태")
public static class ProcessingStatus {
@Schema(description = "처리 ID")
private String processingId;
@Schema(description = "처리 상태 (PENDING, PROCESSING, COMPLETED, FAILED)")
private String status;
@Schema(description = "진행률 (0-100)")
private int progressPercentage;
@Schema(description = "현재 처리 중인 파일")
private String currentFile;
@Schema(description = "전체 파일 개수")
private int totalFiles;
@Schema(description = "처리 완료된 파일 개수")
private int completedFiles;
@Schema(description = "시작 시간")
private String startTime;
@Schema(description = "예상 완료 시간")
private String estimatedEndTime;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 데이터 요약")
public static class DataSummary {
@Schema(description = "전체 데이터 개수")
private long totalRecords;
@Schema(description = "연도별 데이터 개수")
private List<YearDataCount> yearDataCounts;
@Schema(description = "분류별 데이터 개수")
private List<ClassDataCount> classDataCounts;
@Schema(description = "지도 영역별 데이터 개수")
private List<MapSheetDataCount> mapSheetDataCounts;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "연도별 데이터 개수")
public static class YearDataCount {
@Schema(description = "비교 연도 (예: 2021_2022)")
private String compareYear;
@Schema(description = "데이터 개수")
private long count;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "분류별 데이터 개수")
public static class ClassDataCount {
@Schema(description = "분류명")
private String className;
@Schema(description = "변화 전 개수")
private long beforeCount;
@Schema(description = "변화 후 개수")
private long afterCount;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "지도 영역별 데이터 개수")
public static class MapSheetDataCount {
@Schema(description = "지도 영역 번호")
private String mapSheetNum;
@Schema(description = "데이터 개수")
private long count;
@Schema(description = "평균 변화 탐지 확률")
private double avgChangeDetectionProb;
}
}
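All of these DTOs carry Lombok @Builder, so callers assemble them fluently; a minimal usage sketch mirroring how the controller builds a ProcessResponse (the count and path are illustrative, and the DTO class above is assumed to be on the classpath):

import com.kamco.cd.kamcoback.inference.dto.LearningModelResultDto;

public class ProcessResponseExample {
    public static void main(String[] args) {
        LearningModelResultDto.ProcessResponse response =
            LearningModelResultDto.ProcessResponse.builder()
                .success(true)
                .message("Processing completed.")
                .processedFeatures(42) // illustrative count
                .filePath("/data/geojson/sample.geojson") // hypothetical path
                .build();
        System.out.println(response.getMessage());
    }
}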

View File

@@ -0,0 +1,384 @@
package com.kamco.cd.kamcoback.inference.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.geojson.GeoJsonReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* Service for processing actual learning model result GeoJSON files and storing them in the
* database with proper field mapping.
*/
@Service
public class LearningModelResultProcessor {
private static final Logger logger = LoggerFactory.getLogger(LearningModelResultProcessor.class);
@Autowired private MapSheetLearnDataRepository mapSheetLearnDataRepository;
@Autowired private MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
private final ObjectMapper objectMapper = new ObjectMapper();
private final GeoJsonReader geoJsonReader = new GeoJsonReader();
/**
* Process large learning model result files with optimized batch processing
*/
public int processLearningModelResultOptimized(Path geoJsonFilePath) {
try {
logger.info("Processing learning model result file (optimized): {}", geoJsonFilePath);
// Read and parse GeoJSON file
String geoJsonContent = Files.readString(geoJsonFilePath);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Validate GeoJSON structure
if (!rootNode.has("type") || !"FeatureCollection".equals(rootNode.get("type").asText())) {
throw new IllegalArgumentException("Invalid GeoJSON: Not a FeatureCollection");
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
throw new IllegalArgumentException("Invalid GeoJSON: No features array found");
}
// Extract metadata from file name and content
String fileName = geoJsonFilePath.getFileName().toString();
String mapSheetName = rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Parse years from filename
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1];
afterYear = parts[2];
mapSheetNum = parts[3];
}
if (beforeYear == null || afterYear == null || mapSheetNum == null) {
throw new IllegalArgumentException("Cannot parse years and map sheet number from filename: " + fileName);
}
int totalFeatures = features.size();
logger.info("Total features to process: {}", totalFeatures);
// Step 1: Create main data record first
MapSheetLearnDataEntity savedMainData = createMainDataRecord(geoJsonContent, fileName, geoJsonFilePath.toString(), beforeYear, afterYear, mapSheetNum);
// Step 2: Process features in small batches to avoid transaction timeout
int totalProcessed = 0;
int batchSize = 25; // Smaller batch size for reliability
for (int i = 0; i < totalFeatures; i += batchSize) {
int endIndex = Math.min(i + batchSize, totalFeatures);
logger.info("Processing batch {}-{} of {}", i + 1, endIndex, totalFeatures);
List<JsonNode> batch = new ArrayList<>();
for (int j = i; j < endIndex; j++) {
batch.add(features.get(j));
}
try {
int processed = processBatchSafely(batch, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
totalProcessed += processed;
logger.info("Batch processed successfully. Total so far: {}/{}", totalProcessed, totalFeatures);
} catch (Exception e) {
logger.error("Failed to process batch {}-{}: {}", i + 1, endIndex, e.getMessage());
// Continue with next batch instead of failing completely
}
}
logger.info("Successfully processed {} out of {} features from file: {}", totalProcessed, totalFeatures, fileName);
return totalProcessed;
} catch (IOException e) {
logger.error("Failed to read GeoJSON file: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result file", e);
} catch (Exception e) {
logger.error("Error processing learning model result: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result", e);
}
}
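// NOTE: Spring's proxy-based @Transactional has no effect on private methods invoked from
// within the same class (self-invocation bypasses the proxy), so the two methods below
// run in the caller's transaction context rather than starting their own.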
@Transactional
private MapSheetLearnDataEntity createMainDataRecord(String geoJsonContent, String fileName, String filePath, String beforeYear, String afterYear, String mapSheetNum) {
MapSheetLearnDataEntity mainData = createMainDataEntity(geoJsonContent, fileName, filePath, beforeYear, afterYear, mapSheetNum);
MapSheetLearnDataEntity saved = mapSheetLearnDataRepository.save(mainData);
logger.info("Created main data record with ID: {}", saved.getId());
return saved;
}
@Transactional
private int processBatchSafely(List<JsonNode> features, Long dataUid, String beforeYear, String afterYear, String mapSheetNum) {
int processed = 0;
for (JsonNode feature : features) {
try {
long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000);
insertFeatureWithPostGIS(feature, geoUid, dataUid, beforeYear, afterYear, mapSheetNum);
processed++;
// Small delay to prevent ID collisions
try { Thread.sleep(1); } catch (InterruptedException e) { Thread.currentThread().interrupt(); }
} catch (Exception e) {
logger.warn("Failed to process individual feature: {}", e.getMessage());
// Continue processing other features in this batch
}
}
return processed;
}
/**
* Process a learning model result GeoJSON file and store it in the database
*
* @param geoJsonFilePath Path to the GeoJSON file
* @return Number of features processed
*/
@Transactional
public int processLearningModelResult(Path geoJsonFilePath) {
try {
logger.info("Processing learning model result file: {}", geoJsonFilePath);
// Read and parse GeoJSON file
String geoJsonContent = Files.readString(geoJsonFilePath);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Validate GeoJSON structure
if (!rootNode.has("type") || !"FeatureCollection".equals(rootNode.get("type").asText())) {
throw new IllegalArgumentException("Invalid GeoJSON: Not a FeatureCollection");
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
throw new IllegalArgumentException("Invalid GeoJSON: No features array found");
}
// Extract metadata from file name and content
String fileName = geoJsonFilePath.getFileName().toString();
String mapSheetName =
rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Parse years from filename or data (e.g., "캠코_2021_2022_35813023")
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1]; // 2021 or 2023
afterYear = parts[2]; // 2022 or 2024
mapSheetNum = parts[3]; // 35813023 or 35810049
}
if (beforeYear == null || afterYear == null || mapSheetNum == null) {
throw new IllegalArgumentException(
"Cannot parse years and map sheet number from filename: " + fileName);
}
// Create main data record
MapSheetLearnDataEntity mainData =
createMainDataEntity(
geoJsonContent,
fileName,
geoJsonFilePath.toString(),
beforeYear,
afterYear,
mapSheetNum);
MapSheetLearnDataEntity savedMainData = mapSheetLearnDataRepository.save(mainData);
logger.info("Saved main data record with ID: {}", savedMainData.getId());
// Process each feature in the GeoJSON using direct PostGIS insertion
int featureCount = 0;
int batchSize = 10; // Much smaller batch size to avoid transaction timeout
for (int i = 0; i < features.size(); i += batchSize) {
int endIndex = Math.min(i + batchSize, features.size());
logger.info("Processing batch {}-{} of {} features", i + 1, endIndex, features.size());
// Process each feature individually within this logging batch
for (int j = i; j < endIndex; j++) {
JsonNode feature = features.get(j);
try {
// Generate unique ID for this geometry entity
long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000) + j;
// Extract feature data and insert directly with PostGIS
insertFeatureWithPostGIS(feature, geoUid, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
featureCount++;
// Small delay to prevent issues
if (j % 5 == 0) {
try { Thread.sleep(10); } catch (InterruptedException e) { Thread.currentThread().interrupt(); }
}
} catch (Exception e) {
logger.warn("Failed to process feature {}: {}", j + 1, e.getMessage());
}
}
// Log progress after each batch
if (featureCount > 0 && endIndex % batchSize == 0) {
logger.info("Processed {} features so far, success rate: {:.1f}%",
featureCount, (featureCount * 100.0) / endIndex);
}
}
logger.info("Successfully processed {} features from file: {}", featureCount, fileName);
return featureCount;
} catch (IOException e) {
logger.error("Failed to read GeoJSON file: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result file", e);
} catch (Exception e) {
logger.error("Error processing learning model result: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result", e);
}
}
/** Create the main data entity for tb_map_sheet_learn_data table */
private MapSheetLearnDataEntity createMainDataEntity(
String geoJsonContent,
String fileName,
String filePath,
String beforeYear,
String afterYear,
String mapSheetNum) {
MapSheetLearnDataEntity entity = new MapSheetLearnDataEntity();
// Generate unique ID (using current timestamp + random component)
entity.setId(System.currentTimeMillis() + (long) (Math.random() * 1000));
LocalDateTime now = LocalDateTime.now();
entity.setAnalStrtDttm(ZonedDateTime.now());
entity.setAnalEndDttm(ZonedDateTime.now());
entity.setCompareYyyy(Integer.parseInt(beforeYear)); // Store only the first year
// Convert the JSON payload to a Map and store it
try {
@SuppressWarnings("unchecked")
Map<String, Object> jsonMap = objectMapper.readValue(geoJsonContent, Map.class);
entity.setDataJson(jsonMap);
} catch (Exception e) {
logger.warn("JSON 파싱 실패, 빈 Map으로 저장: {}", fileName, e);
entity.setDataJson(new HashMap<>());
}
entity.setDataName(fileName);
entity.setDataPath(filePath);
entity.setDataState("PROCESSED");
entity.setCreatedDttm(ZonedDateTime.now());
entity.setUpdatedDttm(ZonedDateTime.now());
return entity;
}
/** Insert GeoJSON feature directly using PostGIS functions */
private void insertFeatureWithPostGIS(
JsonNode feature, Long geoUid, Long dataUid, String beforeYear, String afterYear, String mapSheetNum)
throws Exception {
JsonNode properties = feature.get("properties");
JsonNode geometry = feature.get("geometry");
if (properties == null || geometry == null) {
throw new IllegalArgumentException("Feature missing properties or geometry");
}
// Extract properties
Double cdProb = properties.has("cd_prob") ? properties.get("cd_prob").asDouble() : null;
Double area = properties.has("area") ? properties.get("area").asDouble() : null;
String classBeforeName = null;
Double classBeforeProb = null;
String classAfterName = null;
Double classAfterProb = null;
// Classification data
JsonNode classNode = properties.get("class");
if (classNode != null) {
// Before classification
JsonNode beforeClass = classNode.get("before");
if (beforeClass != null && beforeClass.isArray() && beforeClass.size() > 0) {
JsonNode firstBefore = beforeClass.get(0);
if (firstBefore.has("class_name")) {
classBeforeName = firstBefore.get("class_name").asText();
}
if (firstBefore.has("probability")) {
classBeforeProb = firstBefore.get("probability").asDouble();
}
}
// After classification
JsonNode afterClass = classNode.get("after");
if (afterClass != null && afterClass.isArray() && afterClass.size() > 0) {
JsonNode firstAfter = afterClass.get(0);
if (firstAfter.has("class_name")) {
classAfterName = firstAfter.get("class_name").asText();
}
if (firstAfter.has("probability")) {
classAfterProb = firstAfter.get("probability").asDouble();
}
}
}
// Get geometry type
String geoType = geometry.has("type") ? geometry.get("type").asText() : "Unknown";
// Convert geometry to JSON string for PostGIS
String geometryJson = geometry.toString();
// Insert using PostGIS functions
mapSheetLearnDataGeomRepository.insertWithPostGISGeometry(
geoUid, cdProb, classBeforeName, classBeforeProb,
classAfterName, classAfterProb, Long.parseLong(mapSheetNum),
Integer.parseInt(beforeYear), Integer.parseInt(afterYear),
area, geometryJson, geoType, dataUid
);
logger.debug("Inserted geometry entity with ID: {} using PostGIS", geoUid);
}
/**
* Process multiple learning model result files
*
* @param filePaths List of GeoJSON file paths
* @return Total number of features processed across all files
*/
@Transactional
public int processMultipleLearningModelResults(List<Path> filePaths) {
int totalProcessed = 0;
for (Path filePath : filePaths) {
try {
int processed = processLearningModelResult(filePath);
totalProcessed += processed;
logger.info("Processed {} features from file: {}", processed, filePath.getFileName());
} catch (Exception e) {
logger.error("Failed to process file: {}", filePath, e);
// Continue processing other files even if one fails
}
}
logger.info("Total features processed across all files: {}", totalProcessed);
return totalProcessed;
}
}
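The geo_uid values above are derived from System.currentTimeMillis() plus a random offset, which can still collide under load even with the 1 ms sleep. A minimal alternative sketch, not part of this commit, using a monotonically increasing counter:

import java.util.concurrent.atomic.AtomicLong;

public class GeoUidGenerator {
    // Seeded once per JVM; strictly increasing, so no sleep between inserts is needed.
    private static final AtomicLong COUNTER = new AtomicLong(System.currentTimeMillis() * 1000);

    public static long nextUid() {
        return COUNTER.incrementAndGet();
    }

    public static void main(String[] args) {
        System.out.println(nextUid());
        System.out.println(nextUid());
    }
}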

View File

@@ -28,16 +28,21 @@ public class AuthCoreService {
/**
* Look up an administrator by sequence id
*
* @param id administrator sequence id
* @return administrator details
*/
public AuthDto.Basic findUserById(Long id){
UserEntity entity = authRepository.findUserById(id).orElseThrow(() -> new EntityNotFoundException("Administrator not found. " + id));
public AuthDto.Basic findUserById(Long id) {
UserEntity entity =
authRepository
.findUserById(id)
.orElseThrow(() -> new EntityNotFoundException("Administrator not found. " + id));
return entity.toDto();
}
/**
* Look up the administrator list
*
* @param searchReq search conditions
* @return paged administrator list
*/

View File

@@ -29,7 +29,8 @@ public class ChangeDetectionCoreService {
// Compute the center coordinates
Point centroid = polygon.getCentroid();
return new ChangeDetectionDto.TestDto(p.getId(), polygon, centroid.getX(), centroid.getY());
return new ChangeDetectionDto.TestDto(
p.getId(), polygon, centroid.getX(), centroid.getY());
})
.collect(Collectors.toList());
}
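The TestDto above carries the centroid coordinates computed by JTS; a self-contained sketch of the same getCentroid() call on a small polygon (the coordinates are illustrative, in lon/lat order):

import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.Point;

public class CentroidExample {
    public static void main(String[] args) {
        GeometryFactory gf = new GeometryFactory();
        // A closed triangular ring: the first and last coordinates must match.
        Geometry polygon = gf.createPolygon(new Coordinate[] {
            new Coordinate(127.0, 37.0), new Coordinate(127.1, 37.0),
            new Coordinate(127.1, 37.1), new Coordinate(127.0, 37.0)
        });
        Point centroid = polygon.getCentroid();
        System.out.println(centroid.getX() + ", " + centroid.getY());
    }
}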

View File

@@ -10,12 +10,9 @@ import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import java.util.Map;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
@Getter
@Setter
@@ -149,5 +146,4 @@ public class MapSheetAnalDataEntity {
@Column(name = "ref_map_sheet_num")
private Long refMapSheetNum;
}

View File

@@ -93,8 +93,6 @@ public class UserEntity {
this.userId,
this.empId,
this.userEmail,
this.createdDttm
) ;
this.createdDttm);
}
}

View File

@@ -3,7 +3,11 @@ package com.kamco.cd.kamcoback.postgres.repository;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
@Repository
public interface MapSheetLearnDataGeomRepository
@@ -24,4 +28,40 @@ public interface MapSheetLearnDataGeomRepository
/** Delete existing geometry data by data UID (used before regeneration) */
void deleteByDataUid(Long dataUid);
/**
* Insert geometry data directly using PostGIS functions
* Stored as ST_SetSRID(ST_GeomFromGeoJSON(...), 5186)
*/
@Modifying
@Transactional
@Query(value = """
INSERT INTO tb_map_sheet_learn_data_geom (
geo_uid, cd_prob, class_before_name, class_before_prob,
class_after_name, class_after_prob, map_sheet_num,
before_yyyy, after_yyyy, area, geom, geo_type, data_uid,
created_dttm, updated_dttm
) VALUES (
:geoUid, :cdProb, :classBeforeName, :classBeforeProb,
:classAfterName, :classAfterProb, :mapSheetNum,
:beforeYyyy, :afterYyyy, :area,
ST_SetSRID(ST_GeomFromGeoJSON(CAST(:geometryJson AS TEXT)), 5186),
:geoType, :dataUid, NOW(), NOW()
) ON CONFLICT (geo_uid) DO NOTHING
""", nativeQuery = true)
void insertWithPostGISGeometry(
@Param("geoUid") Long geoUid,
@Param("cdProb") Double cdProb,
@Param("classBeforeName") String classBeforeName,
@Param("classBeforeProb") Double classBeforeProb,
@Param("classAfterName") String classAfterName,
@Param("classAfterProb") Double classAfterProb,
@Param("mapSheetNum") Long mapSheetNum,
@Param("beforeYyyy") Integer beforeYyyy,
@Param("afterYyyy") Integer afterYyyy,
@Param("area") Double area,
@Param("geometryJson") String geometryJson,
@Param("geoType") String geoType,
@Param("dataUid") Long dataUid
);
}
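ST_SetSRID(ST_GeomFromGeoJSON(...), 5186) is what turns the raw GeoJSON geometry string into a PostGIS geometry in EPSG:5186. A minimal JdbcTemplate sketch of the same conversion outside Spring Data, inserting only the key and geometry columns (the table and column names come from the query above):

import org.springframework.jdbc.core.JdbcTemplate;

public class GeoJsonInsertSketch {
    private final JdbcTemplate jdbcTemplate;

    public GeoJsonInsertSketch(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    /** Inserts just the key and geometry; the full column list mirrors insertWithPostGISGeometry. */
    public void insertGeometry(long geoUid, String geometryJson) {
        jdbcTemplate.update(
            "INSERT INTO tb_map_sheet_learn_data_geom (geo_uid, geom, created_dttm, updated_dttm) "
                + "VALUES (?, ST_SetSRID(ST_GeomFromGeoJSON(?), 5186), NOW(), NOW()) "
                + "ON CONFLICT (geo_uid) DO NOTHING",
            geoUid, geometryJson);
    }
}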

View File

@@ -63,6 +63,7 @@ public class AuthRepositoryImpl implements AuthRepositoryCustom {
/**
* Look up by user sequence id
*
* @param id user sequence id
* @return user details
*/
@@ -74,6 +75,7 @@ public class AuthRepositoryImpl implements AuthRepositoryCustom {
/**
* Look up the administrator list
*
* @param searchReq search conditions
* @return paged administrator list
*/
@@ -87,24 +89,22 @@ public class AuthRepositoryImpl implements AuthRepositoryCustom {
List<Basic> content =
queryFactory
.select(Projections.constructor(AuthDto.Basic.class,
.select(
Projections.constructor(
AuthDto.Basic.class,
userEntity.id,
userEntity.userAuth,
userEntity.userNm,
userEntity.userId,
userEntity.empId,
userEntity.userEmail,
userEntity.createdDttm
))
userEntity.createdDttm))
.from(userEntity)
.where(
builder
)
.where(builder)
.orderBy(userEntity.userId.asc())
.fetch();
long total =
queryFactory.select(userEntity.id).from(userEntity).where(builder).fetchCount();
long total = queryFactory.select(userEntity.id).from(userEntity).where(builder).fetchCount();
return new PageImpl<>(content, pageable, total);
}

View File

@@ -25,11 +25,9 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
}
@Override
public List<String> findPolygonJson(){
public List<String> findPolygonJson() {
return queryFactory
.select(
Expressions.stringTemplate("ST_AsGeoJSON({0})", mapSheetAnalDataGeomEntity.geom)
)
.select(Expressions.stringTemplate("ST_AsGeoJSON({0})", mapSheetAnalDataGeomEntity.geom))
.from(mapSheetAnalDataGeomEntity)
.orderBy(mapSheetAnalDataGeomEntity.id.desc())
.fetch();

View File

@@ -21,6 +21,13 @@ spring:
hikari:
minimum-idle: 10
maximum-pool-size: 20
connection-timeout: 60000 # 60-second connection timeout
idle-timeout: 300000 # 5-minute idle timeout
max-lifetime: 1800000 # 30-minute maximum lifetime
leak-detection-threshold: 60000 # connection leak detection
transaction:
default-timeout: 300 # 5-minute transaction timeout
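
The same pool settings expressed programmatically, as a hedged HikariConfig sketch equivalent to the YAML above (the JDBC URL is a placeholder and a reachable database is required):

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

public class PoolConfigExample {
    public static void main(String[] args) {
        HikariConfig config = new HikariConfig();
        config.setJdbcUrl("jdbc:postgresql://localhost:5432/kamco"); // placeholder URL
        config.setMinimumIdle(10);
        config.setMaximumPoolSize(20);
        config.setConnectionTimeout(60_000);      // 60-second connection timeout
        config.setIdleTimeout(300_000);           // 5-minute idle timeout
        config.setMaxLifetime(1_800_000);         // 30-minute maximum lifetime
        config.setLeakDetectionThreshold(60_000); // connection leak detection
        try (HikariDataSource ds = new HikariDataSource(config)) {
            System.out.println("pool ready: " + ds.getPoolName());
        }
    }
}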

View File

@@ -5,7 +5,7 @@ spring:
application:
name: kamco-change-detection-api
profiles:
active: local # active profile (e.g., dev, prod, test)
active: dev # active profile (e.g., dev, prod, test)
datasource:
driver-class-name: org.postgresql.Driver

View File

@@ -0,0 +1,27 @@
-- Fix geometry column type in tb_map_sheet_learn_data_geom table
-- The table was incorrectly created with 'bytea' type instead of 'geometry' type
-- 1. First ensure PostGIS is enabled
CREATE EXTENSION IF NOT EXISTS postgis;
-- 2. Clear existing data since it's in incorrect format (JTS serialized objects)
-- This data needs to be reprocessed anyway with the correct PostGIS approach
DELETE FROM public.tb_map_sheet_learn_data_geom;
-- 3. Drop and recreate the geom column with correct PostGIS geometry type
ALTER TABLE public.tb_map_sheet_learn_data_geom DROP COLUMN IF EXISTS geom;
ALTER TABLE public.tb_map_sheet_learn_data_geom ADD COLUMN geom geometry(Polygon, 5186);
-- 4. Create spatial index for performance
CREATE INDEX IF NOT EXISTS idx_tb_map_sheet_learn_data_geom_spatial
ON public.tb_map_sheet_learn_data_geom USING GIST (geom);
-- 5. Update column comment
COMMENT ON COLUMN public.tb_map_sheet_learn_data_geom.geom IS 'PostGIS geometry info (Polygon, EPSG:5186)';
-- 6. Verify the column type is correct
SELECT column_name, data_type, udt_name
FROM information_schema.columns
WHERE table_name = 'tb_map_sheet_learn_data_geom' AND column_name = 'geom';
SELECT 'Geometry column type fixed successfully' as message;
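A quick way to run the same verification from application code; a hedged JDBC sketch of the information_schema check in step 6 (connection parameters are placeholders):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class GeomColumnCheck {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/kamco", "user", "password");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(
                 "SELECT data_type, udt_name FROM information_schema.columns "
                     + "WHERE table_name = 'tb_map_sheet_learn_data_geom' AND column_name = 'geom'")) {
            while (rs.next()) {
                // Expect data_type = 'USER-DEFINED' and udt_name = 'geometry' after the migration.
                System.out.println(rs.getString("data_type") + " / " + rs.getString("udt_name"));
            }
        }
    }
}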

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
PROJCS["KGD2002_Central_Belt_2010",GEOGCS["GCS_KGD2002",DATUM["D_Korea_Geodetic_Datum_2002",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000.0],PARAMETER["False_Northing",600000.0],PARAMETER["Central_Meridian",127.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Latitude_Of_Origin",38.0],UNIT["Meter",1.0]]

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
PROJCS["KGD2002_Central_Belt_2010",GEOGCS["GCS_KGD2002",DATUM["D_Korea_Geodetic_Datum_2002",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",200000.0],PARAMETER["False_Northing",600000.0],PARAMETER["Central_Meridian",127.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Latitude_Of_Origin",38.0],UNIT["Meter",1.0]]

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
PROJCS["Korea_2000_Korea_Unified_Coordinate_System",GEOGCS["GCS_Korea_2000",DATUM["D_Korea_2000",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",1000000.0],PARAMETER["False_Northing",2000000.0],PARAMETER["Central_Meridian",127.5],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",38.0],UNIT["Meter",1.0]]