Merge remote-tracking branch 'origin/feat/demo-20251205' into feat/demo-20251205

# Conflicts:
#	src/main/java/com/kamco/cd/kamcoback/changedetection/ChangeDetectionApiController.java
#	src/main/java/com/kamco/cd/kamcoback/changedetection/dto/ChangeDetectionDto.java
This commit is contained in:
2025-11-27 18:01:53 +09:00
46 changed files with 38145 additions and 39201 deletions

View File

@@ -15,7 +15,9 @@ import jakarta.validation.Valid;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
@@ -29,7 +31,7 @@ public class AuthApiController {
private final AuthService authService;
@Operation(summary = "관리자 등록", description = "관리자 를 등록 합니다.")
@Operation(summary = "관리자 등록", description = "관리자를 등록 합니다.")
@ApiResponses(
value = {
@ApiResponse(
@@ -43,71 +45,107 @@ public class AuthApiController {
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/signup")
public ApiResponseDto<Long> signup(
@PostMapping("/save")
public ApiResponseDto<Long> save(
@io.swagger.v3.oas.annotations.parameters.RequestBody(
description = "관리자 정보",
required = true,
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = AuthDto.Signup.class)))
schema = @Schema(implementation = AuthDto.SaveReq.class)))
@RequestBody
@Valid
AuthDto.Signup signup) {
return ApiResponseDto.createOK(authService.signup(signup));
AuthDto.SaveReq saveReq) {
return ApiResponseDto.createOK(authService.save(saveReq).getId());
}
@Operation(summary = "관리자 정보 수정", description = "관리자 정보를 수정 합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "관리자 정보 수정 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = Long.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PutMapping("/update/{id}")
public ApiResponseDto<Long> update(@PathVariable Long id, @RequestBody AuthDto.SaveReq saveReq) {
return ApiResponseDto.createOK(authService.update(id, saveReq).getId());
}
/**
 * PUT /withdrawal/{id} — marks an administrator as withdrawn and returns the id.
 *
 * <p>NOTE(review): the declared success responseCode is "201" but the handler
 * returns via {@code ApiResponseDto.deleteOk} — confirm the documented status
 * matches the actual response.
 */
@Operation(summary = "관리자 정보 탈퇴처리", description = "관리자 정보를 탈퇴처리 합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "관리자 탈퇴처리 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = Long.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PutMapping("/withdrawal/{id}")
public ApiResponseDto<Long> withdrawal(@PathVariable Long id) {
return ApiResponseDto.deleteOk(authService.withdrawal(id).getId());
}
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = AuthDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = AuthDto.Basic.class))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@Operation(summary = "관리자 상세조회", description = "관리자 정보를 조회 합니다.")
@GetMapping("/detail")
public ApiResponseDto<AuthDto.Basic> getDetail(
@io.swagger.v3.oas.annotations.parameters.RequestBody(
description = "관리자 목록 id",
required = true)
@RequestParam
Long id) {
@io.swagger.v3.oas.annotations.parameters.RequestBody(
description = "관리자 목록 id",
required = true)
@RequestParam
Long id) {
return ApiResponseDto.ok(authService.getFindUserById(id));
}
@Operation(summary = "관리자 목록", description = "관리자 목록 조회")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "검색 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = Page.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
value = {
@ApiResponse(
responseCode = "200",
description = "검색 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = Page.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@GetMapping("/list")
public ApiResponseDto<Page<Basic>> getUserList(
@Parameter(description = "관리자 이름")
@RequestParam(required = false) String userNm,
@Parameter(description = "페이지 번호 (0부터 시작)", example = "0")
@RequestParam(defaultValue = "0") int page,
@Parameter(description = "페이지 크기", example = "20")
@RequestParam(defaultValue = "20") int size,
@Parameter(description = "정렬 조건 (형식: 필드명,방향)", example = "name,asc")
@RequestParam(required = false) String sort
) {
@Parameter(description = "관리자 이름") @RequestParam(required = false) String userNm,
@Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0")
int page,
@Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20")
int size,
@Parameter(description = "정렬 조건 (형식: 필드명,방향)", example = "name,asc")
@RequestParam(required = false)
String sort) {
AuthDto.SearchReq searchReq = new AuthDto.SearchReq(userNm, page, size, sort);
Page<AuthDto.Basic> userList = authService.getUserList(searchReq);
return ApiResponseDto.ok(userList);
}
}

View File

@@ -26,10 +26,16 @@ public class AuthDto {
private String userId;
private String empId;
private String userEmail;
@JsonFormatDttm
private ZonedDateTime createdDttm;
@JsonFormatDttm private ZonedDateTime createdDttm;
public Basic(Long id, String userAuth, String userNm, String userId, String empId, String userEmail, ZonedDateTime createdDttm) {
public Basic(
Long id,
String userAuth,
String userNm,
String userId,
String empId,
String userEmail,
ZonedDateTime createdDttm) {
this.id = id;
this.userAuth = userAuth;
this.userNm = userNm;
@@ -40,10 +46,10 @@ public class AuthDto {
}
}
@Schema(name = "Signup", description = "사용자 등록 정보")
@Schema(name = "save request", description = "사용자 등록 정보")
@Getter
@Setter
public static class Signup {
public static class SaveReq {
@Schema(description = "구분", example = "관리자/라벨러/검수자 중 하나")
@NotBlank
@@ -69,7 +75,7 @@ public class AuthDto {
@NotBlank
private String userEmail;
public Signup(
public SaveReq(
String userAuth,
String userNm,
String userId,
@@ -85,8 +91,60 @@ public class AuthDto {
}
}
@Schema(name = "update request", description = "사용자 수정 정보")
@Getter
@Setter
public static class UpdateReq {
@Schema(description = "id", example = "1")
@NotBlank
private Long id;
@Schema(description = "구분", example = "관리자/라벨러/검수자 중 하나")
@NotBlank
private String userAuth;
@NotBlank
@Schema(description = "이름", example = "홍길동")
private String userNm;
@Schema(description = "ID", example = "gildong")
@NotBlank
private String userId;
@Schema(description = "PW", example = "password")
@NotBlank
private String userPw;
@Schema(description = "사번", example = "사번")
@NotBlank
private String empId;
@Schema(description = "이메일", example = "gildong@naver.com")
@NotBlank
private String userEmail;
public UpdateReq(
Long id,
String userAuth,
String userNm,
String userId,
String userPw,
String empId,
String userEmail) {
this.id = id;
this.userAuth = userAuth;
this.userNm = userNm;
this.userId = userId;
this.userPw = userPw;
this.empId = empId;
this.userEmail = userEmail;
}
}
@Getter
public static class User {
String userId;
String userPw;
}
@@ -111,7 +169,7 @@ public class AuthDto {
String[] sortParams = sort.split(",");
String property = sortParams[0];
Sort.Direction direction =
sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC;
sortParams.length > 1 ? Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC;
return PageRequest.of(page, size, Sort.by(direction, property));
}
return PageRequest.of(page, size);

View File

@@ -3,6 +3,7 @@ package com.kamco.cd.kamcoback.auth.service;
import com.kamco.cd.kamcoback.auth.dto.AuthDto;
import com.kamco.cd.kamcoback.auth.dto.AuthDto.Basic;
import com.kamco.cd.kamcoback.postgres.core.AuthCoreService;
import com.kamco.cd.kamcoback.postgres.entity.UserEntity;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.security.crypto.password.PasswordEncoder;
@@ -13,32 +14,59 @@ import org.springframework.transaction.annotation.Transactional;
@Transactional(readOnly = true)
@RequiredArgsConstructor
public class AuthService {
private final AuthCoreService authCoreService;
private final PasswordEncoder passwordEncoder;
/**
* 관리자 등록
*
* @param signup
* @param saveReq
* @return
*/
@Transactional
public Long signup(AuthDto.Signup signup) {
signup.setUserPw(passwordEncoder.encode(signup.getUserPw()));
return authCoreService.signup(signup);
public UserEntity save(AuthDto.SaveReq saveReq) {
saveReq.setUserPw(passwordEncoder.encode(saveReq.getUserPw()));
return authCoreService.save(saveReq);
}
/**
 * Updates an administrator's information.
 *
 * <p>If a new password is supplied it is encoded before being forwarded to the core
 * service; a {@code null} password leaves the stored password unchanged.
 *
 * @param id primary key of the user to update
 * @param saveReq new user data; {@code userPw} may be null to keep the current password
 * @return the updated {@code UserEntity}
 */
@Transactional // class default is readOnly = true; writes need a read-write transaction (as save() does)
public UserEntity update(Long id, AuthDto.SaveReq saveReq) {
    if (saveReq.getUserPw() != null) {
        saveReq.setUserPw(passwordEncoder.encode(saveReq.getUserPw()));
    }
    return authCoreService.update(id, saveReq);
}
/**
 * Marks an administrator as withdrawn (soft delete handled by the core service).
 *
 * @param id primary key of the user to withdraw
 * @return the withdrawn {@code UserEntity}
 */
@Transactional // class default is readOnly = true; this mutates data and must override it
public UserEntity withdrawal(Long id) {
    return authCoreService.withdrawal(id);
}
/**
* 시퀀스 id로 관리자 조회
*
* @param id
* @return
*/
public AuthDto.Basic getFindUserById(Long id){
public AuthDto.Basic getFindUserById(Long id) {
return authCoreService.findUserById(id);
}
/**
* 관리자 목록 조회
*
* @param searchReq
* @return
*/

View File

@@ -17,7 +17,7 @@ public class ChangeDetectionService {
return changeDetectionCoreService.getPolygonToPoint();
}
public List<JsonNode> getPolygonToJson(){
public List<JsonNode> getPolygonToJson() {
return changeDetectionCoreService.getPolygonToJson();
}

View File

@@ -3,11 +3,15 @@ package com.kamco.cd.kamcoback.geojson.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.time.ZonedDateTime;
import java.util.*;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.geojson.GeoJsonReader;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -18,7 +22,9 @@ import org.springframework.transaction.annotation.Transactional;
public class GeoJsonDataService {
private final MapSheetLearnDataRepository mapSheetLearnDataRepository;
private final MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
private final ObjectMapper objectMapper;
private final GeoJsonReader geoJsonReader = new GeoJsonReader();
/** GeoJSON 파일들을 데이터베이스에 저장 */
@Transactional
@@ -37,6 +43,11 @@ public class GeoJsonDataService {
if (savedId != null) {
savedIds.add(savedId);
log.debug("GeoJSON 파일 저장 성공: {} (ID: {})", fileName, savedId);
// 학습 모델 결과 파일인지 확인하여 geometry 데이터 처리
if (isLearningModelResult(fileName, geoJsonContent)) {
processLearningModelGeometry(savedId, geoJsonContent, fileName);
}
}
} catch (Exception e) {
log.error("GeoJSON 파일 처리 실패: {}", fileName, e);
@@ -163,7 +174,24 @@ public class GeoJsonDataService {
/** 연도 정보 추출 */
private void setYearInformation(MapSheetLearnDataEntity entity, String fileName) {
// 파일명에서 연도 추출 시도 (예: kamco_2021_2022_35813023.geojson)
// 학습 모델 결과 파일인지 확인하고 특별 처리
if (fileName.matches(".*캠코_\\d{4}_\\d{4}_\\d+.*")) {
String[] parts = fileName.split("_");
if (parts.length >= 4) {
String beforeYear = parts[1];
String afterYear = parts[2];
// 비교년도 정보를 첫 번째 연도의 정수로 저장
try {
entity.setCompareYyyy(Integer.parseInt(beforeYear));
log.debug("학습 모델 연도 정보 설정: {}", beforeYear);
} catch (NumberFormatException e) {
log.warn("연도 파싱 실패: {}", beforeYear, e);
}
return;
}
}
// 기존 로직: 파일명에서 연도 추출 시도
String[] parts = fileName.split("_");
for (String part : parts) {
if (part.matches("\\d{4}")) { // 4자리 숫자 (연도)
@@ -226,4 +254,177 @@ public class GeoJsonDataService {
return true;
}
/**
 * Checks whether a GeoJSON file is a learning-model result file.
 *
 * <p>Two heuristics are applied: (1) the filename matches the
 * "캠코_YYYY_YYYY_number.geojson" naming pattern, or (2) the first feature's
 * properties contain model-specific fields ({@code cd_prob}, {@code class},
 * or both {@code before} and {@code after}). Any parse error is treated as
 * "not a learning-model file" and only logged at debug level (best-effort check).
 */
private boolean isLearningModelResult(String fileName, String geoJsonContent) {
try {
// Check by filename (캠코_YYYY_YYYY_number pattern)
if (fileName.matches(".*캠코_\\d{4}_\\d{4}_\\d+.*\\.geojson")) {
return true;
}
// Check by GeoJSON content (presence of learning-model-specific fields)
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
if (rootNode.has("features")) {
JsonNode features = rootNode.get("features");
if (features.isArray() && features.size() > 0) {
// Only the first feature is inspected — assumes all features share the schema (TODO confirm)
JsonNode firstFeature = features.get(0);
if (firstFeature.has("properties")) {
JsonNode properties = firstFeature.get("properties");
// Learning-model-specific property fields
return properties.has("cd_prob")
|| properties.has("class")
|| (properties.has("before") && properties.has("after"));
}
}
}
} catch (Exception e) {
log.debug("학습 모델 결과 파일 확인 중 오류: {}", fileName, e);
}
return false;
}
/**
 * Processes the geometry data of a learning-model result GeoJSON document and
 * batch-saves one MapSheetLearnDataGeomEntity per feature.
 *
 * <p>The compare years and map-sheet number are parsed from the document's
 * {@code name} property (falling back to the filename), expected in the form
 * "캠코_beforeYear_afterYear_mapSheetNum". If years cannot be extracted, or the
 * {@code features} array is missing, the method logs a warning and returns
 * without saving. Per-feature failures are logged and skipped; only a top-level
 * failure aborts the whole file (and is swallowed with an error log).
 *
 * <p>NOTE(review): this is {@code @Transactional} but is also invoked from within
 * this same class (see the call after file save); Spring proxy-based transactions
 * do not apply on self-invocation — confirm callers go through the proxy.
 *
 * @param dataUid id of the already-saved parent MapSheetLearnDataEntity row
 * @param geoJsonContent raw GeoJSON text of the file
 * @param fileName original filename, used for logging and metadata fallback
 */
@Transactional
public void processLearningModelGeometry(Long dataUid, String geoJsonContent, String fileName) {
try {
log.info("학습 모델 geometry 데이터 처리 시작: {} (dataUid: {})", fileName, dataUid);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Extract metadata: prefer the GeoJSON "name" field, else strip the extension
String mapSheetName =
rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Parse years and map-sheet number from the name (캠코_2021_2022_35813023)
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1];
afterYear = parts[2];
mapSheetNum = parts[3];
}
if (beforeYear == null || afterYear == null) {
log.warn("연도 정보를 추출할 수 없습니다: {}", fileName);
return;
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
log.warn("features 배열이 없습니다: {}", fileName);
return;
}
List<MapSheetLearnDataGeomEntity> geomEntities = new ArrayList<>();
int processedCount = 0;
for (JsonNode feature : features) {
try {
MapSheetLearnDataGeomEntity geomEntity =
createGeometryEntity(feature, dataUid, beforeYear, afterYear, mapSheetNum);
if (geomEntity != null) {
geomEntities.add(geomEntity);
processedCount++;
}
} catch (Exception e) {
// Skip the failing feature and continue with the rest
log.warn("Feature geometry 처리 실패 (feature {}): {}", processedCount, e.getMessage());
}
}
// Batch save all successfully converted entities
if (!geomEntities.isEmpty()) {
mapSheetLearnDataGeomRepository.saveAll(geomEntities);
log.info("학습 모델 geometry 데이터 저장 완료: {} ({}개 feature)", fileName, geomEntities.size());
}
} catch (Exception e) {
log.error("학습 모델 geometry 데이터 처리 실패: {}", fileName, e);
}
}
/**
 * Builds a MapSheetLearnDataGeomEntity from a single GeoJSON feature.
 *
 * <p>Returns {@code null} when the feature lacks "properties" or "geometry",
 * or when the geometry cannot be parsed. A year that fails to parse as an
 * integer propagates as NumberFormatException (handled per-feature by the caller);
 * an unparseable map-sheet number is only logged.
 */
private MapSheetLearnDataGeomEntity createGeometryEntity(
    JsonNode feature, Long dataUid, String beforeYear, String afterYear, String mapSheetNum) {
    JsonNode props = feature.get("properties");
    JsonNode geomNode = feature.get("geometry");
    if (props == null || geomNode == null) {
        return null;
    }

    MapSheetLearnDataGeomEntity result = new MapSheetLearnDataGeomEntity();

    // Identifiers and comparison years
    result.setDataUid(dataUid);
    result.setBeforeYyyy(Integer.parseInt(beforeYear));
    result.setAfterYyyy(Integer.parseInt(afterYear));
    if (mapSheetNum != null) {
        try {
            result.setMapSheetNum(Long.parseLong(mapSheetNum));
        } catch (NumberFormatException e) {
            log.warn("지도번호 파싱 실패: {}", mapSheetNum, e);
        }
    }

    // Optional numeric attributes
    if (props.has("cd_prob")) {
        result.setCdProb(props.get("cd_prob").asDouble());
    }
    if (props.has("area")) {
        result.setArea(props.get("area").asDouble());
    }

    // Classification info: take the first entry of the "before"/"after" arrays, if any
    if (props.has("class")) {
        JsonNode classNode = props.get("class");
        JsonNode beforeList = classNode.get("before");
        if (beforeList != null && beforeList.isArray() && beforeList.size() > 0) {
            JsonNode top = beforeList.get(0);
            if (top.has("class_name")) {
                result.setClassBeforeName(top.get("class_name").asText());
            }
            if (top.has("probability")) {
                result.setClassBeforeProb(top.get("probability").asDouble());
            }
        }
        JsonNode afterList = classNode.get("after");
        if (afterList != null && afterList.isArray() && afterList.size() > 0) {
            JsonNode top = afterList.get(0);
            if (top.has("class_name")) {
                result.setClassAfterName(top.get("class_name").asText());
            }
            if (top.has("probability")) {
                result.setClassAfterProb(top.get("probability").asDouble());
            }
        }
    }

    // Convert GeoJSON geometry to JTS and tag it with EPSG:5186
    try {
        Geometry parsed = geoJsonReader.read(geomNode.toString());
        if (parsed != null) {
            parsed.setSRID(5186); // EPSG:5186
            result.setGeom(parsed);
        }
    } catch (Exception e) {
        log.warn("Geometry 파싱 실패: {}", e.getMessage());
        return null;
    }
    return result;
}
}

View File

@@ -2,7 +2,9 @@ package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.LearningModelResultDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultService;
import com.kamco.cd.kamcoback.inference.service.LearningModelResultProcessor;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
@@ -10,10 +12,17 @@ import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@@ -24,7 +33,10 @@ import org.springframework.web.bind.annotation.RestController;
@RestController
public class InferenceResultApiController {
private static final Logger logger = LoggerFactory.getLogger(InferenceResultApiController.class);
private final InferenceResultService inferenceResultService;
private final LearningModelResultProcessor learningModelResultProcessor;
@Operation(summary = "추론관리 분석결과 목록 조회", description = "분석상태, 제목으로 분석결과를 조회 합니다.")
@ApiResponses(
@@ -132,4 +144,182 @@ public class InferenceResultApiController {
inferenceResultService.getInferenceResultGeomList(searchGeoReq);
return ApiResponseDto.ok(geomList);
}
@Operation(summary = "학습모델 결과 처리", description = "실제 학습모델 GeoJSON 파일을 처리하여 데이터베이스에 저장합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "처리 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(implementation = LearningModelResultDto.ProcessResponse.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/learning-model/process")
public ApiResponseDto<LearningModelResultDto.ProcessResponse> processLearningModelResult(
@RequestBody LearningModelResultDto.ProcessRequest request) {
try {
logger.info("Processing learning model result file: {}", request.getFilePath());
Path filePath = Paths.get(request.getFilePath());
int processedFeatures = learningModelResultProcessor.processLearningModelResult(filePath);
LearningModelResultDto.ProcessResponse response =
LearningModelResultDto.ProcessResponse.builder()
.success(true)
.message("학습모델 결과 처리가 완료되었습니다.")
.processedFeatures(processedFeatures)
.filePath(request.getFilePath())
.build();
logger.info(
"Successfully processed {} features from file: {}",
processedFeatures,
request.getFilePath());
return ApiResponseDto.ok(response);
} catch (Exception e) {
logger.error("Failed to process learning model result: {}", request.getFilePath(), e);
LearningModelResultDto.ProcessResponse response =
LearningModelResultDto.ProcessResponse.builder()
.success(false)
.message("학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
.processedFeatures(0)
.filePath(request.getFilePath())
.build();
return ApiResponseDto.ok(response);
}
}
@Operation(summary = "학습모델 결과 일괄 처리", description = "여러 학습모델 GeoJSON 파일을 일괄 처리하여 데이터베이스에 저장합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "처리 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(
implementation = LearningModelResultDto.BatchProcessResponse.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/learning-model/process-batch")
public ApiResponseDto<LearningModelResultDto.BatchProcessResponse>
processBatchLearningModelResults(
@RequestBody LearningModelResultDto.BatchProcessRequest request) {
try {
logger.info("Processing {} learning model result files", request.getFilePaths().size());
List<Path> filePaths = new ArrayList<>();
for (String filePath : request.getFilePaths()) {
filePaths.add(Paths.get(filePath));
}
int totalProcessedFeatures =
learningModelResultProcessor.processMultipleLearningModelResults(filePaths);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(true)
.message("일괄 학습모델 결과 처리가 완료되었습니다.")
.totalProcessedFeatures(totalProcessedFeatures)
.processedFileCount(request.getFilePaths().size())
.filePaths(request.getFilePaths())
.build();
logger.info(
"Successfully processed {} features from {} files",
totalProcessedFeatures,
request.getFilePaths().size());
return ApiResponseDto.ok(response);
} catch (Exception e) {
logger.error("Failed to process batch learning model results", e);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(false)
.message("일괄 학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
.totalProcessedFeatures(0)
.processedFileCount(0)
.filePaths(request.getFilePaths())
.build();
return ApiResponseDto.ok(response);
}
}
@Operation(summary = "기본 학습모델 파일 처리", description = "미리 준비된 학습모델 파일을 처리합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "처리 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(
implementation = LearningModelResultDto.BatchProcessResponse.class))),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/learning-model/process-default")
public ApiResponseDto<LearningModelResultDto.BatchProcessResponse>
processDefaultLearningModelResults() {
try {
logger.info("Processing default learning model result files");
// Process the two default learning model files from upload directory
List<String> defaultFilePaths =
List.of(
"/Users/deniallee/geojson/upload/캠코_2021_2022_35813023.geojson",
"/Users/deniallee/geojson/upload/캠코_2023_2024_35810049.geojson");
List<Path> filePaths = new ArrayList<>();
for (String filePath : defaultFilePaths) {
filePaths.add(Paths.get(filePath));
}
int totalProcessedFeatures =
learningModelResultProcessor.processMultipleLearningModelResults(filePaths);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(true)
.message("기본 학습모델 결과 파일 처리가 완료되었습니다.")
.totalProcessedFeatures(totalProcessedFeatures)
.processedFileCount(defaultFilePaths.size())
.filePaths(defaultFilePaths)
.build();
logger.info(
"Successfully processed {} features from {} default files",
totalProcessedFeatures,
defaultFilePaths.size());
return ApiResponseDto.ok(response);
} catch (Exception e) {
logger.error("Failed to process default learning model results", e);
LearningModelResultDto.BatchProcessResponse response =
LearningModelResultDto.BatchProcessResponse.builder()
.success(false)
.message("기본 학습모델 결과 처리 중 오류가 발생했습니다: " + e.getMessage())
.totalProcessedFeatures(0)
.processedFileCount(0)
.filePaths(List.of())
.build();
return ApiResponseDto.ok(response);
}
}
}

View File

@@ -0,0 +1,180 @@
package com.kamco.cd.kamcoback.inference.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * DTO classes for learning model result processing.
 *
 * <p>Container for the request/response payloads of the learning-model
 * processing endpoints. All nested classes are Lombok-generated data holders
 * ({@code @Data}/{@code @Builder}) annotated with Swagger schema metadata.
 */
public class LearningModelResultDto {
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 결과 처리 요청")
/** Request to process a single learning-model GeoJSON file. */
public static class ProcessRequest {
@Schema(
description = "GeoJSON 파일 경로",
example =
"src/main/resources/db/migration/sample-results_updated/캠코_2021_2022_35813023.geojson")
private String filePath;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 결과 처리 응답")
/** Outcome of processing a single file: success flag, message, feature count. */
public static class ProcessResponse {
@Schema(description = "처리 성공 여부")
private boolean success;
@Schema(description = "처리 결과 메시지")
private String message;
@Schema(description = "처리된 feature 개수")
private int processedFeatures;
@Schema(description = "처리된 파일 경로")
private String filePath;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 결과 일괄 처리 요청")
/** Request to process multiple learning-model GeoJSON files in one call. */
public static class BatchProcessRequest {
@Schema(description = "GeoJSON 파일 경로 목록")
private List<String> filePaths;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 결과 일괄 처리 응답")
/** Aggregate outcome of a batch run: totals across all processed files. */
public static class BatchProcessResponse {
@Schema(description = "처리 성공 여부")
private boolean success;
@Schema(description = "처리 결과 메시지")
private String message;
@Schema(description = "전체 처리된 feature 개수")
private int totalProcessedFeatures;
@Schema(description = "처리된 파일 개수")
private int processedFileCount;
@Schema(description = "처리된 파일 경로 목록")
private List<String> filePaths;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 처리 상태")
/** Progress snapshot of an (asynchronous) processing run. */
public static class ProcessingStatus {
@Schema(description = "처리 ID")
private String processingId;
@Schema(description = "처리 상태 (PENDING, PROCESSING, COMPLETED, FAILED)")
private String status;
@Schema(description = "진행률 (0-100)")
private int progressPercentage;
@Schema(description = "현재 처리 중인 파일")
private String currentFile;
@Schema(description = "전체 파일 개수")
private int totalFiles;
@Schema(description = "처리 완료된 파일 개수")
private int completedFiles;
@Schema(description = "시작 시간")
private String startTime;
@Schema(description = "예상 완료 시간")
private String estimatedEndTime;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "학습모델 데이터 요약")
/** Summary statistics over stored learning-model data. */
public static class DataSummary {
@Schema(description = "전체 데이터 개수")
private long totalRecords;
@Schema(description = "연도별 데이터 개수")
private List<YearDataCount> yearDataCounts;
@Schema(description = "분류별 데이터 개수")
private List<ClassDataCount> classDataCounts;
@Schema(description = "지도 영역별 데이터 개수")
private List<MapSheetDataCount> mapSheetDataCounts;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "연도별 데이터 개수")
/** Record count per compare-year pair. */
public static class YearDataCount {
@Schema(description = "비교 연도 (예: 2021_2022)")
private String compareYear;
@Schema(description = "데이터 개수")
private long count;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "분류별 데이터 개수")
/** Record counts per classification label, before and after change. */
public static class ClassDataCount {
@Schema(description = "분류명")
private String className;
@Schema(description = "변화 전 개수")
private long beforeCount;
@Schema(description = "변화 후 개수")
private long afterCount;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Schema(description = "지도 영역별 데이터 개수")
/** Record count and mean change-detection probability per map sheet. */
public static class MapSheetDataCount {
@Schema(description = "지도 영역 번호")
private String mapSheetNum;
@Schema(description = "데이터 개수")
private long count;
@Schema(description = "평균 변화 탐지 확률")
private double avgChangeDetectionProb;
}
}

View File

@@ -0,0 +1,384 @@
package com.kamco.cd.kamcoback.inference.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataGeomRepository;
import com.kamco.cd.kamcoback.postgres.repository.MapSheetLearnDataRepository;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.geojson.GeoJsonReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* Service for processing actual learning model result GeoJSON files and storing them in the
* database with proper field mapping.
*/
@Service
public class LearningModelResultProcessor {
private static final Logger logger = LoggerFactory.getLogger(LearningModelResultProcessor.class);
@Autowired private MapSheetLearnDataRepository mapSheetLearnDataRepository;
@Autowired private MapSheetLearnDataGeomRepository mapSheetLearnDataGeomRepository;
private final ObjectMapper objectMapper = new ObjectMapper();
private final GeoJsonReader geoJsonReader = new GeoJsonReader();
/**
 * Processes a large learning-model result GeoJSON file with small-batch persistence.
 *
 * <p>Steps: (1) read and validate the file as a FeatureCollection; (2) parse the
 * before/after years and map-sheet number from the document name or filename
 * ("name_YYYY_YYYY_num"); (3) create the parent data record; (4) persist features
 * in batches of 25 via {@code processBatchSafely} (defined elsewhere in this class),
 * continuing past failed batches instead of aborting.
 *
 * <p>NOTE(review): the helper {@code createMainDataRecord} below is a private
 * {@code @Transactional} method invoked via self-call — Spring's proxy-based
 * transactions do not apply to private/self-invoked methods; confirm the intended
 * transaction boundary.
 *
 * @param geoJsonFilePath path to the GeoJSON file to ingest
 * @return number of features successfully persisted (may be less than the total)
 * @throws RuntimeException if the file cannot be read or top-level parsing fails
 */
public int processLearningModelResultOptimized(Path geoJsonFilePath) {
try {
logger.info("Processing learning model result file (optimized): {}", geoJsonFilePath);
// Read and parse GeoJSON file
String geoJsonContent = Files.readString(geoJsonFilePath);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Validate GeoJSON structure
if (!rootNode.has("type") || !"FeatureCollection".equals(rootNode.get("type").asText())) {
throw new IllegalArgumentException("Invalid GeoJSON: Not a FeatureCollection");
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
throw new IllegalArgumentException("Invalid GeoJSON: No features array found");
}
// Extract metadata from file name and content
String fileName = geoJsonFilePath.getFileName().toString();
String mapSheetName = rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Parse years from filename
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1];
afterYear = parts[2];
mapSheetNum = parts[3];
}
if (beforeYear == null || afterYear == null || mapSheetNum == null) {
throw new IllegalArgumentException("Cannot parse years and map sheet number from filename: " + fileName);
}
int totalFeatures = features.size();
logger.info("Total features to process: {}", totalFeatures);
// Step 1: Create main data record first
MapSheetLearnDataEntity savedMainData = createMainDataRecord(geoJsonContent, fileName, geoJsonFilePath.toString(), beforeYear, afterYear, mapSheetNum);
// Step 2: Process features in small batches to avoid transaction timeout
int totalProcessed = 0;
int batchSize = 25; // Smaller batch size for reliability
for (int i = 0; i < totalFeatures; i += batchSize) {
int endIndex = Math.min(i + batchSize, totalFeatures);
logger.info("Processing batch {}-{} of {}", i + 1, endIndex, totalFeatures);
List<JsonNode> batch = new ArrayList<>();
for (int j = i; j < endIndex; j++) {
batch.add(features.get(j));
}
try {
int processed = processBatchSafely(batch, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
totalProcessed += processed;
logger.info("Batch processed successfully. Total so far: {}/{}", totalProcessed, totalFeatures);
} catch (Exception e) {
logger.error("Failed to process batch {}-{}: {}", i + 1, endIndex, e.getMessage());
// Continue with next batch instead of failing completely
}
}
logger.info("Successfully processed {} out of {} features from file: {}", totalProcessed, totalFeatures, fileName);
return totalProcessed;
} catch (IOException e) {
logger.error("Failed to read GeoJSON file: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result file", e);
} catch (Exception e) {
logger.error("Error processing learning model result: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result", e);
}
}
@Transactional
private MapSheetLearnDataEntity createMainDataRecord(String geoJsonContent, String fileName, String filePath, String beforeYear, String afterYear, String mapSheetNum) {
MapSheetLearnDataEntity mainData = createMainDataEntity(geoJsonContent, fileName, filePath, beforeYear, afterYear, mapSheetNum);
MapSheetLearnDataEntity saved = mapSheetLearnDataRepository.save(mainData);
logger.info("Created main data record with ID: {}", saved.getId());
return saved;
}
@Transactional
private int processBatchSafely(List<JsonNode> features, Long dataUid, String beforeYear, String afterYear, String mapSheetNum) {
int processed = 0;
for (JsonNode feature : features) {
try {
long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000);
insertFeatureWithPostGIS(feature, geoUid, dataUid, beforeYear, afterYear, mapSheetNum);
processed++;
// Small delay to prevent ID collisions
try { Thread.sleep(1); } catch (InterruptedException e) { Thread.currentThread().interrupt(); }
} catch (Exception e) {
logger.warn("Failed to process individual feature: {}", e.getMessage());
// Continue processing other features in this batch
}
}
return processed;
}
/**
* Process a learning model result GeoJSON file and store it in the database
*
* @param geoJsonFilePath Path to the GeoJSON file
* @return Number of features processed
*/
@Transactional
public int processLearningModelResult(Path geoJsonFilePath) {
try {
logger.info("Processing learning model result file: {}", geoJsonFilePath);
// Read and parse GeoJSON file
String geoJsonContent = Files.readString(geoJsonFilePath);
JsonNode rootNode = objectMapper.readTree(geoJsonContent);
// Validate GeoJSON structure
if (!rootNode.has("type") || !"FeatureCollection".equals(rootNode.get("type").asText())) {
throw new IllegalArgumentException("Invalid GeoJSON: Not a FeatureCollection");
}
JsonNode features = rootNode.get("features");
if (features == null || !features.isArray()) {
throw new IllegalArgumentException("Invalid GeoJSON: No features array found");
}
// Extract metadata from file name and content
String fileName = geoJsonFilePath.getFileName().toString();
String mapSheetName =
rootNode.has("name") ? rootNode.get("name").asText() : fileName.replace(".geojson", "");
// Parse years from filename or data (e.g., "캠코_2021_2022_35813023")
String[] parts = mapSheetName.split("_");
String beforeYear = null, afterYear = null, mapSheetNum = null;
if (parts.length >= 4) {
beforeYear = parts[1]; // 2021 or 2023
afterYear = parts[2]; // 2022 or 2024
mapSheetNum = parts[3]; // 35813023 or 35810049
}
if (beforeYear == null || afterYear == null || mapSheetNum == null) {
throw new IllegalArgumentException(
"Cannot parse years and map sheet number from filename: " + fileName);
}
// Create main data record
MapSheetLearnDataEntity mainData =
createMainDataEntity(
geoJsonContent,
fileName,
geoJsonFilePath.toString(),
beforeYear,
afterYear,
mapSheetNum);
MapSheetLearnDataEntity savedMainData = mapSheetLearnDataRepository.save(mainData);
logger.info("Saved main data record with ID: {}", savedMainData.getId());
// Process each feature in the GeoJSON using direct PostGIS insertion
int featureCount = 0;
int batchSize = 10; // Much smaller batch size to avoid transaction timeout
for (int i = 0; i < features.size(); i += batchSize) {
int endIndex = Math.min(i + batchSize, features.size());
logger.info("Processing batch {}-{} of {} features", i + 1, endIndex, features.size());
// Process each feature individually within this logging batch
for (int j = i; j < endIndex; j++) {
JsonNode feature = features.get(j);
try {
// Generate unique ID for this geometry entity
long geoUid = System.currentTimeMillis() + (long) (Math.random() * 10000) + j;
// Extract feature data and insert directly with PostGIS
insertFeatureWithPostGIS(feature, geoUid, savedMainData.getId(), beforeYear, afterYear, mapSheetNum);
featureCount++;
// Small delay to prevent issues
if (j % 5 == 0) {
try { Thread.sleep(10); } catch (InterruptedException e) { Thread.currentThread().interrupt(); }
}
} catch (Exception e) {
logger.warn("Failed to process feature {}: {}", j + 1, e.getMessage());
}
}
// Log progress after each batch
if (featureCount > 0 && endIndex % batchSize == 0) {
logger.info("Processed {} features so far, success rate: {:.1f}%",
featureCount, (featureCount * 100.0) / endIndex);
}
}
logger.info("Successfully processed {} features from file: {}", featureCount, fileName);
return featureCount;
} catch (IOException e) {
logger.error("Failed to read GeoJSON file: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result file", e);
} catch (Exception e) {
logger.error("Error processing learning model result: {}", geoJsonFilePath, e);
throw new RuntimeException("Failed to process learning model result", e);
}
}
/** Create the main data entity for tb_map_sheet_learn_data table */
private MapSheetLearnDataEntity createMainDataEntity(
String geoJsonContent,
String fileName,
String filePath,
String beforeYear,
String afterYear,
String mapSheetNum) {
MapSheetLearnDataEntity entity = new MapSheetLearnDataEntity();
// Generate unique ID (using current timestamp + random component)
entity.setId(System.currentTimeMillis() + (long) (Math.random() * 1000));
LocalDateTime now = LocalDateTime.now();
entity.setAnalStrtDttm(ZonedDateTime.now());
entity.setAnalEndDttm(ZonedDateTime.now());
entity.setCompareYyyy(Integer.parseInt(beforeYear)); // 첫 번째 연도만 저장
// JSON 데이터를 Map으로 변환하여 저장
try {
@SuppressWarnings("unchecked")
Map<String, Object> jsonMap = objectMapper.readValue(geoJsonContent, Map.class);
entity.setDataJson(jsonMap);
} catch (Exception e) {
logger.warn("JSON 파싱 실패, 빈 Map으로 저장: {}", fileName, e);
entity.setDataJson(new HashMap<>());
}
entity.setDataName(fileName);
entity.setDataPath(filePath);
entity.setDataState("PROCESSED");
entity.setCreatedDttm(ZonedDateTime.now());
entity.setUpdatedDttm(ZonedDateTime.now());
return entity;
}
/** Insert GeoJSON feature directly using PostGIS functions */
private void insertFeatureWithPostGIS(
JsonNode feature, Long geoUid, Long dataUid, String beforeYear, String afterYear, String mapSheetNum)
throws Exception {
JsonNode properties = feature.get("properties");
JsonNode geometry = feature.get("geometry");
if (properties == null || geometry == null) {
throw new IllegalArgumentException("Feature missing properties or geometry");
}
// Extract properties
Double cdProb = properties.has("cd_prob") ? properties.get("cd_prob").asDouble() : null;
Double area = properties.has("area") ? properties.get("area").asDouble() : null;
String classBeforeName = null;
Double classBeforeProb = null;
String classAfterName = null;
Double classAfterProb = null;
// Classification data
JsonNode classNode = properties.get("class");
if (classNode != null) {
// Before classification
JsonNode beforeClass = classNode.get("before");
if (beforeClass != null && beforeClass.isArray() && beforeClass.size() > 0) {
JsonNode firstBefore = beforeClass.get(0);
if (firstBefore.has("class_name")) {
classBeforeName = firstBefore.get("class_name").asText();
}
if (firstBefore.has("probability")) {
classBeforeProb = firstBefore.get("probability").asDouble();
}
}
// After classification
JsonNode afterClass = classNode.get("after");
if (afterClass != null && afterClass.isArray() && afterClass.size() > 0) {
JsonNode firstAfter = afterClass.get(0);
if (firstAfter.has("class_name")) {
classAfterName = firstAfter.get("class_name").asText();
}
if (firstAfter.has("probability")) {
classAfterProb = firstAfter.get("probability").asDouble();
}
}
}
// Get geometry type
String geoType = geometry.has("type") ? geometry.get("type").asText() : "Unknown";
// Convert geometry to JSON string for PostGIS
String geometryJson = geometry.toString();
// Insert using PostGIS functions
mapSheetLearnDataGeomRepository.insertWithPostGISGeometry(
geoUid, cdProb, classBeforeName, classBeforeProb,
classAfterName, classAfterProb, Long.parseLong(mapSheetNum),
Integer.parseInt(beforeYear), Integer.parseInt(afterYear),
area, geometryJson, geoType, dataUid
);
logger.debug("Inserted geometry entity with ID: {} using PostGIS", geoUid);
}
/**
* Process multiple learning model result files
*
* @param filePaths List of GeoJSON file paths
* @return Total number of features processed across all files
*/
@Transactional
public int processMultipleLearningModelResults(List<Path> filePaths) {
int totalProcessed = 0;
for (Path filePath : filePaths) {
try {
int processed = processLearningModelResult(filePath);
totalProcessed += processed;
logger.info("Processed {} features from file: {}", processed, filePath.getFileName());
} catch (Exception e) {
logger.error("Failed to process file: {}", filePath, e);
// Continue processing other files even if one fails
}
}
logger.info("Total features processed across all files: {}", totalProcessed);
return totalProcessed;
}
}

View File

@@ -1,9 +1,11 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.auth.dto.AuthDto;
import com.kamco.cd.kamcoback.auth.dto.AuthDto.SaveReq;
import com.kamco.cd.kamcoback.postgres.entity.UserEntity;
import com.kamco.cd.kamcoback.postgres.repository.auth.AuthRepository;
import jakarta.persistence.EntityNotFoundException;
import java.time.ZonedDateTime;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@@ -11,33 +13,105 @@ import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
public class AuthCoreService {
private final AuthRepository authRepository;
/**
* 관리자 등록
*
* @param signup
* @param saveReq
* @return
*/
public Long signup(AuthDto.Signup signup) {
if (authRepository.findByUserId(signup.getUserId()).isPresent()) {
new EntityNotFoundException("중복된 아이디가 있습니다. " + signup.getUserId());
public UserEntity save(SaveReq saveReq) {
if (authRepository.findByUserId(saveReq.getUserId()).isPresent()) {
new EntityNotFoundException("중복된 아이디가 있습니다. " + saveReq.getUserId());
}
return authRepository.signup(signup);
UserEntity userEntity =
new UserEntity(
null,
saveReq.getUserAuth(),
saveReq.getUserNm(),
saveReq.getUserId(),
saveReq.getEmpId(),
saveReq.getUserEmail(),
saveReq.getUserPw());
return authRepository.save(userEntity);
}
/**
* 관리자 정보 수정
*
* @param id
* @param saveReq
* @return
*/
public UserEntity update(Long id, AuthDto.SaveReq saveReq) {
UserEntity userEntity =
authRepository.findById(id).orElseThrow(() -> new RuntimeException("유저가 존재하지 않습니다."));
if (saveReq.getUserAuth() != null) {
userEntity.setUserAuth(saveReq.getUserAuth());
}
if (saveReq.getUserNm() != null) {
userEntity.setUserNm(saveReq.getUserNm());
}
if (saveReq.getUserId() != null) {
userEntity.setUserId(saveReq.getUserId());
}
if (saveReq.getEmpId() != null) {
userEntity.setEmpId(saveReq.getEmpId());
}
if (saveReq.getUserEmail() != null) {
userEntity.setUserEmail(saveReq.getUserEmail());
}
if (saveReq.getUserPw() != null) {
userEntity.setUserPw(saveReq.getUserPw());
}
return authRepository.save(userEntity);
}
/**
* 관리자 삭제
*
* @param id
* @return
*/
public UserEntity withdrawal(Long id) {
UserEntity userEntity =
authRepository.findById(id).orElseThrow(() -> new RuntimeException("유저가 존재하지 않습니다."));
userEntity.setId(id);
userEntity.setDateWithdrawal(ZonedDateTime.now());
userEntity.setState("WITHDRAWAL");
return authRepository.save(userEntity);
}
/**
* 시퀀스 id로 관리자 조회
*
* @param id
* @return
*/
public AuthDto.Basic findUserById(Long id){
UserEntity entity = authRepository.findUserById(id).orElseThrow(() -> new EntityNotFoundException("관리자를 찾을 수 없습니다. " + id));
public AuthDto.Basic findUserById(Long id) {
UserEntity entity =
authRepository
.findUserById(id)
.orElseThrow(() -> new EntityNotFoundException("관리자를 찾을 수 없습니다. " + id));
return entity.toDto();
}
/**
* 관리자 목록 조회
*
* @param searchReq
* @return
*/

View File

@@ -29,7 +29,8 @@ public class ChangeDetectionCoreService {
// 중심 좌표 계산
Point centroid = polygon.getCentroid();
return new ChangeDetectionDto.TestDto(p.getId(), polygon, centroid.getX(), centroid.getY());
return new ChangeDetectionDto.TestDto(
p.getId(), polygon, centroid.getX(), centroid.getY());
})
.collect(Collectors.toList());
}
@@ -39,15 +40,15 @@ public class ChangeDetectionCoreService {
ObjectMapper mapper = new ObjectMapper();
return list.stream()
.map(
s -> {
try {
return mapper.readTree(s);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
})
.collect(Collectors.toList());
.map(
s -> {
try {
return mapper.readTree(s);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
})
.collect(Collectors.toList());
}
public List<ChangeDetectionDto.CountDto> getChangeDetectionClassCount(Long id) {

View File

@@ -10,12 +10,9 @@ import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import java.util.Map;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
@Getter
@Setter
@@ -149,5 +146,4 @@ public class MapSheetAnalDataEntity {
@Column(name = "ref_map_sheet_num")
private Long refMapSheetNum;
}

View File

@@ -12,13 +12,16 @@ import jakarta.persistence.UniqueConstraint;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
@Getter
@Setter
@Entity
@NoArgsConstructor(access = AccessLevel.PROTECTED)
@Table(
name = "tb_user",
uniqueConstraints = {@UniqueConstraint(name = "ux_tb_user_user_id", columnNames = "user_id")})
@@ -52,7 +55,7 @@ public class UserEntity {
@NotNull
@ColumnDefault("'ACTIVE'")
@Column(name = "state", nullable = false)
private String state;
private String state = "ACTIVE";
@Column(name = "date_withdrawal")
private ZonedDateTime dateWithdrawal;
@@ -85,16 +88,31 @@ public class UserEntity {
@Column(name = "emp_id", nullable = false)
private String empId;
public UserEntity(
Long id,
String userAuth,
String userNm,
String userId,
String empId,
String userEmail,
String userPw) {
this.id = id;
this.userAuth = userAuth;
this.userNm = userNm;
this.userId = userId;
this.empId = empId;
this.userEmail = userEmail;
this.userPw = userPw;
}
public AuthDto.Basic toDto() {
return new AuthDto.Basic(
this.id,
this.userAuth,
this.userNm,
this.userId,
this.empId,
this.userEmail,
this.createdDttm
) ;
this.id,
this.userAuth,
this.userNm,
this.userId,
this.empId,
this.userEmail,
this.createdDttm);
}
}

View File

@@ -3,7 +3,11 @@ package com.kamco.cd.kamcoback.postgres.repository;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnDataGeomEntity;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
@Repository
public interface MapSheetLearnDataGeomRepository
@@ -24,4 +28,40 @@ public interface MapSheetLearnDataGeomRepository
/** 데이터 UID로 기존 지오메트리 데이터 삭제 (재생성 전에 사용) */
void deleteByDataUid(Long dataUid);
/**
* PostGIS 함수를 사용하여 geometry 데이터를 직접 삽입
* ST_SetSRID(ST_GeomFromGeoJSON(...), 5186) 형식으로 저장
*/
@Modifying
@Transactional
@Query(value = """
INSERT INTO tb_map_sheet_learn_data_geom (
geo_uid, cd_prob, class_before_name, class_before_prob,
class_after_name, class_after_prob, map_sheet_num,
before_yyyy, after_yyyy, area, geom, geo_type, data_uid,
created_dttm, updated_dttm
) VALUES (
:geoUid, :cdProb, :classBeforeName, :classBeforeProb,
:classAfterName, :classAfterProb, :mapSheetNum,
:beforeYyyy, :afterYyyy, :area,
ST_SetSRID(ST_GeomFromGeoJSON(CAST(:geometryJson AS TEXT)), 5186),
:geoType, :dataUid, NOW(), NOW()
) ON CONFLICT (geo_uid) DO NOTHING
""", nativeQuery = true)
void insertWithPostGISGeometry(
@Param("geoUid") Long geoUid,
@Param("cdProb") Double cdProb,
@Param("classBeforeName") String classBeforeName,
@Param("classBeforeProb") Double classBeforeProb,
@Param("classAfterName") String classAfterName,
@Param("classAfterProb") Double classAfterProb,
@Param("mapSheetNum") Long mapSheetNum,
@Param("beforeYyyy") Integer beforeYyyy,
@Param("afterYyyy") Integer afterYyyy,
@Param("area") Double area,
@Param("geometryJson") String geometryJson,
@Param("geoType") String geoType,
@Param("dataUid") Long dataUid
);
}

View File

@@ -7,7 +7,6 @@ import java.util.Optional;
import org.springframework.data.domain.Page;
public interface AuthRepositoryCustom {
Long signup(AuthDto.Signup signup);
Optional<UserEntity> findByUserId(String userId);

View File

@@ -19,36 +19,10 @@ import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class AuthRepositoryImpl implements AuthRepositoryCustom {
private final JPAQueryFactory queryFactory;
private final QUserEntity userEntity = QUserEntity.userEntity;
/**
* 관리자 등록
*
* @param signup
* @return
*/
@Override
public Long signup(AuthDto.Signup signup) {
return queryFactory
.insert(userEntity)
.columns(
userEntity.userAuth,
userEntity.userId,
userEntity.userNm,
userEntity.userPw,
userEntity.userEmail,
userEntity.empId)
.values(
signup.getUserAuth(),
signup.getUserId(),
signup.getUserNm(),
signup.getUserPw(),
signup.getUserEmail(),
signup.getEmpId())
.execute();
}
/**
* 유저 아이디로 조회
*
@@ -63,17 +37,19 @@ public class AuthRepositoryImpl implements AuthRepositoryCustom {
/**
* 유저 시퀀스 id로 조회
*
* @param id
* @return
*/
@Override
public Optional<UserEntity> findUserById(Long id) {
return Optional.ofNullable(
queryFactory.selectFrom(userEntity).where(userEntity.id.eq(id)).fetchOne());
queryFactory.selectFrom(userEntity).where(userEntity.id.eq(id)).fetchOne());
}
/**
* 관리자 목록 조회
*
* @param searchReq
* @return
*/
@@ -86,25 +62,23 @@ public class AuthRepositoryImpl implements AuthRepositoryCustom {
}
List<Basic> content =
queryFactory
.select(Projections.constructor(AuthDto.Basic.class,
userEntity.id,
userEntity.userAuth,
userEntity.userNm,
userEntity.userId,
userEntity.empId,
userEntity.userEmail,
userEntity.createdDttm
))
.from(userEntity)
.where(
builder
)
.orderBy(userEntity.userId.asc())
.fetch();
queryFactory
.select(
Projections.constructor(
AuthDto.Basic.class,
userEntity.id,
userEntity.userAuth,
userEntity.userNm,
userEntity.userId,
userEntity.empId,
userEntity.userEmail,
userEntity.createdDttm))
.from(userEntity)
.where(builder)
.orderBy(userEntity.userId.asc())
.fetch();
long total =
queryFactory.select(userEntity.id).from(userEntity).where(builder).fetchCount();
long total = queryFactory.select(userEntity.id).from(userEntity).where(builder).fetchCount();
return new PageImpl<>(content, pageable, total);
}

View File

@@ -40,14 +40,12 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
}
@Override
public List<String> findPolygonJson(){
public List<String> findPolygonJson() {
return queryFactory
.select(
Expressions.stringTemplate("ST_AsGeoJSON({0})", mapSheetAnalDataGeomEntity.geom)
)
.from(mapSheetAnalDataGeomEntity)
.orderBy(mapSheetAnalDataGeomEntity.id.desc())
.fetch();
.select(Expressions.stringTemplate("ST_AsGeoJSON({0})", mapSheetAnalDataGeomEntity.geom))
.from(mapSheetAnalDataGeomEntity)
.orderBy(mapSheetAnalDataGeomEntity.id.desc())
.fetch();
}
@Override