[KC-99] Fix inference management registration DTO, apply Spotless

2026-01-09 11:21:43 +09:00
parent 6235cda597
commit ac5423ef3e
13 changed files with 140 additions and 28 deletions

File: InferenceResultDto.java

@@ -64,9 +64,10 @@ public class InferenceResultDto {
   @Getter
   @AllArgsConstructor
   public enum MapSheetScope implements EnumType {
-    EXCL("추론제외"),
-    PREV("이전 년도 도엽 사용"),
+    ALL("전체"),
+    PART("부분"),
     ;

     private final String desc;

     @Override
@@ -84,8 +85,8 @@ public class InferenceResultDto {
   @Getter
   @AllArgsConstructor
   public enum DetectOption implements EnumType {
-    ALL("전체"),
-    PART("부분"),
+    EXCL("추론제외"),
+    PREV("이전 년도 도엽 사용"),
     ;

     private final String desc;
@@ -131,16 +132,16 @@ public class InferenceResultDto {
     @NotNull
     private Integer targetYyyy;

-    @Schema(description = "분석대상 도엽 - 전체(ALL), 부분(PART)", example = "PART")
-    @NotBlank
-    @EnumValid(enumClass = DetectOption.class, message = "분석대상 도엽 옵션은 '전체', '부분' 만 사용 가능합니다.")
-    private String mapSheetScope;
-
     @Schema(description = "탐지 데이터 옵션 - 추론제외(PREV), 이전 년도 도엽 사용(PREV)", example = "EXCL")
     @NotBlank
     @EnumValid(
         enumClass = MapSheetScope.class,
         message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.")
+    private String mapSheetScope;
+
+    @Schema(description = "분석대상 도엽 - 전체(ALL), 부분(PART)", example = "PART")
+    @NotBlank
+    @EnumValid(enumClass = DetectOption.class, message = "분석대상 도엽 옵션은 '전체', '부분' 만 사용 가능합니다.")
     private String detectOption;

     @Schema(description = "5k 도협 번호 목록", example = "[34607067,34607067]")

File: LabelAllocateApiController.java

@@ -76,9 +76,7 @@ public class LabelAllocateApiController {
               schema = @Schema(allowableValues = {"LABELER", "REVIEWER"}))
           @RequestParam(required = false)
           String type,
-      @Parameter(
-          description = "검색어 (작업자 이름 또는 사번으로 검색, 부분 일치) - 미입력 시 전체 조회",
-          example = "김라벨")
+      @Parameter(description = "검색어 (작업자 이름 또는 사번으로 검색, 부분 일치) - 미입력 시 전체 조회", example = "김라벨")
           @RequestParam(required = false)
           String search,
       @Parameter(
@@ -259,27 +257,32 @@ public class LabelAllocateApiController {
               examples = {
                 @io.swagger.v3.oas.annotations.media.ExampleObject(
                     name = "라벨링 종료",
-                    value = """
+                    value =
+                        """
                         {"closedType": "LABELING", "closedYn": "Y"}
                         """),
                 @io.swagger.v3.oas.annotations.media.ExampleObject(
                     name = "검수 종료",
-                    value = """
+                    value =
+                        """
                         {"closedType": "INSPECTION", "closedYn": "Y"}
                         """),
                 @io.swagger.v3.oas.annotations.media.ExampleObject(
                     name = "라벨링 재개",
-                    value = """
+                    value =
+                        """
                         {"closedType": "LABELING", "closedYn": "N"}
                         """),
                 @io.swagger.v3.oas.annotations.media.ExampleObject(
                     name = "검수 재개",
-                    value = """
+                    value =
+                        """
                         {"closedType": "INSPECTION", "closedYn": "N"}
                         """),
                 @io.swagger.v3.oas.annotations.media.ExampleObject(
                     name = "특정 프로젝트 라벨링 종료",
-                    value = """
+                    value =
+                        """
                         {"uuid": "f97dc186-e6d3-4645-9737-3173dde8dc64", "closedType": "LABELING", "closedYn": "Y"}
                         """)
               }))

File: InferenceResultCoreService.java

@@ -1,6 +1,6 @@
 package com.kamco.cd.kamcoback.postgres.core;

-import com.fasterxml.jackson.databind.ObjectMapper;
+import com.kamco.cd.kamcoback.common.utils.UserUtil;
 import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
 import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
@@ -9,15 +9,18 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
 import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
+import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearn5kEntity;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
 import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetAnalDataInferenceRepository;
+import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetLearn5kRepository;
 import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetLearnRepository;
 import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx5kRepository;
+import jakarta.persistence.EntityManager;
 import jakarta.persistence.EntityNotFoundException;
 import jakarta.validation.constraints.NotNull;
+import java.util.ArrayList;
 import java.util.List;
 import lombok.RequiredArgsConstructor;
-import org.locationtech.jts.io.geojson.GeoJsonWriter;
 import org.springframework.data.domain.Page;
 import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
@@ -29,9 +32,10 @@ public class InferenceResultCoreService {
   private final MapSheetAnalDataInferenceRepository mapSheetAnalDataRepository;
   private final MapSheetLearnRepository mapSheetLearnRepository;
   private final MapInkx5kRepository mapInkx5kRepository;
-  private final ObjectMapper objectMapper = new ObjectMapper();
-  private final GeoJsonWriter geoJsonWriter = new GeoJsonWriter();
+  private final MapSheetLearn5kRepository mapSheetLearn5kRepository;
+  private final EntityManager entityManager;
+  private final UserUtil userUtil;

   /**
    * Inference management list
@@ -55,7 +59,40 @@ public class InferenceResultCoreService {
     mapSheetLearnEntity.setM1ModelUid(req.getModel1Uid());
     mapSheetLearnEntity.setM2ModelUid(req.getModel2Uid());
     mapSheetLearnEntity.setM3ModelUid(req.getModel3Uid());
-    // mapSheetLearnRepository.save()
+    mapSheetLearnEntity.setCompareYyyy(req.getCompareYyyy());
+    mapSheetLearnEntity.setTargetYyyy(req.getTargetYyyy());
+    mapSheetLearnEntity.setMapSheetScope(req.getMapSheetScope());
+    mapSheetLearnEntity.setDetectOption(req.getDetectOption());
+    mapSheetLearnEntity.setCreatedUid(userUtil.getId());
+
+    // save the learn row
+    MapSheetLearnEntity savedLearn = mapSheetLearnRepository.save(mapSheetLearnEntity);
+
+    final int CHUNK = 1000;
+    List<MapSheetLearn5kEntity> buffer = new ArrayList<>(CHUNK);
+    List<String> mapSheetNumList = req.getMapSheetNum();
+
+    // save the learn map sheets
+    for (String mapSheetNum : mapSheetNumList) {
+      MapSheetLearn5kEntity e = new MapSheetLearn5kEntity();
+      e.setLearn(savedLearn);
+      e.setMapSheetNum(Long.parseLong(mapSheetNum));
+      e.setCreatedUid(userUtil.getId());
+      buffer.add(e);
+
+      if (buffer.size() == CHUNK) {
+        mapSheetLearn5kRepository.saveAll(buffer);
+        mapSheetLearn5kRepository.flush();
+        entityManager.clear();
+        buffer.clear();
+      }
+    }
+
+    if (!buffer.isEmpty()) {
+      mapSheetLearn5kRepository.saveAll(buffer);
+      mapSheetLearn5kRepository.flush();
+      entityManager.clear();
+    }
   }

   /****/
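The new registration flow saves the learn row once, then inserts its 5k map-sheet rows in chunks of 1,000, flushing each chunk and clearing the persistence context so memory use stays flat regardless of input size. Stripped of the domain types, the pattern looks like this (a minimal sketch with illustrative names, not code from the commit):

import jakarta.persistence.EntityManager;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;

public final class ChunkedInserts {
  private ChunkedInserts() {}

  /** Persist rows in fixed-size chunks; pairs with hibernate.jdbc.batch_size. */
  public static <T> void persistInChunks(
      List<T> rows, JpaRepository<T, ?> repository, EntityManager em, int chunkSize) {
    for (int from = 0; from < rows.size(); from += chunkSize) {
      List<T> chunk = rows.subList(from, Math.min(from + chunkSize, rows.size()));
      repository.saveAll(chunk); // INSERTs are queued and batched by Hibernate
      repository.flush(); // push the batch through JDBC now
      em.clear(); // detach the chunk so the first-level cache stays small
    }
  }
}

One caveat worth knowing: entityManager.clear() also detaches savedLearn, which should be harmless here because later chunks only need its identifier to populate the learn_id foreign key.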

File: LabelAllocateCoreService.java

@@ -142,7 +142,6 @@ public class LabelAllocateCoreService {
     labelAllocateRepository.assignOwnerReAllocate(uuid, userId, paramUserId, assignCount);
   }

-
   public void updateClosedYnByUuid(String uuid, String closedType, String closedYn) {
     labelAllocateRepository.updateClosedYnByUuid(uuid, closedType, closedYn);
   }

File: MapSheetLearn5kEntity.java (new)

@@ -0,0 +1,51 @@
+package com.kamco.cd.kamcoback.postgres.entity;
+
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.SequenceGenerator;
+import jakarta.persistence.Table;
+import jakarta.validation.constraints.NotNull;
+import java.time.ZonedDateTime;
+import lombok.Getter;
+import lombok.Setter;
+import org.hibernate.annotations.OnDelete;
+import org.hibernate.annotations.OnDeleteAction;
+
+@Getter
+@Setter
+@Entity
+@Table(name = "tb_map_sheet_learn_5k")
+public class MapSheetLearn5kEntity {
+
+  @Id
+  @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_learn_5k_id_gen")
+  @SequenceGenerator(
+      name = "tb_map_sheet_learn_5k_id_gen",
+      sequenceName = "tb_map_sheet_learn_5k_seq",
+      allocationSize = 1)
+  @Column(name = "id", nullable = false)
+  private Long id;
+
+  @NotNull
+  @ManyToOne(fetch = FetchType.LAZY, optional = false)
+  @OnDelete(action = OnDeleteAction.CASCADE)
+  @JoinColumn(name = "learn_id", nullable = false, referencedColumnName = "id")
+  private MapSheetLearnEntity learn;
+
+  @NotNull
+  @Column(name = "map_sheet_num", nullable = false)
+  private Long mapSheetNum;
+
+  @org.hibernate.annotations.CreationTimestamp
+  @Column(name = "created_dttm")
+  private ZonedDateTime createdDttm;
+
+  @Column(name = "created_uid")
+  private Long createdUid;
+}
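One interaction worth knowing about: allocationSize = 1 forces a nextval round-trip to tb_map_sheet_learn_5k_seq for every row before it can join a JDBC batch. If bulk-insert speed matters, raising the allocation size lets Hibernate assign most ids locally — along these lines (a sketch; it assumes the sequence's INCREMENT BY is changed to match):

@SequenceGenerator(
    name = "tb_map_sheet_learn_5k_id_gen",
    sequenceName = "tb_map_sheet_learn_5k_seq",
    allocationSize = 50) // one sequence call per 50 locally assigned ids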

File: MapSheetLearnEntity.java

@@ -33,7 +33,7 @@ public class MapSheetLearnEntity {
   @ColumnDefault("gen_random_uuid()")
   @Column(name = "uuid")
-  private UUID uuid;
+  private UUID uuid = UUID.randomUUID();

   @Size(max = 200)
   @NotNull
@@ -89,7 +89,7 @@ public class MapSheetLearnEntity {
   @Column(name = "apply_dttm")
   private ZonedDateTime applyDttm;

-  @ColumnDefault("now()")
+  @org.hibernate.annotations.CreationTimestamp
   @Column(name = "created_dttm")
   private ZonedDateTime createdDttm;
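Both replacements follow from the same Hibernate behavior: generated INSERT statements include every mapped column, so @ColumnDefault values such as gen_random_uuid() and now() only fire when the column is omitted (e.g. under @DynamicInsert). Initializing the UUID in Java and generating the timestamp with @CreationTimestamp guarantees both values without reading the row back, which also keeps JDBC batching effective. On Hibernate 6.2+ (an assumption about the project's version) the UUID could equivalently be declared as:

@org.hibernate.annotations.UuidGenerator
@Column(name = "uuid")
private UUID uuid; // assigned by Hibernate just before the INSERT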

File: MapSheetLearn5kRepository.java (new)

@@ -0,0 +1,7 @@
+package com.kamco.cd.kamcoback.postgres.repository.Inference;
+
+import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearn5kEntity;
+import org.springframework.data.jpa.repository.JpaRepository;
+
+public interface MapSheetLearn5kRepository
+    extends JpaRepository<MapSheetLearn5kEntity, Long>, MapSheetLearn5kRepositoryCustom {}

File: MapSheetLearn5kRepositoryCustom.java (new)

@@ -0,0 +1,3 @@
+package com.kamco.cd.kamcoback.postgres.repository.Inference;
+
+public interface MapSheetLearn5kRepositoryCustom {}

File: MapSheetLearn5kRepositoryImpl.java (new)

@@ -0,0 +1,6 @@
+package com.kamco.cd.kamcoback.postgres.repository.Inference;
+
+import org.springframework.stereotype.Repository;
+
+@Repository
+public class MapSheetLearn5kRepositoryImpl implements MapSheetLearn5kRepositoryCustom {}
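The empty Custom/Impl pair mirrors Spring Data's documented fragment convention (UserRepositoryCustom backed by UserRepositoryImpl), so query methods can be added later without touching the JpaRepository interface. A hypothetical example in the Querydsl style used elsewhere in this codebase (see LabelAllocateRepositoryImpl); the method name, generated Q-class, and injected JPAQueryFactory are assumptions:

// Sketch only - two files, shown together for brevity.
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearn5kEntity.mapSheetLearn5kEntity;

import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Repository;

public interface MapSheetLearn5kRepositoryCustom {
  List<Long> findMapSheetNumsByLearnId(Long learnId);
}

@Repository
@RequiredArgsConstructor
public class MapSheetLearn5kRepositoryImpl implements MapSheetLearn5kRepositoryCustom {
  private final JPAQueryFactory queryFactory;

  @Override
  public List<Long> findMapSheetNumsByLearnId(Long learnId) {
    // Fetch only the 5k map-sheet numbers belonging to one learn row.
    return queryFactory
        .select(mapSheetLearn5kEntity.mapSheetNum)
        .from(mapSheetLearn5kEntity)
        .where(mapSheetLearn5kEntity.learn.id.eq(learnId))
        .fetch();
  }
}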

File: LabelAllocateRepositoryCustom.java

@@ -84,7 +84,6 @@ public interface LabelAllocateRepositoryCustom {
   void assignOwnerReAllocate(String uuid, String userId, String paramUserId, Long assignCount);

   // Update a project's closed flag (uuid-based)
-
   void updateClosedYnByUuid(String uuid, String closedType, String closedYn);
 }

File: LabelAllocateRepositoryImpl.java

@@ -1433,7 +1433,6 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCustom {
         .execute();
   }

   @Override
-
   public void updateClosedYnByUuid(String uuid, String closedType, String closedYn) {
     var updateQuery = queryFactory.update(mapSheetAnalInferenceEntity);

File: application profile YAML (profile name not shown in this view)

@@ -11,8 +11,11 @@ spring:
       hibernate:
         default_batch_fetch_size: 100 # ✅ performance - prevents N+1 queries
         order_updates: true # ✅ performance - ordered updates to prevent deadlocks
+        order_inserts: true
         use_sql_comments: true # ⚠️ optional - add comments to SQL (for debugging)
         format_sql: true # ⚠️ optional - format SQL (readability)
+        jdbc:
+          batch_size: 1000 # ✅ added (JDBC batch)
   datasource:
     url: jdbc:postgresql://192.168.2.127:15432/kamco_cds
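order_inserts plus hibernate.jdbc.batch_size: 1000 pairs with the CHUNK = 1000 loop in InferenceResultCoreService, so each flush drains as full JDBC batches of same-table INSERTs. On PostgreSQL, the pgJDBC reWriteBatchedInserts URL flag is a common companion that rewrites such a batch into multi-row INSERT statements; if desired it would look roughly like this (a sketch, not part of this commit):

  datasource:
    url: jdbc:postgresql://192.168.2.127:15432/kamco_cds?reWriteBatchedInserts=true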

File: application profile YAML (prod)

@@ -4,14 +4,18 @@ spring:
       on-profile: prod
   jpa:
-    show-sql: false
+    show-sql: true
     hibernate:
       ddl-auto: validate
     properties:
       hibernate:
         default_batch_fetch_size: 100 # ✅ performance - prevents N+1 queries
         order_updates: true # ✅ performance - ordered updates to prevent deadlocks
+        order_inserts: true
         use_sql_comments: true # ⚠️ optional - add comments to SQL (for debugging)
+        format_sql: true # ⚠️ optional - format SQL (readability)
+        jdbc:
+          batch_size: 1000 # ✅ added (JDBC batch)
   datasource:
     url: jdbc:postgresql://10.100.0.10:25432/temp
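Beyond the batching settings, this hunk also flips show-sql to true for the prod profile. show-sql prints statements straight to stdout; if SQL visibility in prod is intended, the logger-based route is the one that stays adjustable at runtime (a sketch of the conventional alternative, not part of this commit):

  logging:
    level:
      org.hibernate.SQL: debug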