Merge branch 'develop' into feat/dev_251201

2025-12-03 13:46:39 +09:00
19 changed files with 614 additions and 42 deletions

View File

@@ -2,18 +2,26 @@ package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx5kRepository;
import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@RequiredArgsConstructor
public class InferenceResultCoreService {
private final InferenceResultRepository inferenceResultRepository;
private final MapInkx5kRepository mapInkx5kRepository;
/**
* Inference management > analysis result list lookup
@@ -61,6 +69,27 @@ public class InferenceResultCoreService {
return inferenceResultRepository.getInferenceGeomList(id, searchGeoReq);
}
/**
* Analysis result detail list.
*
* @param analyId analysis ID
* @param searchReq search conditions (class filters, map sheet numbers, paging and sort)
* @return paged detail rows with geometry
*/
@Transactional(readOnly = true)
public Page<InferenceResultDto.DetailListEntity> listInferenceResultWithGeom(
@NotNull Long analyId, InferenceResultDto.SearchGeoReq searchReq) {
// Collect the data UIDs (MapSheetAnalDataEntity ids) that belong to this analysis.
List<Long> dataIds =
inferenceResultRepository.listAnalyGeom(analyId).stream()
.map(MapSheetAnalDataEntity::getId)
.toList();
// Fetch the paged polygon rows for those data UIDs.
Page<MapSheetAnalDataGeomEntity> mapSheetAnalDataGeomEntities =
inferenceResultRepository.listInferenceResultWithGeom(dataIds, searchReq);
return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataGeomEntity::toEntity);
}
/**
* List of inferred 1:5000 map sheets
*
@@ -70,4 +99,17 @@ public class InferenceResultCoreService {
public List<Long> getSheets(Long id) {
return inferenceResultRepository.getSheets(id);
}
/** Looks up the inferred 1:5000 map sheets (tb_map_inkx_5k) referenced by the given analysis. */
@Transactional(readOnly = true)
public List<MapSheet> listGetScenes5k(Long analyId) {
List<String> sceneCodes =
inferenceResultRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataEntity::getMapSheetNum)
.mapToObj(String::valueOf)
.toList();
return mapInkx5kRepository.listGetScenes5k(sceneCodes).stream()
.map(MapInkx5kEntity::toEntity)
.toList();
}
}
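
For orientation, a minimal sketch of how a caller might expose the two new service methods. The controller class, package, and endpoint paths below are assumptions for illustration and are not part of this commit.

package com.kamco.cd.kamcoback.inference;

import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

// Hypothetical controller sketch; names and paths are illustrative only.
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/inference-results")
public class InferenceResultControllerSketch {

    private final InferenceResultCoreService inferenceResultCoreService;

    // Paged polygon detail rows for one analysis run
    @GetMapping("/{analyId}/geoms")
    public Page<InferenceResultDto.DetailListEntity> listGeoms(
            @PathVariable Long analyId, InferenceResultDto.SearchGeoReq searchReq) {
        return inferenceResultCoreService.listInferenceResultWithGeom(analyId, searchReq);
    }

    // 1:5000 map sheets referenced by the analysis results
    @GetMapping("/{analyId}/scenes-5k")
    public List<InferenceResultDto.MapSheet> listScenes5k(@PathVariable Long analyId) {
        return inferenceResultCoreService.listGetScenes5k(analyId);
    }
}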

View File

@@ -1,6 +1,14 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.*;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import lombok.Getter;
import lombok.Setter;
import org.locationtech.jts.geom.Geometry;
@@ -10,6 +18,7 @@ import org.locationtech.jts.geom.Geometry;
@Table(name = "tb_map_inkx_5k")
@Entity
public class MapInkx5kEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_inkx_5k_fid_seq_gen")
@SequenceGenerator(
@@ -29,4 +38,8 @@ public class MapInkx5kEntity {
@Column(name = "fid_k50")
private Long fidK50;
public InferenceResultDto.MapSheet toEntity() {
return new MapSheet(mapidcdNo, mapidNm);
}
}

View File

@@ -1,14 +1,21 @@
package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.common.enums.DetectionClassification;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Clazzes;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
@@ -31,6 +38,9 @@ public class MapSheetAnalDataGeomEntity {
@Column(name = "geo_uid", nullable = false)
private Long id;
@Column(name = "uuid")
private UUID uuid;
@Column(name = "cd_prob")
private Double cdProb;
@@ -51,6 +61,10 @@ public class MapSheetAnalDataGeomEntity {
@Column(name = "map_sheet_num")
private Long mapSheetNum;
@ManyToOne(fetch = FetchType.EAGER)
@JoinColumn(name = "map_5k_id", referencedColumnName = "fid")
private MapInkx5kEntity map5k;
@Column(name = "compare_yyyy")
private Integer compareYyyy;
@@ -133,4 +147,21 @@ public class MapSheetAnalDataGeomEntity {
@Column(name = "geom_center", columnDefinition = "geometry")
private Geometry geomCenter;
public InferenceResultDto.DetailListEntity toEntity() {
// Classification observed in the compare year vs. the target year
DetectionClassification comparedClassification = DetectionClassification.fromString(classBeforeCd);
Clazzes comparedClazz = new Clazzes(comparedClassification, classBeforeProb);
DetectionClassification targetClassification = DetectionClassification.fromString(classAfterCd);
Clazzes targetClazz = new Clazzes(targetClassification, classAfterProb);
InferenceResultDto.MapSheet mapSheet = map5k != null ? map5k.toEntity() : null;
// Centroid of the detected polygon, if present
InferenceResultDto.Coordinate coordinate = null;
if (geomCenter != null) {
org.locationtech.jts.geom.Point point = (org.locationtech.jts.geom.Point) geomCenter;
coordinate = new InferenceResultDto.Coordinate(point.getX(), point.getY());
}
return new InferenceResultDto.DetailListEntity(
uuid, cdProb, comparedClazz, targetClazz, mapSheet, coordinate, createdDttm);
}
}
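
The DTO constructors called in toEntity() imply record shapes roughly like the sketch below. The real definitions live in InferenceResultDto and may differ, so all field names and types here are inferred assumptions rather than the committed code.

// Inferred sketch of the nested DTOs used above; names and types are assumptions
// derived from the constructor arguments, not copied from InferenceResultDto.
public class InferenceResultDtoSketch {
    public record Clazzes(DetectionClassification classification, Double prob) {}
    public record MapSheet(String mapidcdNo, String mapidNm) {}
    public record Coordinate(double x, double y) {}
    public record DetailListEntity(
        java.util.UUID uuid,
        Double cdProb,
        Clazzes comparedClazz,
        Clazzes targetClazz,
        MapSheet mapSheet,
        Coordinate coordinate,
        java.time.ZonedDateTime createdDttm) {}

    // Placeholder for the project enum referenced above.
    public enum DetectionClassification {}
}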

View File

@@ -9,7 +9,9 @@ import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
@@ -17,6 +19,9 @@ import org.hibernate.annotations.ColumnDefault;
@Setter
@Entity
@Table(name = "tb_map_sheet_anal")
@NoArgsConstructor(access = AccessLevel.PROTECTED)
// TODO remove setters
// TODO add javadoc comments
public class MapSheetAnalEntity {
@Id
@@ -28,21 +33,24 @@ public class MapSheetAnalEntity {
@Column(name = "anal_uid", nullable = false)
private Long id;
// TODO UUID
// TODO add a unique constraint (UK)
@Column(name = "compare_yyyy")
private Integer compareYyyy;
private Integer compareYyyy; // comparison year
@Column(name = "target_yyyy")
private Integer targetYyyy;
private Integer targetYyyy; // base (reference) year
@Column(name = "model_uid")
private Long modelUid;
private Long modelUid; // model identifier key?
@Size(max = 100)
@Column(name = "server_ids", length = 100)
private String serverIds;
private String serverIds; // server IDs?
@Column(name = "anal_map_sheet", length = Integer.MAX_VALUE)
private String analMapSheet;
private String analMapSheet; // analysis map sheets?
@Column(name = "anal_strt_dttm")
private ZonedDateTime analStrtDttm;
@@ -54,15 +62,15 @@ public class MapSheetAnalEntity {
private Long analSec;
@Column(name = "anal_pred_sec")
private Long analPredSec;
private Long analPredSec; // estimated duration in seconds?
@Size(max = 20)
@Column(name = "anal_state", length = 20)
private String analState;
private String analState; // TODO manage with an enum
@Size(max = 20)
@Column(name = "gukyuin_used", length = 20)
private String gukyuinUsed;
private String gukyuinUsed; // TODO manage as a Boolean
@Column(name = "accuracy")
private Double accuracy;
@@ -71,17 +79,9 @@ public class MapSheetAnalEntity {
@Column(name = "result_url")
private String resultUrl;
@ColumnDefault("now()")
@Column(name = "created_dttm")
private ZonedDateTime createdDttm;
@Column(name = "created_uid")
private Long createdUid;
@ColumnDefault("now()")
@Column(name = "updated_dttm")
private ZonedDateTime updatedDttm;
@Column(name = "updated_uid")
private Long updatedUid;
@@ -94,4 +94,13 @@ public class MapSheetAnalEntity {
@Column(name = "base_map_sheet_num")
private String baseMapSheetNum;
// TODO CommonDateEntity ?
@ColumnDefault("now()")
@Column(name = "created_dttm")
private ZonedDateTime createdDttm;
@ColumnDefault("now()")
@Column(name = "updated_dttm")
private ZonedDateTime updatedDttm;
}
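
The "// TODO CommonDateEntity ?" note above points at pulling the shared audit timestamps into a common base class. A minimal sketch of that idea, assuming a @MappedSuperclass approach (class name and package are assumptions, not part of this commit):

// Hypothetical @MappedSuperclass for the shared audit columns; a sketch only.
package com.kamco.cd.kamcoback.postgres.entity;

import jakarta.persistence.Column;
import jakarta.persistence.MappedSuperclass;
import java.time.ZonedDateTime;
import lombok.Getter;
import org.hibernate.annotations.ColumnDefault;

@Getter
@MappedSuperclass
public abstract class CommonDateEntity {

    @ColumnDefault("now()")
    @Column(name = "created_dttm")
    private ZonedDateTime createdDttm;

    @ColumnDefault("now()")
    @Column(name = "updated_dttm")
    private ZonedDateTime updatedDttm;
}

MapSheetAnalEntity (and the other tb_map_sheet_* entities) could then extend it and drop their own createdDttm/updatedDttm fields; note that @ColumnDefault only affects DDL, so Hibernate's @CreationTimestamp/@UpdateTimestamp or Spring Data auditing would still be needed if the values should also be populated on the Java side.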

View File

@@ -2,6 +2,10 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import java.util.Optional;
import org.springframework.data.domain.Page;
@@ -16,7 +20,12 @@ public interface InferenceResultRepositoryCustom {
Page<InferenceResultDto.Geom> getInferenceGeomList(
Long id, InferenceResultDto.SearchGeoReq searchGeoReq);
Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom(
List<Long> dataIds, SearchGeoReq searchReq);
List<Long> getSheets(Long id);
List<Dashboard> getDashboard(Long id);
List<MapSheetAnalDataEntity> listAnalyGeom(@NotNull Long id);
}

View File

@@ -3,6 +3,8 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity;
@@ -10,17 +12,23 @@ import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Order;
import com.querydsl.core.types.OrderSpecifier;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.JPQLQuery;
import com.querydsl.jpa.impl.JPAQuery;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Repository;
@Repository
@@ -80,7 +88,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
.where(builder)
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(mapSheetAnalEntity.createdDttm.desc())
.orderBy(mapSheetAnalEntity.id.desc())
.fetch();
long total =
@@ -159,6 +167,56 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
.fetch();
}
@Override
public List<MapSheetAnalDataEntity> listAnalyGeom(Long id) {
QMapSheetAnalDataEntity analy = QMapSheetAnalDataEntity.mapSheetAnalDataEntity;
return queryFactory.selectFrom(analy).where(analy.analUid.eq(id)).fetch();
}
/**
* Analysis result detail list.
*
* @param ids data UIDs belonging to the analysis
* @param searchReq search conditions (class filters, map sheet numbers, paging and sort)
* @return paged polygon rows matching the conditions
*/
@Override
public Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom(
List<Long> ids, SearchGeoReq searchReq) {
// Q-entity for the detected polygon rows
QMapSheetAnalDataGeomEntity detectedEntity =
QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
Pageable pageable = searchReq.toPageable();
// search conditions
JPAQuery<MapSheetAnalDataGeomEntity> query =
queryFactory
.selectFrom(detectedEntity)
.where(
detectedEntity.dataUid.in(ids),
eqTargetClass(detectedEntity, searchReq.getTargetClass()),
eqCompareClass(detectedEntity, searchReq.getCompareClass()),
containsMapSheetNum(detectedEntity, searchReq.getMapSheetNum()));
// total row count before paging is applied
long total = query.fetchCount();
// Take the sort from the Pageable; fall back to the default sort (createdDttm desc) when none is given
List<OrderSpecifier<?>> orders = getOrderSpecifiers(pageable.getSort());
if (orders.isEmpty()) {
orders.add(detectedEntity.createdDttm.desc());
}
List<MapSheetAnalDataGeomEntity> content =
query
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(orders.toArray(new OrderSpecifier[0]))
.fetch();
return new PageImpl<>(content, pageable, total);
}
/**
* Analysis result detail list
*
@@ -252,4 +310,61 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
.groupBy(mapSheetAnalDataEntity.mapSheetNum)
.fetch();
}
/** Converts a Pageable Sort into QueryDSL OrderSpecifiers. */
@SuppressWarnings({"unchecked", "rawtypes"})
private List<OrderSpecifier<?>> getOrderSpecifiers(Sort sort) {
List<OrderSpecifier<?>> orders = new ArrayList<>();
if (sort.isSorted()) {
QMapSheetAnalDataGeomEntity entity = QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
for (Sort.Order order : sort) {
Order direction = order.isAscending() ? Order.ASC : Order.DESC;
String property = order.getProperty();
// only whitelisted properties are mapped
switch (property) {
case "classBeforeCd" -> orders.add(new OrderSpecifier(direction, entity.classBeforeCd));
case "classBeforeProb" ->
orders.add(new OrderSpecifier(direction, entity.classBeforeProb));
case "classAfterCd" -> orders.add(new OrderSpecifier(direction, entity.classAfterCd));
case "classAfterProb" -> orders.add(new OrderSpecifier(direction, entity.classAfterProb));
case "mapSheetNum" -> orders.add(new OrderSpecifier(direction, entity.mapSheetNum));
case "compareYyyy" -> orders.add(new OrderSpecifier(direction, entity.compareYyyy));
case "targetYyyy" -> orders.add(new OrderSpecifier(direction, entity.targetYyyy));
case "area" -> orders.add(new OrderSpecifier(direction, entity.area));
case "createdDttm" -> orders.add(new OrderSpecifier(direction, entity.createdDttm));
case "updatedDttm" -> orders.add(new OrderSpecifier(direction, entity.updatedDttm));
// unknown properties are ignored
default -> {}
}
}
}
return orders;
}
private BooleanExpression eqTargetClass(
QMapSheetAnalDataGeomEntity detectedEntity, String targetClass) {
return targetClass != null && !targetClass.isEmpty()
? detectedEntity.classAfterCd.toLowerCase().eq(targetClass.toLowerCase())
: null;
}
private BooleanExpression eqCompareClass(
QMapSheetAnalDataGeomEntity detectedEntity, String compareClass) {
return compareClass != null && !compareClass.isEmpty()
? detectedEntity.classBeforeCd.toLowerCase().eq(compareClass.toLowerCase())
: null;
}
private BooleanExpression containsMapSheetNum(
QMapSheetAnalDataGeomEntity detectedEntity, List<Long> mapSheet) {
if (mapSheet == null || mapSheet.isEmpty()) {
return null;
}
return detectedEntity.mapSheetNum.in(mapSheet);
}
}
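
To make the sort whitelist concrete, a small self-contained sketch of the kind of Pageable the repository expects; it assumes SearchGeoReq.toPageable() yields a PageRequest like the one below (illustrative values only, not part of this commit):

// Illustrative only: how a Sort flows into getOrderSpecifiers(...).
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;

public class SortWhitelistExample {
    public static void main(String[] args) {
        // "area" is whitelisted in getOrderSpecifiers(...); an unknown property
        // like "foo" is silently dropped, and if nothing valid remains the query
        // falls back to createdDttm desc.
        Pageable pageable = PageRequest.of(0, 20,
            Sort.by(Sort.Order.desc("area"), Sort.Order.asc("foo")));
        System.out.println(pageable.getSort()); // area: DESC,foo: ASC
    }
}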

View File

@@ -126,7 +126,8 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
mapSheetAnalDataGeomEntity.classBeforeCd.toUpperCase(),
mapSheetAnalDataGeomEntity.targetYyyy,
mapSheetAnalDataGeomEntity.classAfterProb,
mapSheetAnalDataGeomEntity.classAfterCd.toUpperCase()))
mapSheetAnalDataGeomEntity.classAfterCd.toUpperCase(),
mapSheetAnalDataGeomEntity.cdProb))
.from(mapSheetAnalDataGeomEntity)
.innerJoin(mapSheetAnalDataEntity)
.on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id))
@@ -159,7 +160,8 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
data.getBeforeClass(),
data.getAfterYear(),
data.getAfterConfidence(),
data.getAfterClass());
data.getAfterClass(),
data.getCdProb());
return new ChangeDetectionDto.PolygonFeature(
data.getType(), jsonNode, properties);

View File

@@ -0,0 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.scene;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface MapInkx5kRepository
extends JpaRepository<MapInkx5kEntity, Long>, MapInkx5kRepositoryCustom {}

View File

@@ -0,0 +1,9 @@
package com.kamco.cd.kamcoback.postgres.repository.scene;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import java.util.List;
public interface MapInkx5kRepositoryCustom {
List<MapInkx5kEntity> listGetScenes5k(List<String> codes);
}

View File

@@ -0,0 +1,28 @@
package com.kamco.cd.kamcoback.postgres.repository.scene;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.List;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
public class MapInkx5kRepositoryImpl extends QuerydslRepositorySupport
implements MapInkx5kRepositoryCustom {
private final JPAQueryFactory queryFactory;
public MapInkx5kRepositoryImpl(JPAQueryFactory queryFactory) {
super(MapInkx5kEntity.class);
this.queryFactory = queryFactory;
}
@Override
public List<MapInkx5kEntity> listGetScenes5k(List<String> codes) {
QMapInkx5kEntity map5k = QMapInkx5kEntity.mapInkx5kEntity;
return queryFactory
.selectFrom(map5k)
.where(map5k.mapidcdNo.in(codes))
.orderBy(map5k.mapidcdNo.asc())
.fetch();
}
}
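
The constructor above relies on a JPAQueryFactory bean being available in the application context. If the project does not already define one, a typical QueryDSL configuration looks like the sketch below (class name and package are assumptions):

// Common QueryDSL setup sketch; only needed if the application
// does not already expose a JPAQueryFactory bean.
package com.kamco.cd.kamcoback.config;

import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class QuerydslConfig {

    @PersistenceContext
    private EntityManager entityManager;

    @Bean
    public JPAQueryFactory jpaQueryFactory() {
        return new JPAQueryFactory(entityManager);
    }
}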