analDataGeom, analData 제거

This commit is contained in:
2026-01-07 11:24:05 +09:00
parent 91ebcc9551
commit 904b512f2c
16 changed files with 503 additions and 923 deletions

View File

@@ -26,17 +26,17 @@ public class InferenceResultShpDto {
private Integer input2; // target_yyyy private Integer input2; // target_yyyy
// ===== 추론 결과 ===== // ===== 추론 결과 =====
private Float cdProb; private Double cdProb;
private String beforeClass; private String beforeClass;
private Float beforeProbability; private Double beforeProbability;
private String afterClass; private String afterClass;
private Float afterProbability; private Double afterProbability;
// ===== 공간 정보 ===== // ===== 공간 정보 =====
private Geometry geometry; private Geometry geometry;
private Float area; private Double area;
/** Entity → DTO 변환 */ /** Entity → DTO 변환 */
public static Basic from(MapSheetAnalDataInferenceGeomEntity e) { public static Basic from(MapSheetAnalDataInferenceGeomEntity e) {

View File

@@ -5,7 +5,7 @@ import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto; import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import com.kamco.cd.kamcoback.common.enums.DetectionClassification; import com.kamco.cd.kamcoback.common.enums.DetectionClassification;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.changedetection.ChangeDetectionRepository; import com.kamco.cd.kamcoback.postgres.repository.changedetection.ChangeDetectionRepository;
import java.util.List; import java.util.List;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@@ -21,7 +21,7 @@ public class ChangeDetectionCoreService {
private final ChangeDetectionRepository changeDetectionRepository; private final ChangeDetectionRepository changeDetectionRepository;
public List<ChangeDetectionDto.TestDto> getPolygonToPoint() { public List<ChangeDetectionDto.TestDto> getPolygonToPoint() {
List<MapSheetAnalDataGeomEntity> list = changeDetectionRepository.findAll(); List<MapSheetAnalDataInferenceGeomEntity> list = changeDetectionRepository.findAll();
return list.stream() return list.stream()
.map( .map(
@@ -31,7 +31,7 @@ public class ChangeDetectionCoreService {
Point centroid = polygon.getCentroid(); Point centroid = polygon.getCentroid();
return new ChangeDetectionDto.TestDto( return new ChangeDetectionDto.TestDto(
p.getId(), polygon, centroid.getX(), centroid.getY()); p.getGeoUid(), polygon, centroid.getX(), centroid.getY());
}) })
.collect(Collectors.toList()); .collect(Collectors.toList());
} }

View File

@@ -4,9 +4,9 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity; import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetAnalDataRepository; import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetAnalDataInferenceRepository;
import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx5kRepository; import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx5kRepository;
import jakarta.persistence.EntityNotFoundException; import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.NotNull;
@@ -20,7 +20,7 @@ import org.springframework.transaction.annotation.Transactional;
@RequiredArgsConstructor @RequiredArgsConstructor
public class InferenceResultCoreService { public class InferenceResultCoreService {
private final MapSheetAnalDataRepository mapSheetAnalDataRepository; private final MapSheetAnalDataInferenceRepository mapSheetAnalDataRepository;
private final MapInkx5kRepository mapInkx5kRepository; private final MapInkx5kRepository mapInkx5kRepository;
/** /**
@@ -81,13 +81,13 @@ public class InferenceResultCoreService {
// 분석 ID 에 해당하는 dataids를 가져온다. // 분석 ID 에 해당하는 dataids를 가져온다.
List<Long> dataIds = List<Long> dataIds =
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream() mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataEntity::getId) .mapToLong(MapSheetAnalDataInferenceEntity::getId)
.boxed() .boxed()
.toList(); .toList();
// 해당데이터의 폴리곤데이터를 가져온다 // 해당데이터의 폴리곤데이터를 가져온다
Page<MapSheetAnalDataGeomEntity> mapSheetAnalDataGeomEntities = Page<MapSheetAnalDataInferenceGeomEntity> mapSheetAnalDataGeomEntities =
mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq); mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataGeomEntity::toEntity); return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataInferenceGeomEntity::toEntity);
} }
/** /**
@@ -104,7 +104,7 @@ public class InferenceResultCoreService {
public List<MapSheet> listGetScenes5k(Long analyId) { public List<MapSheet> listGetScenes5k(Long analyId) {
List<String> sceneCodes = List<String> sceneCodes =
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream() mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataEntity::getMapSheetNum) .mapToLong(MapSheetAnalDataInferenceEntity::getMapSheetNum)
.mapToObj(String::valueOf) .mapToObj(String::valueOf)
.toList(); .toList();

View File

@@ -1,148 +0,0 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
/**
 * JPA entity for table {@code tb_map_sheet_anal_data}: one map-sheet analysis
 * data row plus its workflow bookkeeping (download / fit / label / test states
 * and their timestamps).
 *
 * <p>NOTE(review): this class is deleted by this commit; callers were migrated
 * to {@code MapSheetAnalDataInferenceEntity}.
 */
@Getter
@Setter
@Entity
@Table(name = "tb_map_sheet_anal_data")
public class MapSheetAnalDataEntity {
// Primary key, generated from a DB sequence.
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_anal_data_id_gen")
@SequenceGenerator(
name = "tb_map_sheet_anal_data_id_gen",
// NOTE(review): sequence name says "learn_data" while the table is
// "anal_data" — looks like a copy/paste; confirm against the DB schema.
sequenceName = "tb_map_sheet_learn_data_data_uid",
allocationSize = 1)
@Column(name = "data_uid", nullable = false)
private Long id;
// ----- data identification / location -----
@Size(max = 128)
@Column(name = "data_name", length = 128)
private String dataName;
@Size(max = 255)
@Column(name = "data_path")
private String dataPath;
@Size(max = 128)
@Column(name = "data_type", length = 128)
private String dataType;
// Coordinate reference system code and display name.
@Size(max = 128)
@Column(name = "data_crs_type", length = 128)
private String dataCrsType;
@Size(max = 255)
@Column(name = "data_crs_type_name")
private String dataCrsTypeName;
// ----- audit columns (DB defaults to now()) -----
@ColumnDefault("now()")
@Column(name = "created_dttm", columnDefinition = "TIMESTAMP WITH TIME ZONE DEFAULT now()")
private ZonedDateTime createdDttm;
@Column(name = "created_uid")
private Long createdUid;
@ColumnDefault("now()")
@Column(name = "updated_dttm", columnDefinition = "TIMESTAMP WITH TIME ZONE DEFAULT now()")
private ZonedDateTime updatedDttm;
@Column(name = "updated_uid")
private Long updatedUid;
// Comparison year vs. target year of the change-detection run.
@Column(name = "compare_yyyy")
private Integer compareYyyy;
@Column(name = "target_yyyy")
private Integer targetYyyy;
// Raw JSON payload attached to this data row (schema not visible here).
@Column(name = "data_json")
private String dataJson;
// ----- workflow state columns (code values; enums not used) -----
@Size(max = 20)
@Column(name = "data_state", length = 20)
private String dataState;
@ColumnDefault("now()")
@Column(name = "data_state_dttm", columnDefinition = "TIMESTAMP WITH TIME ZONE DEFAULT now()")
private ZonedDateTime dataStateDttm;
// Analysis start/end and elapsed seconds.
@Column(name = "anal_strt_dttm", columnDefinition = "TIMESTAMP WITH TIME ZONE")
private ZonedDateTime analStrtDttm;
@Column(name = "anal_end_dttm", columnDefinition = "TIMESTAMP WITH TIME ZONE")
private ZonedDateTime analEndDttm;
@Column(name = "anal_sec")
private Long analSec;
@Size(max = 20)
@Column(name = "anal_state", length = 20)
private String analState;
// FK-like reference to the owning analysis run (no JPA relation mapped).
@Column(name = "anal_uid")
private Long analUid;
@Column(name = "map_sheet_num")
private Long mapSheetNum;
@Column(name = "detecting_cnt")
private Long detectingCnt;
// Parcel number (PNU); defaults to 0 in the DB.
@ColumnDefault("0")
@Column(name = "pnu")
private Long pnu;
@Size(max = 20)
@Column(name = "down_state", length = 20)
private String downState;
@Column(name = "down_state_dttm")
private ZonedDateTime downStateDttm;
@Size(max = 20)
@Column(name = "fit_state", length = 20)
private String fitState;
@Column(name = "fit_state_dttm")
private ZonedDateTime fitStateDttm;
@Column(name = "labeler_uid")
private Long labelerUid;
@Size(max = 20)
@ColumnDefault("NULL")
@Column(name = "label_state", length = 20)
private String labelState;
@Column(name = "label_state_dttm")
private ZonedDateTime labelStateDttm;
@Column(name = "tester_uid")
private Long testerUid;
@Size(max = 20)
@Column(name = "test_state", length = 20)
private String testState;
@Column(name = "test_state_dttm")
private ZonedDateTime testStateDttm;
// Free-text comment for the fit state.
@Column(name = "fit_state_cmmnt", length = Integer.MAX_VALUE)
private String fitStateCmmnt;
@Column(name = "ref_map_sheet_num")
private Long refMapSheetNum;
}

View File

@@ -1,167 +0,0 @@
package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.common.enums.DetectionClassification;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Clazzes;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import org.locationtech.jts.geom.Geometry;
/**
 * JPA entity for table {@code tb_map_sheet_anal_data_geom}: one detected
 * geometry (polygon + center point) with before/after classification codes and
 * probabilities, plus workflow state columns.
 *
 * <p>NOTE(review): this class is deleted by this commit; callers were migrated
 * to {@code MapSheetAnalDataInferenceGeomEntity}, which carries an equivalent
 * {@link #toEntity()} implementation.
 */
@Getter
@Setter
@Entity
@Table(name = "tb_map_sheet_anal_data_geom")
public class MapSheetAnalDataGeomEntity {
// Primary key, generated from a DB sequence.
@Id
@GeneratedValue(
strategy = GenerationType.SEQUENCE,
generator = "tb_map_sheet_anal_data_geom_id_gen")
@SequenceGenerator(
name = "tb_map_sheet_anal_data_geom_id_gen",
// NOTE(review): sequence name says "learn_data" while the table is
// "anal_data" — looks like a copy/paste; confirm against the DB schema.
sequenceName = "tb_map_sheet_learn_data_geom_geom_uid",
allocationSize = 1)
@Column(name = "geo_uid", nullable = false)
private Long id;
@Column(name = "uuid")
private UUID uuid;
// Change-detection probability for this geometry.
@Column(name = "cd_prob")
private Double cdProb;
// Classification code/probability in the comparison ("before") year.
@Size(max = 40)
@Column(name = "class_before_cd", length = 40)
private String classBeforeCd;
@Column(name = "class_before_prob")
private Double classBeforeProb;
// Classification code/probability in the target ("after") year.
@Size(max = 40)
@Column(name = "class_after_cd", length = 40)
private String classAfterCd;
@Column(name = "class_after_prob")
private Double classAfterProb;
@Column(name = "map_sheet_num")
private Long mapSheetNum;
// 1:5000 map-sheet the geometry belongs to (eagerly loaded).
@ManyToOne(fetch = FetchType.EAGER)
@JoinColumn(name = "map_5k_id", referencedColumnName = "fid")
private MapInkx5kEntity map5k;
@Column(name = "compare_yyyy")
private Integer compareYyyy;
@Column(name = "target_yyyy")
private Integer targetYyyy;
// Area of the geometry (units not visible here — presumably m²; confirm).
@Column(name = "area")
private Double area;
@Size(max = 100)
@Column(name = "geo_type", length = 100)
private String geoType;
// FK-like reference to the owning data row (no JPA relation mapped).
@Column(name = "data_uid")
private Long dataUid;
// ----- audit columns (DB defaults to now()) -----
@ColumnDefault("now()")
@Column(name = "created_dttm")
private ZonedDateTime createdDttm;
@Column(name = "created_uid")
private Long createdUid;
@ColumnDefault("now()")
@Column(name = "updated_dttm")
private ZonedDateTime updatedDttm;
@Column(name = "updated_uid")
private Long updatedUid;
@ColumnDefault("0")
@Column(name = "geom_cnt")
private Long geomCnt;
// Parcel number (PNU); defaults to 0 in the DB.
@ColumnDefault("0")
@Column(name = "pnu")
private Long pnu;
// ----- workflow state columns (code values; enums not used) -----
@Size(max = 20)
@ColumnDefault("0")
@Column(name = "fit_state", length = 20)
private String fitState;
@ColumnDefault("now()")
@Column(name = "fit_state_dttm")
private ZonedDateTime fitStateDttm;
@Column(name = "labeler_uid")
private Long labelerUid;
@Size(max = 20)
@ColumnDefault("0")
@Column(name = "label_state", length = 20)
private String labelState;
@ColumnDefault("now()")
@Column(name = "label_state_dttm")
private ZonedDateTime labelStateDttm;
@Column(name = "tester_uid")
private Long testerUid;
@Size(max = 20)
@ColumnDefault("0")
@Column(name = "test_state", length = 20)
private String testState;
@ColumnDefault("now()")
@Column(name = "test_state_dttm")
private ZonedDateTime testStateDttm;
@Column(name = "fit_state_cmmnt", length = Integer.MAX_VALUE)
private String fitStateCmmnt;
@Column(name = "ref_map_sheet_num")
private Long refMapSheetNum;
// PostGIS geometry columns: the detected polygon and its center point.
@Column(name = "geom", columnDefinition = "geometry")
private Geometry geom;
@Column(name = "geom_center", columnDefinition = "geometry")
private Geometry geomCenter;
/**
 * Maps this entity to the detail-list DTO: wraps the before/after
 * classification codes into {@link Clazzes}, resolves the 5k map sheet, and
 * extracts the center coordinate.
 */
public InferenceResultDto.DetailListEntity toEntity() {
DetectionClassification classification = DetectionClassification.fromString(classBeforeCd);
Clazzes comparedClazz = new Clazzes(classification, classBeforeProb);
DetectionClassification classification1 = DetectionClassification.fromString(classAfterCd);
Clazzes targetClazz = new Clazzes(classification1, classAfterProb);
InferenceResultDto.MapSheet mapSheet = map5k != null ? map5k.toEntity() : null;
InferenceResultDto.Coordinate coordinate = null;
if (geomCenter != null) {
// NOTE(review): assumes geom_center is always a Point; a non-Point value
// would throw ClassCastException here — confirm against the data loader.
org.locationtech.jts.geom.Point point = (org.locationtech.jts.geom.Point) geomCenter;
coordinate = new InferenceResultDto.Coordinate(point.getX(), point.getY());
}
return new InferenceResultDto.DetailListEntity(
uuid, cdProb, comparedClazz, targetClazz, mapSheet, coordinate, createdDttm);
}
}

View File

@@ -1,10 +1,16 @@
package com.kamco.cd.kamcoback.postgres.entity; package com.kamco.cd.kamcoback.postgres.entity;
import com.kamco.cd.kamcoback.common.enums.DetectionClassification;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Clazzes;
import jakarta.persistence.Column; import jakarta.persistence.Column;
import jakarta.persistence.Entity; import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.GeneratedValue; import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType; import jakarta.persistence.GenerationType;
import jakarta.persistence.Id; import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.Table; import jakarta.persistence.Table;
import jakarta.validation.constraints.Size; import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
@@ -26,21 +32,21 @@ public class MapSheetAnalDataInferenceGeomEntity {
private Long geoUid; private Long geoUid;
@Column(name = "cd_prob") @Column(name = "cd_prob")
private Float cdProb; private Double cdProb;
@Size(max = 40) @Size(max = 40)
@Column(name = "class_before_cd", length = 40) @Column(name = "class_before_cd", length = 40)
private String classBeforeCd; private String classBeforeCd;
@Column(name = "class_before_prob") @Column(name = "class_before_prob")
private Float classBeforeProb; private Double classBeforeProb;
@Size(max = 40) @Size(max = 40)
@Column(name = "class_after_cd", length = 40) @Column(name = "class_after_cd", length = 40)
private String classAfterCd; private String classAfterCd;
@Column(name = "class_after_prob") @Column(name = "class_after_prob")
private Float classAfterProb; private Double classAfterProb;
@Column(name = "map_sheet_num") @Column(name = "map_sheet_num")
private Long mapSheetNum; private Long mapSheetNum;
@@ -52,7 +58,7 @@ public class MapSheetAnalDataInferenceGeomEntity {
private Integer targetYyyy; private Integer targetYyyy;
@Column(name = "area") @Column(name = "area")
private Float area; private Double area;
@Size(max = 100) @Size(max = 100)
@Column(name = "geo_type", length = 100) @Column(name = "geo_type", length = 100)
@@ -129,9 +135,6 @@ public class MapSheetAnalDataInferenceGeomEntity {
@Column(name = "stage") @Column(name = "stage")
private Integer stage; private Integer stage;
@Column(name = "map_5k_id")
private Long map5kId;
@Column(name = "file_created_yn") @Column(name = "file_created_yn")
private Boolean fileCreatedYn; private Boolean fileCreatedYn;
@@ -152,4 +155,25 @@ public class MapSheetAnalDataInferenceGeomEntity {
@Column(name = "pass_yn_dttm") @Column(name = "pass_yn_dttm")
private ZonedDateTime passYnDttm; private ZonedDateTime passYnDttm;
@ManyToOne(fetch = FetchType.EAGER)
@JoinColumn(name = "map_5k_id", referencedColumnName = "fid")
private MapInkx5kEntity map5k;
public InferenceResultDto.DetailListEntity toEntity() {
DetectionClassification classification = DetectionClassification.fromString(classBeforeCd);
Clazzes comparedClazz = new Clazzes(classification, classBeforeProb);
DetectionClassification classification1 = DetectionClassification.fromString(classAfterCd);
Clazzes targetClazz = new Clazzes(classification1, classAfterProb);
InferenceResultDto.MapSheet mapSheet = map5k != null ? map5k.toEntity() : null;
InferenceResultDto.Coordinate coordinate = null;
if (geomCenter != null) {
org.locationtech.jts.geom.Point point = (org.locationtech.jts.geom.Point) geomCenter;
coordinate = new InferenceResultDto.Coordinate(point.getX(), point.getY());
}
return new InferenceResultDto.DetailListEntity(
uuid, cdProb, comparedClazz, targetClazz, mapSheet, coordinate, createdDttm);
}
} }

View File

@@ -1,109 +0,0 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
/**
 * JPA entity for table {@code tb_map_sheet_anal}: one map-sheet analysis run
 * (model, year pair, timing, state, and result summary columns).
 */
@Getter
@Setter
@Entity
@Table(name = "tb_map_sheet_anal")
@NoArgsConstructor(access = AccessLevel.PROTECTED)
// TODO: remove setters
// TODO: add documentation
public class MapSheetAnalEntity {
// Primary key, generated from a DB sequence.
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_anal_id_gen")
@SequenceGenerator(
name = "tb_map_sheet_anal_id_gen",
sequenceName = "tb_map_sheet_anal_anal_uid",
allocationSize = 1)
@Column(name = "anal_uid", nullable = false)
private Long id;
// TODO: add UUID
// TODO: add unique key
@Column(name = "compare_yyyy")
private Integer compareYyyy; // comparison year
@Column(name = "target_yyyy")
private Integer targetYyyy; // target (base) year
@Column(name = "model_uid")
private Long modelUid; // model identifier key (TODO confirm)
@Size(max = 100)
@Column(name = "server_ids", length = 100)
private String serverIds; // server IDs (TODO confirm format)
@Column(name = "anal_map_sheet", length = Integer.MAX_VALUE)
private String analMapSheet; // analyzed map sheets (TODO confirm format)
// Analysis start/end and elapsed seconds.
@Column(name = "anal_strt_dttm")
private ZonedDateTime analStrtDttm;
@Column(name = "anal_end_dttm")
private ZonedDateTime analEndDttm;
@Column(name = "anal_sec")
private Long analSec;
@Column(name = "anal_pred_sec")
private Long analPredSec; // predicted duration in seconds (TODO confirm)
@Size(max = 20)
@Column(name = "anal_state", length = 20)
private String analState; // TODO: manage as an enum
@Size(max = 20)
@Column(name = "gukyuin_used", length = 20)
private String gukyuinUsed; // TODO: manage as a Boolean
@Column(name = "accuracy")
private Double accuracy;
@Size(max = 255)
@Column(name = "result_url")
private String resultUrl;
@Column(name = "created_uid")
private Long createdUid;
@Column(name = "updated_uid")
private Long updatedUid;
@Size(max = 255)
@Column(name = "anal_title")
private String analTitle;
@Column(name = "detecting_cnt")
private Long detectingCnt;
@Column(name = "base_map_sheet_num")
private String baseMapSheetNum;
// TODO: extract into a shared CommonDateEntity base class?
@ColumnDefault("now()")
@Column(name = "created_dttm")
private ZonedDateTime createdDttm;
@ColumnDefault("now()")
@Column(name = "updated_dttm")
private ZonedDateTime updatedDttm;
@Column(name = "gukyuin_apply_dttm")
private ZonedDateTime gukyuinApplyDttm;
}

View File

@@ -1,3 +1,32 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference; package com.kamco.cd.kamcoback.postgres.repository.Inference;
public interface MapSheetAnalDataInferenceRepositoryCustom {} import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResSummary;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import java.util.Optional;
import org.springframework.data.domain.Page;
/** Custom QueryDSL queries for inference (change-detection) analysis results. */
public interface MapSheetAnalDataInferenceRepositoryCustom {
/** Paged list of analysis runs matching the search conditions. */
Page<AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq);
/** Summary (title, model info, timing, counts) of one analysis run, if present. */
Optional<AnalResSummary> getInferenceResultSummary(Long id);
/** Paged geometry DTOs (polygon + center) for the analysis run {@code id}. */
Page<InferenceResultDto.Geom> getInferenceGeomList(
Long id, InferenceResultDto.SearchGeoReq searchGeoReq);
/** Paged detected-geometry entities for the given data UIDs, with filters/sort. */
Page<MapSheetAnalDataInferenceGeomEntity> listInferenceResultWithGeom(
List<Long> dataIds, SearchGeoReq searchReq);
/** Distinct map-sheet numbers covered by the analysis run {@code id}. */
List<Long> getSheets(Long id);
/** Per-class detection counts for the dashboard of the analysis run {@code id}. */
List<Dashboard> getDashboard(Long id);
/** All inference data rows whose {@code analUid} equals {@code id}. */
List<MapSheetAnalDataInferenceEntity> listAnalyGeom(@NotNull Long id);
}

View File

@@ -1,7 +1,36 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference; package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResSummary;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelMngBakEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Order;
import com.querydsl.core.types.OrderSpecifier;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.JPQLQuery;
import com.querydsl.jpa.impl.JPAQuery;
import com.querydsl.jpa.impl.JPAQueryFactory; import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Repository; import org.springframework.stereotype.Repository;
@Repository @Repository
@@ -10,4 +39,333 @@ public class MapSheetAnalDataInferenceRepositoryImpl
implements MapSheetAnalDataInferenceRepositoryCustom { implements MapSheetAnalDataInferenceRepositoryCustom {
private final JPAQueryFactory queryFactory; private final JPAQueryFactory queryFactory;
// QueryDSL root aliases shared by the query methods in this class.
private final QModelMngBakEntity tmm = QModelMngBakEntity.modelMngBakEntity;
private final QModelVerEntity tmv = QModelVerEntity.modelVerEntity;
private final QMapSheetAnalInferenceEntity mapSheetAnalEntity =
QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
// NOTE(review): the two fields below start with an upper-case letter and
// shadow their type names (lowerCamelCase expected); renaming them requires
// updating every usage in this class.
private final QMapSheetAnalDataInferenceEntity MapSheetAnalDataInferenceEntity =
QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
private final QMapSheetAnalDataInferenceGeomEntity MapSheetAnalDataInferenceGeomEntity =
QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
private final QMapSheetAnalSttcEntity mapSheetAnalSttcEntity =
QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
/**
 * Returns a page of analysis-result rows matching the search conditions.
 *
 * @param searchReq paging info plus optional status code and title keyword
 * @return page of {@link AnalResList} ordered by analysis UID descending
 */
@Override
public Page<AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq) {
Pageable pageable = searchReq.toPageable();
// Status code "0000" means "all" — no state filter is applied for it.
BooleanBuilder builder = new BooleanBuilder();
if (searchReq.getStatCode() != null && !"0000".equals(searchReq.getStatCode())) {
builder.and(mapSheetAnalEntity.analState.eq(searchReq.getStatCode()));
}
// Title keyword filter (substring match).
// NOTE(review): '%' / '_' in the keyword act as LIKE wildcards — confirm
// whether escaping is desired.
if (searchReq.getTitle() != null) {
builder.and(mapSheetAnalEntity.analTitle.like("%" + searchReq.getTitle() + "%"));
}
List<AnalResList> content =
queryFactory
.select(
Projections.constructor(
InferenceResultDto.AnalResList.class,
mapSheetAnalEntity.id,
mapSheetAnalEntity.analTitle,
mapSheetAnalEntity.detectingCnt,
mapSheetAnalEntity.analStrtDttm,
mapSheetAnalEntity.analEndDttm,
mapSheetAnalEntity.analSec,
mapSheetAnalEntity.analPredSec,
mapSheetAnalEntity.analState,
// Resolve the state code to a display name via DB function.
Expressions.stringTemplate(
"fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState),
mapSheetAnalEntity.gukyuinUsed))
.from(mapSheetAnalEntity)
.where(builder)
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(mapSheetAnalEntity.id.desc())
.fetch();
// Total count with the same filter (fetchCount is deprecated in QueryDSL 5 —
// NOTE(review): consider a count(*) query when upgrading).
long total =
queryFactory
.select(mapSheetAnalEntity.id)
.from(mapSheetAnalEntity)
.where(builder)
.fetchCount();
return new PageImpl<>(content, pageable, total);
}
/**
 * Returns the summary of one analysis run, joining the model name and its
 * latest version string.
 *
 * @param id analysis UID
 * @return summary DTO, or empty if no row matches
 */
@Override
public Optional<AnalResSummary> getInferenceResultSummary(Long id) {
// Subquery: the newest version UID for the joined model.
JPQLQuery<Long> latestVerUidSub =
JPAExpressions.select(tmv.id.max()).from(tmv).where(tmv.modelUid.eq(tmm.id));
Optional<InferenceResultDto.AnalResSummary> content =
Optional.ofNullable(
queryFactory
.select(
Projections.constructor(
InferenceResultDto.AnalResSummary.class,
mapSheetAnalEntity.id,
mapSheetAnalEntity.analTitle,
// "model name + space + version" display string.
tmm.modelNm.concat(" ").concat(tmv.modelVer).as("modelInfo"),
mapSheetAnalEntity.targetYyyy,
mapSheetAnalEntity.compareYyyy,
mapSheetAnalEntity.analStrtDttm,
mapSheetAnalEntity.analEndDttm,
mapSheetAnalEntity.analSec,
mapSheetAnalEntity.analPredSec,
mapSheetAnalEntity.resultUrl,
mapSheetAnalEntity.detectingCnt,
mapSheetAnalEntity.accuracy,
mapSheetAnalEntity.analState,
// Resolve the state code to a display name via DB function.
Expressions.stringTemplate(
"fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState)))
.from(mapSheetAnalEntity)
.leftJoin(tmm)
.on(mapSheetAnalEntity.modelUid.eq(tmm.id))
// Join only the latest version row of that model.
.leftJoin(tmv)
.on(tmv.modelUid.eq(tmm.id).and(tmv.id.eq(latestVerUidSub)))
.where(mapSheetAnalEntity.id.eq(id))
.fetchOne());
return content;
}
/**
 * Returns detection counts per after-classification code for one analysis run
 * (dashboard aggregation over the statistics table).
 *
 * @param id analysis UID
 * @return one {@link Dashboard} row per class code, ordered by class code
 */
@Override
public List<Dashboard> getDashboard(Long id) {
return queryFactory
.select(
Projections.constructor(
Dashboard.class,
mapSheetAnalSttcEntity.id.classAfterCd,
// Sum counts across the composite-key rows of the sttc table.
mapSheetAnalSttcEntity.classAfterCnt.sum()))
.from(mapSheetAnalSttcEntity)
.where(mapSheetAnalSttcEntity.id.analUid.eq(id))
.groupBy(mapSheetAnalSttcEntity.id.classAfterCd)
.orderBy(mapSheetAnalSttcEntity.id.classAfterCd.asc())
.fetch();
}
/** Fetches every inference data row whose {@code analUid} equals {@code id}. */
@Override
public List<MapSheetAnalDataInferenceEntity> listAnalyGeom(Long id) {
    QMapSheetAnalDataInferenceEntity dataRow =
        QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
    JPAQuery<MapSheetAnalDataInferenceEntity> byAnalysis =
        queryFactory.selectFrom(dataRow).where(dataRow.analUid.eq(id));
    return byAnalysis.fetch();
}
/**
 * Returns a page of detected-geometry entities belonging to the given data
 * UIDs, filtered by class codes / map-sheet numbers and sorted per request.
 *
 * @param ids data UIDs whose geometries should be listed
 * @param searchReq paging, sorting, and optional class / map-sheet filters
 * @return page of geometry entities (default order: createdDttm desc)
 */
@Override
public Page<MapSheetAnalDataInferenceGeomEntity> listInferenceResultWithGeom(
List<Long> ids, SearchGeoReq searchReq) {
// Alias for the detected-geometry table.
QMapSheetAnalDataInferenceGeomEntity detectedEntity =
QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
Pageable pageable = searchReq.toPageable();
// Base query with all search filters (null filters are skipped by QueryDSL).
JPAQuery<MapSheetAnalDataInferenceGeomEntity> query =
queryFactory
.selectFrom(detectedEntity)
.where(
detectedEntity.dataUid.in(ids),
eqTargetClass(detectedEntity, searchReq.getTargetClass()),
eqCompareClass(detectedEntity, searchReq.getCompareClass()),
containsMapSheetNum(detectedEntity, searchReq.getMapSheetNum()));
// Count before paging is applied (fetchCount is deprecated in QueryDSL 5 —
// NOTE(review): consider a count(*) query when upgrading).
long total = query.fetchCount();
// Sort from the Pageable; fall back to createdDttm desc when absent.
List<OrderSpecifier<?>> orders = getOrderSpecifiers(pageable.getSort());
if (orders.isEmpty()) {
orders.add(detectedEntity.createdDttm.desc());
}
List<MapSheetAnalDataInferenceGeomEntity> content =
query
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(orders.toArray(new OrderSpecifier[0]))
.fetch();
return new PageImpl<>(content, pageable, total);
}
/**
 * Returns a page of geometry DTOs for one analysis run, joining analysis →
 * data → geometry and applying optional class / map-sheet filters.
 *
 * @param id analysis UID
 * @param searchGeoReq paging plus optional class and map-sheet filters
 * @return page of {@link InferenceResultDto.Geom}
 */
@Override
public Page<InferenceResultDto.Geom> getInferenceGeomList(Long id, SearchGeoReq searchGeoReq) {
Pageable pageable = searchGeoReq.toPageable();
BooleanBuilder builder = new BooleanBuilder();
// Restrict to the requested analysis run.
builder.and(mapSheetAnalEntity.id.eq(id));
// Target-year classification filter (case-insensitive).
if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")) {
builder.and(
MapSheetAnalDataInferenceGeomEntity.classAfterCd
.toLowerCase()
.eq(searchGeoReq.getTargetClass().toLowerCase()));
}
// Comparison-year classification filter (case-insensitive).
if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")) {
builder.and(
MapSheetAnalDataInferenceGeomEntity.classBeforeCd
.toLowerCase()
.eq(searchGeoReq.getCompareClass().toLowerCase()));
}
// Map-sheet number filter.
if (searchGeoReq.getMapSheetNum() != null && !searchGeoReq.getMapSheetNum().isEmpty()) {
List<Long> mapSheetNum = searchGeoReq.getMapSheetNum();
builder.and(MapSheetAnalDataInferenceGeomEntity.mapSheetNum.in(mapSheetNum));
}
List<InferenceResultDto.Geom> content =
queryFactory
.select(
Projections.constructor(
InferenceResultDto.Geom.class,
MapSheetAnalDataInferenceGeomEntity.compareYyyy,
MapSheetAnalDataInferenceGeomEntity.targetYyyy,
MapSheetAnalDataInferenceGeomEntity.classBeforeCd,
MapSheetAnalDataInferenceGeomEntity.classBeforeProb,
MapSheetAnalDataInferenceGeomEntity.classAfterCd,
MapSheetAnalDataInferenceGeomEntity.classAfterProb,
MapSheetAnalDataInferenceGeomEntity.mapSheetNum,
MapSheetAnalDataInferenceGeomEntity.geom,
MapSheetAnalDataInferenceGeomEntity.geomCenter))
.from(mapSheetAnalEntity)
.join(MapSheetAnalDataInferenceEntity)
.on(MapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalEntity.id))
.join(MapSheetAnalDataInferenceGeomEntity)
.on(MapSheetAnalDataInferenceGeomEntity.dataUid.eq(MapSheetAnalDataInferenceEntity.id))
.where(builder)
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.fetch();
// Count with the same joins/filters (fetchCount is deprecated in QueryDSL 5 —
// NOTE(review): consider a count(*) query when upgrading).
long total =
queryFactory
.select(MapSheetAnalDataInferenceGeomEntity.geoUid)
.from(mapSheetAnalEntity)
.join(MapSheetAnalDataInferenceEntity)
.on(MapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalEntity.id))
.join(MapSheetAnalDataInferenceGeomEntity)
.on(MapSheetAnalDataInferenceGeomEntity.dataUid.eq(MapSheetAnalDataInferenceEntity.id))
.where(builder)
.fetchCount();
return new PageImpl<>(content, pageable, total);
}
/**
 * Returns the distinct 1:5000 map-sheet numbers analyzed in one run
 * (de-duplicated via GROUP BY).
 *
 * @param id analysis UID
 * @return distinct map-sheet numbers
 */
@Override
public List<Long> getSheets(Long id) {
return queryFactory
.select(MapSheetAnalDataInferenceEntity.mapSheetNum)
.from(mapSheetAnalEntity)
.join(MapSheetAnalDataInferenceEntity)
.on(MapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalEntity.id))
.where(mapSheetAnalEntity.id.eq(id))
.groupBy(MapSheetAnalDataInferenceEntity.mapSheetNum)
.fetch();
}
/** Pageable의 Sort를 QueryDSL OrderSpecifier로 변환 */
@SuppressWarnings({"unchecked", "rawtypes"})
private List<OrderSpecifier<?>> getOrderSpecifiers(Sort sort) {
List<OrderSpecifier<?>> orders = new ArrayList<>();
if (sort.isSorted()) {
QMapSheetAnalDataInferenceGeomEntity entity =
QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
for (Sort.Order order : sort) {
Order direction = order.isAscending() ? Order.ASC : Order.DESC;
String property = order.getProperty();
// 유효한 필드만 처리
switch (property) {
case "classBeforeCd" -> orders.add(new OrderSpecifier(direction, entity.classBeforeCd));
case "classBeforeProb" ->
orders.add(new OrderSpecifier(direction, entity.classBeforeProb));
case "classAfterCd" -> orders.add(new OrderSpecifier(direction, entity.classAfterCd));
case "classAfterProb" -> orders.add(new OrderSpecifier(direction, entity.classAfterProb));
case "mapSheetNum" -> orders.add(new OrderSpecifier(direction, entity.mapSheetNum));
case "compareYyyy" -> orders.add(new OrderSpecifier(direction, entity.compareYyyy));
case "targetYyyy" -> orders.add(new OrderSpecifier(direction, entity.targetYyyy));
case "area" -> orders.add(new OrderSpecifier(direction, entity.area));
case "createdDttm" -> orders.add(new OrderSpecifier(direction, entity.createdDttm));
case "updatedDttm" -> orders.add(new OrderSpecifier(direction, entity.updatedDttm));
// 유효하지 않은 필드는 무시
default -> {}
}
}
}
return orders;
}
private BooleanExpression eqTargetClass(
QMapSheetAnalDataInferenceGeomEntity detectedEntity, String targetClass) {
return targetClass != null && !targetClass.isEmpty()
? detectedEntity.classAfterCd.toLowerCase().eq(targetClass.toLowerCase())
: null;
}
private BooleanExpression eqCompareClass(
QMapSheetAnalDataInferenceGeomEntity detectedEntity, String compareClass) {
return compareClass != null && !compareClass.isEmpty()
? detectedEntity.classBeforeCd.toLowerCase().eq(compareClass.toLowerCase())
: null;
}
private BooleanExpression containsMapSheetNum(
QMapSheetAnalDataInferenceGeomEntity detectedEntity, List<Long> mapSheet) {
if (mapSheet == null || mapSheet.isEmpty()) {
return null;
}
return detectedEntity.mapSheetNum.in(mapSheet);
}
} }

View File

@@ -1,7 +0,0 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity;
import org.springframework.data.jpa.repository.JpaRepository;
/**
 * Spring Data repository for analysis runs ({@code tb_map_sheet_anal}); combines standard CRUD
 * with the custom QueryDSL queries declared in {@link MapSheetAnalDataRepositoryCustom}.
 */
public interface MapSheetAnalDataRepository
    extends JpaRepository<MapSheetAnalEntity, Long>, MapSheetAnalDataRepositoryCustom {}

View File

@@ -1,32 +0,0 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResSummary;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import java.util.Optional;
import org.springframework.data.domain.Page;
/** Custom QueryDSL read queries for analysis runs and their detected geometries. */
public interface MapSheetAnalDataRepositoryCustom {

  /** Pages analysis-run rows for the result list screen. */
  Page<AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq);

  /** Summary header of one run; empty when the id does not exist. */
  Optional<AnalResSummary> getInferenceResultSummary(Long id);

  /** Pages geometry DTOs of one run with optional class / map-sheet filters. */
  Page<InferenceResultDto.Geom> getInferenceGeomList(
      Long id, InferenceResultDto.SearchGeoReq searchGeoReq);

  /** Pages geometry entities belonging to the given data-row ids. */
  Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom(
      List<Long> dataIds, SearchGeoReq searchReq);

  /** Distinct 1:5000 map-sheet numbers inferred for the run. */
  List<Long> getSheets(Long id);

  /** Per-class detection counts for the run-detail dashboard. */
  List<Dashboard> getDashboard(Long id);

  /** All per-sheet data rows of the run. */
  List<MapSheetAnalDataEntity> listAnalyGeom(@NotNull Long id);
}

View File

@@ -1,371 +0,0 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResSummary;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelMngBakEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Order;
import com.querydsl.core.types.OrderSpecifier;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.JPQLQuery;
import com.querydsl.jpa.impl.JPAQuery;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Repository;
/**
 * QueryDSL implementation of {@link MapSheetAnalDataRepositoryCustom}: read-side queries over
 * analysis runs and their per-sheet detection geometries.
 */
@Repository
@RequiredArgsConstructor
public class MapSheetAnalDataRepositoryImpl implements MapSheetAnalDataRepositoryCustom {

  private final JPAQueryFactory queryFactory;

  // Q-type aliases; "tmm"/"tmv" mirror the model master / model version table aliases.
  private final QModelMngBakEntity tmm = QModelMngBakEntity.modelMngBakEntity;
  private final QModelVerEntity tmv = QModelVerEntity.modelVerEntity;
  private final QMapSheetAnalEntity mapSheetAnalEntity = QMapSheetAnalEntity.mapSheetAnalEntity;
  private final QMapSheetAnalDataEntity mapSheetAnalDataEntity =
      QMapSheetAnalDataEntity.mapSheetAnalDataEntity;
  private final QMapSheetAnalDataGeomEntity mapSheetAnalDataGeomEntity =
      QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
  private final QMapSheetAnalSttcEntity mapSheetAnalSttcEntity =
      QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;

  /**
   * Pages through analysis-run rows for the result list screen.
   *
   * @param searchReq paging info plus optional state-code and title filters
   * @return one page of {@link AnalResList} ordered by run id descending
   */
  @Override
  public Page<AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq) {
    Pageable pageable = searchReq.toPageable();

    // State code "0000" means "all"; only filter on a concrete state code.
    BooleanBuilder builder = new BooleanBuilder();
    if (searchReq.getStatCode() != null && !"0000".equals(searchReq.getStatCode())) {
      builder.and(mapSheetAnalEntity.analState.eq(searchReq.getStatCode()));
    }
    // Title substring filter.
    if (searchReq.getTitle() != null) {
      builder.and(mapSheetAnalEntity.analTitle.like("%" + searchReq.getTitle() + "%"));
    }

    List<AnalResList> content =
        queryFactory
            .select(
                Projections.constructor(
                    InferenceResultDto.AnalResList.class,
                    mapSheetAnalEntity.id,
                    mapSheetAnalEntity.analTitle,
                    mapSheetAnalEntity.analMapSheet,
                    mapSheetAnalEntity.detectingCnt,
                    mapSheetAnalEntity.analStrtDttm,
                    mapSheetAnalEntity.analEndDttm,
                    mapSheetAnalEntity.analSec,
                    mapSheetAnalEntity.analPredSec,
                    mapSheetAnalEntity.analState,
                    // DB function resolving code group "0002" to a display name.
                    Expressions.stringTemplate(
                        "fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState),
                    mapSheetAnalEntity.gukyuinUsed))
            .from(mapSheetAnalEntity)
            .where(builder)
            .offset(pageable.getOffset())
            .limit(pageable.getPageSize())
            .orderBy(mapSheetAnalEntity.id.desc())
            .fetch();

    long total =
        queryFactory
            .select(mapSheetAnalEntity.id)
            .from(mapSheetAnalEntity)
            .where(builder)
            .fetchCount();
    return new PageImpl<>(content, pageable, total);
  }

  /**
   * Loads the summary header of one analysis run, joined with its model name and the model's
   * latest registered version.
   *
   * @param id analysis run id
   * @return summary DTO, or empty when the run does not exist
   */
  @Override
  public Optional<AnalResSummary> getInferenceResultSummary(Long id) {
    // 1. Subquery: newest version uid for the joined model.
    JPQLQuery<Long> latestVerUidSub =
        JPAExpressions.select(tmv.id.max()).from(tmv).where(tmv.modelUid.eq(tmm.id));
    Optional<InferenceResultDto.AnalResSummary> content =
        Optional.ofNullable(
            queryFactory
                .select(
                    Projections.constructor(
                        InferenceResultDto.AnalResSummary.class,
                        mapSheetAnalEntity.id,
                        mapSheetAnalEntity.analTitle,
                        tmm.modelNm.concat(" ").concat(tmv.modelVer).as("modelInfo"),
                        mapSheetAnalEntity.targetYyyy,
                        mapSheetAnalEntity.compareYyyy,
                        mapSheetAnalEntity.analMapSheet,
                        mapSheetAnalEntity.analStrtDttm,
                        mapSheetAnalEntity.analEndDttm,
                        mapSheetAnalEntity.analSec,
                        mapSheetAnalEntity.analPredSec,
                        mapSheetAnalEntity.resultUrl,
                        mapSheetAnalEntity.detectingCnt,
                        mapSheetAnalEntity.accuracy,
                        mapSheetAnalEntity.analState,
                        Expressions.stringTemplate(
                            "fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState)))
                .from(mapSheetAnalEntity)
                .leftJoin(tmm)
                .on(mapSheetAnalEntity.modelUid.eq(tmm.id))
                .leftJoin(tmv)
                .on(tmv.modelUid.eq(tmm.id).and(tmv.id.eq(latestVerUidSub)))
                .where(mapSheetAnalEntity.id.eq(id))
                .fetchOne());
    return content;
  }

  /**
   * Detection counts per after-class for the run-detail dashboard.
   *
   * @param id analysis run id
   * @return per-class totals ordered by class code ascending
   */
  @Override
  public List<Dashboard> getDashboard(Long id) {
    return queryFactory
        .select(
            Projections.constructor(
                Dashboard.class,
                mapSheetAnalSttcEntity.id.classAfterCd,
                mapSheetAnalSttcEntity.classAfterCnt.sum()))
        .from(mapSheetAnalSttcEntity)
        .where(mapSheetAnalSttcEntity.id.analUid.eq(id))
        .groupBy(mapSheetAnalSttcEntity.id.classAfterCd)
        .orderBy(mapSheetAnalSttcEntity.id.classAfterCd.asc())
        .fetch();
  }

  /** All per-sheet data rows belonging to the given analysis run. */
  @Override
  public List<MapSheetAnalDataEntity> listAnalyGeom(Long id) {
    QMapSheetAnalDataEntity analy = QMapSheetAnalDataEntity.mapSheetAnalDataEntity;
    return queryFactory.selectFrom(analy).where(analy.analUid.eq(id)).fetch();
  }

  /**
   * Pages detected geometry entities for the given data-row ids, with optional class and
   * map-sheet filters.
   *
   * @param ids data-row ids to search within
   * @param searchReq paging, sorting and optional filters
   * @return one page of geometry entities
   */
  @Override
  public Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom(
      List<Long> ids, SearchGeoReq searchReq) {
    // Detection geometry alias.
    QMapSheetAnalDataGeomEntity detectedEntity =
        QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
    Pageable pageable = searchReq.toPageable();

    // Search conditions; null predicates from the eq*/contains* helpers are ignored by QueryDSL.
    JPAQuery<MapSheetAnalDataGeomEntity> query =
        queryFactory
            .selectFrom(detectedEntity)
            .where(
                detectedEntity.dataUid.in(ids),
                eqTargetClass(detectedEntity, searchReq.getTargetClass()),
                eqCompareClass(detectedEntity, searchReq.getCompareClass()),
                containsMapSheetNum(detectedEntity, searchReq.getMapSheetNum()));

    // Total count before paging is applied.
    long total = query.fetchCount();

    // Sort from the Pageable; default to createdDttm desc when none is given.
    List<OrderSpecifier<?>> orders = getOrderSpecifiers(pageable.getSort());
    if (orders.isEmpty()) {
      orders.add(detectedEntity.createdDttm.desc());
    }

    List<MapSheetAnalDataGeomEntity> content =
        query
            .offset(pageable.getOffset())
            .limit(pageable.getPageSize())
            .orderBy(orders.toArray(new OrderSpecifier[0]))
            .fetch();
    return new PageImpl<>(content, pageable, total);
  }

  /**
   * Pages geometry DTOs of one analysis run (detail list).
   *
   * @param id analysis run id
   * @param searchGeoReq paging plus optional class / map-sheet filters
   * @return one page of {@link InferenceResultDto.Geom}
   */
  @Override
  public Page<InferenceResultDto.Geom> getInferenceGeomList(Long id, SearchGeoReq searchGeoReq) {
    Pageable pageable = searchGeoReq.toPageable();
    BooleanBuilder builder = new BooleanBuilder();
    // Restrict to the requested analysis run.
    builder.and(mapSheetAnalEntity.id.eq(id));
    // Target-year (after) classification filter, case-insensitive.
    if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")) {
      builder.and(
          mapSheetAnalDataGeomEntity
              .classAfterCd
              .toLowerCase()
              .eq(searchGeoReq.getTargetClass().toLowerCase()));
    }
    // Compare-year (before) classification filter, case-insensitive.
    if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")) {
      builder.and(
          mapSheetAnalDataGeomEntity
              .classBeforeCd
              .toLowerCase()
              .eq(searchGeoReq.getCompareClass().toLowerCase()));
    }
    // Map-sheet number filter.
    if (searchGeoReq.getMapSheetNum() != null && !searchGeoReq.getMapSheetNum().isEmpty()) {
      List<Long> mapSheetNum = searchGeoReq.getMapSheetNum();
      builder.and(mapSheetAnalDataGeomEntity.mapSheetNum.in(mapSheetNum));
    }

    List<InferenceResultDto.Geom> content =
        queryFactory
            .select(
                Projections.constructor(
                    InferenceResultDto.Geom.class,
                    mapSheetAnalDataGeomEntity.compareYyyy,
                    mapSheetAnalDataGeomEntity.targetYyyy,
                    mapSheetAnalDataGeomEntity.classBeforeCd,
                    mapSheetAnalDataGeomEntity.classBeforeProb,
                    mapSheetAnalDataGeomEntity.classAfterCd,
                    mapSheetAnalDataGeomEntity.classAfterProb,
                    mapSheetAnalDataGeomEntity.mapSheetNum,
                    mapSheetAnalDataGeomEntity.geom,
                    mapSheetAnalDataGeomEntity.geomCenter))
            .from(mapSheetAnalEntity)
            .join(mapSheetAnalDataEntity)
            .on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
            .join(mapSheetAnalDataGeomEntity)
            .on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id))
            .where(builder)
            .offset(pageable.getOffset())
            .limit(pageable.getPageSize())
            .fetch();

    long total =
        queryFactory
            .select(mapSheetAnalDataGeomEntity.id)
            .from(mapSheetAnalEntity)
            .join(mapSheetAnalDataEntity)
            .on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
            .join(mapSheetAnalDataGeomEntity)
            .on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id))
            .where(builder)
            .fetchCount();
    return new PageImpl<>(content, pageable, total);
  }

  /**
   * Distinct 1:5000 map-sheet numbers that were inferred for the given analysis run.
   *
   * @param id analysis run id
   * @return distinct map-sheet numbers (deduplicated via GROUP BY)
   */
  @Override
  public List<Long> getSheets(Long id) {
    return queryFactory
        .select(mapSheetAnalDataEntity.mapSheetNum)
        .from(mapSheetAnalEntity)
        .join(mapSheetAnalDataEntity)
        .on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
        .where(mapSheetAnalEntity.id.eq(id))
        .groupBy(mapSheetAnalDataEntity.mapSheetNum)
        .fetch();
  }

  /** Converts the Pageable's {@link Sort} into QueryDSL {@link OrderSpecifier}s. */
  @SuppressWarnings({"unchecked", "rawtypes"})
  private List<OrderSpecifier<?>> getOrderSpecifiers(Sort sort) {
    List<OrderSpecifier<?>> orders = new ArrayList<>();
    if (sort.isSorted()) {
      QMapSheetAnalDataGeomEntity entity = QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
      for (Sort.Order order : sort) {
        Order direction = order.isAscending() ? Order.ASC : Order.DESC;
        String property = order.getProperty();
        // Only whitelisted fields are sortable.
        switch (property) {
          case "classBeforeCd" -> orders.add(new OrderSpecifier(direction, entity.classBeforeCd));
          case "classBeforeProb" ->
              orders.add(new OrderSpecifier(direction, entity.classBeforeProb));
          case "classAfterCd" -> orders.add(new OrderSpecifier(direction, entity.classAfterCd));
          case "classAfterProb" -> orders.add(new OrderSpecifier(direction, entity.classAfterProb));
          case "mapSheetNum" -> orders.add(new OrderSpecifier(direction, entity.mapSheetNum));
          case "compareYyyy" -> orders.add(new OrderSpecifier(direction, entity.compareYyyy));
          case "targetYyyy" -> orders.add(new OrderSpecifier(direction, entity.targetYyyy));
          case "area" -> orders.add(new OrderSpecifier(direction, entity.area));
          case "createdDttm" -> orders.add(new OrderSpecifier(direction, entity.createdDttm));
          case "updatedDttm" -> orders.add(new OrderSpecifier(direction, entity.updatedDttm));
          // Unknown sort keys are ignored.
          default -> {}
        }
      }
    }
    return orders;
  }

  /** Case-insensitive after-class filter; {@code null} (no-op predicate) when blank. */
  private BooleanExpression eqTargetClass(
      QMapSheetAnalDataGeomEntity detectedEntity, String targetClass) {
    return targetClass != null && !targetClass.isEmpty()
        ? detectedEntity.classAfterCd.toLowerCase().eq(targetClass.toLowerCase())
        : null;
  }

  /** Case-insensitive before-class filter; {@code null} (no-op predicate) when blank. */
  private BooleanExpression eqCompareClass(
      QMapSheetAnalDataGeomEntity detectedEntity, String compareClass) {
    return compareClass != null && !compareClass.isEmpty()
        ? detectedEntity.classBeforeCd.toLowerCase().eq(compareClass.toLowerCase())
        : null;
  }

  /** IN filter on map-sheet numbers; {@code null} (no-op predicate) when the list is empty. */
  private BooleanExpression containsMapSheetNum(
      QMapSheetAnalDataGeomEntity detectedEntity, List<Long> mapSheet) {
    if (mapSheet == null || mapSheet.isEmpty()) {
      return null;
    }
    return detectedEntity.mapSheetNum.in(mapSheet);
  }
}

View File

@@ -1,7 +1,8 @@
package com.kamco.cd.kamcoback.postgres.repository.changedetection; package com.kamco.cd.kamcoback.postgres.repository.changedetection;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
public interface ChangeDetectionRepository public interface ChangeDetectionRepository
extends JpaRepository<MapSheetAnalDataGeomEntity, Long>, ChangeDetectionRepositoryCustom {} extends JpaRepository<MapSheetAnalDataInferenceGeomEntity, Long>,
ChangeDetectionRepositoryCustom {}

View File

@@ -3,16 +3,16 @@ package com.kamco.cd.kamcoback.postgres.repository.changedetection;
import static com.kamco.cd.kamcoback.postgres.entity.QDemoLearningAnalysisSceneItemEntity.demoLearningAnalysisSceneItemEntity; import static com.kamco.cd.kamcoback.postgres.entity.QDemoLearningAnalysisSceneItemEntity.demoLearningAnalysisSceneItemEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QImageryEntity.imageryEntity; import static com.kamco.cd.kamcoback.postgres.entity.QImageryEntity.imageryEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity.mapInkx5kEntity; import static com.kamco.cd.kamcoback.postgres.entity.QMapInkx5kEntity.mapInkx5kEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataEntity.mapSheetAnalDataEntity; import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity; import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity.mapSheetAnalEntity; import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity; import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto; import com.kamco.cd.kamcoback.changedetection.dto.ChangeDetectionDto;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.querydsl.core.types.Projections; import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.CaseBuilder; import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions; import com.querydsl.core.types.dsl.Expressions;
@@ -29,7 +29,7 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
private final JPAQueryFactory queryFactory; private final JPAQueryFactory queryFactory;
public ChangeDetectionRepositoryImpl(JPAQueryFactory queryFactory) { public ChangeDetectionRepositoryImpl(JPAQueryFactory queryFactory) {
super(MapSheetAnalDataGeomEntity.class); super(MapSheetAnalDataInferenceGeomEntity.class);
this.queryFactory = queryFactory; this.queryFactory = queryFactory;
} }
@@ -42,9 +42,11 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
@Override @Override
public List<String> findPolygonJson() { public List<String> findPolygonJson() {
return queryFactory return queryFactory
.select(Expressions.stringTemplate("ST_AsGeoJSON({0})", mapSheetAnalDataGeomEntity.geom)) .select(
.from(mapSheetAnalDataGeomEntity) Expressions.stringTemplate(
.orderBy(mapSheetAnalDataGeomEntity.id.desc()) "ST_AsGeoJSON({0})", mapSheetAnalDataInferenceGeomEntity.geom))
.from(mapSheetAnalDataInferenceGeomEntity)
.orderBy(mapSheetAnalDataInferenceGeomEntity.geoUid.desc())
.fetch(); .fetch();
} }
@@ -58,13 +60,13 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
mapSheetAnalSttcEntity.id.classAfterCd.as("classCd"), mapSheetAnalSttcEntity.id.classAfterCd.as("classCd"),
mapSheetAnalSttcEntity.id.classAfterCd.as("classNm"), // 앞단 CoreService 에서 한글명으로 변환 mapSheetAnalSttcEntity.id.classAfterCd.as("classNm"), // 앞단 CoreService 에서 한글명으로 변환
mapSheetAnalSttcEntity.classAfterCnt.sum())) mapSheetAnalSttcEntity.classAfterCnt.sum()))
.from(mapSheetAnalEntity) .from(mapSheetAnalInferenceEntity)
.innerJoin(mapSheetAnalDataEntity) .innerJoin(mapSheetAnalDataInferenceEntity)
.on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id)) .on(mapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
.innerJoin(mapSheetAnalSttcEntity) .innerJoin(mapSheetAnalSttcEntity)
.on(mapSheetAnalSttcEntity.id.dataUid.eq(mapSheetAnalDataEntity.id)) .on(mapSheetAnalSttcEntity.id.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
.where( .where(
mapSheetAnalEntity.id.eq(id), mapSheetAnalInferenceEntity.id.eq(id),
mapSheetAnalSttcEntity.id.mapSheetNum.eq(Long.valueOf(mapSheetNum))) mapSheetAnalSttcEntity.id.mapSheetNum.eq(Long.valueOf(mapSheetNum)))
.groupBy(mapSheetAnalSttcEntity.id.classAfterCd) .groupBy(mapSheetAnalSttcEntity.id.classAfterCd)
.fetch(); .fetch();
@@ -112,13 +114,13 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
.select( .select(
Projections.constructor( Projections.constructor(
ChangeDetectionDto.AnalYearList.class, ChangeDetectionDto.AnalYearList.class,
mapSheetAnalEntity.id, mapSheetAnalInferenceEntity.id,
mapSheetAnalEntity.analTitle, mapSheetAnalInferenceEntity.analTitle,
mapSheetAnalEntity.compareYyyy.as("beforeYear"), mapSheetAnalInferenceEntity.compareYyyy.as("beforeYear"),
mapSheetAnalEntity.targetYyyy.as("afterYear"), mapSheetAnalInferenceEntity.targetYyyy.as("afterYear"),
mapSheetAnalEntity.baseMapSheetNum)) mapSheetAnalInferenceEntity.baseMapSheetNum))
.from(mapSheetAnalEntity) .from(mapSheetAnalInferenceEntity)
.orderBy(mapSheetAnalEntity.id.asc()) .orderBy(mapSheetAnalInferenceEntity.id.asc())
.fetch(); .fetch();
} }
@@ -133,24 +135,24 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
ChangeDetectionDto.PolygonQueryData.class, ChangeDetectionDto.PolygonQueryData.class,
Expressions.stringTemplate("{0}", "Feature"), Expressions.stringTemplate("{0}", "Feature"),
Expressions.stringTemplate( Expressions.stringTemplate(
"ST_AsGeoJSON({0})", mapSheetAnalDataGeomEntity.geom), "ST_AsGeoJSON({0})", mapSheetAnalDataInferenceGeomEntity.geom),
mapSheetAnalDataGeomEntity.id, mapSheetAnalDataInferenceGeomEntity.geoUid,
mapSheetAnalDataGeomEntity.area, mapSheetAnalDataInferenceGeomEntity.area,
mapSheetAnalDataGeomEntity.compareYyyy, mapSheetAnalDataInferenceGeomEntity.compareYyyy,
mapSheetAnalDataGeomEntity.classBeforeProb, mapSheetAnalDataInferenceGeomEntity.classBeforeProb,
mapSheetAnalDataGeomEntity.classBeforeCd.toUpperCase(), mapSheetAnalDataInferenceGeomEntity.classBeforeCd.toUpperCase(),
mapSheetAnalDataGeomEntity.targetYyyy, mapSheetAnalDataInferenceGeomEntity.targetYyyy,
mapSheetAnalDataGeomEntity.classAfterProb, mapSheetAnalDataInferenceGeomEntity.classAfterProb,
mapSheetAnalDataGeomEntity.classAfterCd.toUpperCase(), mapSheetAnalDataInferenceGeomEntity.classAfterCd.toUpperCase(),
mapSheetAnalDataGeomEntity.cdProb)) mapSheetAnalDataInferenceGeomEntity.cdProb))
.from(mapSheetAnalDataGeomEntity) .from(mapSheetAnalDataInferenceGeomEntity)
.innerJoin(mapSheetAnalDataEntity) .innerJoin(mapSheetAnalDataInferenceEntity)
.on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id)) .on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
.innerJoin(mapSheetAnalEntity) .innerJoin(mapSheetAnalInferenceEntity)
.on(mapSheetAnalEntity.id.eq(mapSheetAnalDataEntity.analUid)) .on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
.where( .where(
mapSheetAnalEntity.id.eq(analUid), mapSheetAnalInferenceEntity.id.eq(analUid),
mapSheetAnalDataGeomEntity.mapSheetNum.eq(Long.valueOf(mapSheetNum))) mapSheetAnalDataInferenceGeomEntity.mapSheetNum.eq(Long.valueOf(mapSheetNum)))
.fetch(); .fetch();
ObjectMapper mapper = new ObjectMapper(); ObjectMapper mapper = new ObjectMapper();
@@ -199,19 +201,20 @@ public class ChangeDetectionRepositoryImpl extends QuerydslRepositorySupport
ChangeDetectionDto.PointQueryData.class, ChangeDetectionDto.PointQueryData.class,
Expressions.stringTemplate("{0}", "Feature"), Expressions.stringTemplate("{0}", "Feature"),
Expressions.stringTemplate( Expressions.stringTemplate(
"ST_AsGeoJSON({0})", mapSheetAnalDataGeomEntity.geomCenter), // point "ST_AsGeoJSON({0})",
mapSheetAnalDataInferenceGeomEntity.geomCenter), // point
Projections.constructor( Projections.constructor(
ChangeDetectionDto.PointProperties.class, ChangeDetectionDto.PointProperties.class,
mapSheetAnalDataGeomEntity.id, mapSheetAnalDataInferenceGeomEntity.geoUid,
mapSheetAnalDataGeomEntity.classAfterCd.toUpperCase()))) mapSheetAnalDataInferenceGeomEntity.classAfterCd.toUpperCase())))
.from(mapSheetAnalDataGeomEntity) .from(mapSheetAnalDataInferenceGeomEntity)
.innerJoin(mapSheetAnalDataEntity) .innerJoin(mapSheetAnalDataInferenceEntity)
.on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id)) .on(mapSheetAnalDataInferenceGeomEntity.dataUid.eq(mapSheetAnalDataInferenceEntity.id))
.innerJoin(mapSheetAnalEntity) .innerJoin(mapSheetAnalInferenceEntity)
.on(mapSheetAnalEntity.id.eq(mapSheetAnalDataEntity.analUid)) .on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
.where( .where(
mapSheetAnalEntity.id.eq(analUid), mapSheetAnalInferenceEntity.id.eq(analUid),
mapSheetAnalDataGeomEntity.mapSheetNum.eq(Long.valueOf(mapSheetNum))) mapSheetAnalDataInferenceGeomEntity.mapSheetNum.eq(Long.valueOf(mapSheetNum)))
.fetch(); .fetch();
ObjectMapper mapper = new ObjectMapper(); ObjectMapper mapper = new ObjectMapper();

View File

@@ -3,7 +3,6 @@ package com.kamco.cd.kamcoback.postgres.repository.label;
import static com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity.labelingAssignmentEntity; import static com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity.labelingAssignmentEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QLabelingInspectorEntity.labelingInspectorEntity; import static com.kamco.cd.kamcoback.postgres.entity.QLabelingInspectorEntity.labelingInspectorEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity; import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity.mapSheetAnalEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity; import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
import static com.kamco.cd.kamcoback.postgres.entity.QMemberEntity.memberEntity; import static com.kamco.cd.kamcoback.postgres.entity.QMemberEntity.memberEntity;
@@ -114,11 +113,11 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
connection -> { connection -> {
String sql = String sql =
""" """
insert into tb_labeling_assignment insert into tb_labeling_assignment
(assignment_uid, inference_geom_uid, worker_uid, (assignment_uid, inference_geom_uid, worker_uid,
work_state, assign_group_id, anal_uid) work_state, assign_group_id, anal_uid)
values (?, ?, ?, ?, ?, ?) values (?, ?, ?, ?, ?, ?)
"""; """;
try (PreparedStatement ps = connection.prepareStatement(sql)) { try (PreparedStatement ps = connection.prepareStatement(sql)) {
int batchSize = 0; int batchSize = 0;
@@ -488,20 +487,20 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
.select( .select(
Projections.constructor( Projections.constructor(
InferenceDetail.class, InferenceDetail.class,
mapSheetAnalEntity.analTitle, mapSheetAnalInferenceEntity.analTitle,
Expressions.numberTemplate(Integer.class, "{0}", 4), Expressions.numberTemplate(Integer.class, "{0}", 4),
mapSheetAnalEntity.gukyuinApplyDttm, mapSheetAnalInferenceEntity.gukyuinApplyDttm,
mapSheetAnalEntity.detectingCnt, mapSheetAnalInferenceEntity.detectingCnt,
labelingAssignmentEntity.workerUid.countDistinct(), labelingAssignmentEntity.workerUid.countDistinct(),
labelingAssignmentEntity.inspectorUid.countDistinct())) labelingAssignmentEntity.inspectorUid.countDistinct()))
.from(mapSheetAnalEntity) .from(mapSheetAnalInferenceEntity)
.innerJoin(labelingAssignmentEntity) .innerJoin(labelingAssignmentEntity)
.on(mapSheetAnalEntity.id.eq(labelingAssignmentEntity.analUid)) .on(mapSheetAnalInferenceEntity.id.eq(labelingAssignmentEntity.analUid))
.where(mapSheetAnalEntity.id.eq(analEntity.getId())) .where(mapSheetAnalInferenceEntity.id.eq(analEntity.getId()))
.groupBy( .groupBy(
mapSheetAnalEntity.analTitle, mapSheetAnalInferenceEntity.analTitle,
mapSheetAnalEntity.gukyuinApplyDttm, mapSheetAnalInferenceEntity.gukyuinApplyDttm,
mapSheetAnalEntity.detectingCnt) mapSheetAnalInferenceEntity.detectingCnt)
.fetchOne(); .fetchOne();
} }

View File

@@ -11,7 +11,7 @@ import com.kamco.cd.kamcoback.label.dto.LabelWorkDto;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMng; import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMng;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMngDetail; import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMngDetail;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.WorkerState; import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.WorkerState;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity; import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
import com.querydsl.core.BooleanBuilder; import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.OrderSpecifier; import com.querydsl.core.types.OrderSpecifier;
@@ -53,7 +53,7 @@ public class LabelWorkRepositoryImpl extends QuerydslRepositorySupport
@PersistenceContext private EntityManager em; @PersistenceContext private EntityManager em;
public LabelWorkRepositoryImpl(JPAQueryFactory queryFactory) { public LabelWorkRepositoryImpl(JPAQueryFactory queryFactory) {
super(MapSheetAnalDataGeomEntity.class); super(MapSheetAnalDataInferenceGeomEntity.class);
this.queryFactory = queryFactory; this.queryFactory = queryFactory;
} }