Add file generation

2025-12-26 10:22:53 +09:00
parent d5f6d7e91d
commit 8e6b41c9ad
24 changed files with 1571 additions and 381 deletions

View File

@@ -38,6 +38,9 @@ dependencies {
     //geometry
     implementation 'com.fasterxml.jackson.core:jackson-databind'
+    implementation "org.geotools:gt-shapefile:30.0"
+    implementation "org.geotools:gt-referencing:30.0"
+    implementation "org.geotools:gt-geojson:30.0"
     implementation 'org.locationtech.jts.io:jts-io-common:1.20.0'
     implementation 'org.locationtech.jts:jts-core:1.19.0'
     implementation 'org.hibernate:hibernate-spatial:6.2.7.Final'

View File

@@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.config;
 import com.kamco.cd.kamcoback.auth.CustomAuthenticationProvider;
 import com.kamco.cd.kamcoback.auth.JwtAuthenticationFilter;
+import com.kamco.cd.kamcoback.auth.MenuAuthorizationManager;
 import java.util.List;
 import lombok.RequiredArgsConstructor;
 import org.springframework.context.annotation.Bean;
@@ -28,6 +29,7 @@ public class SecurityConfig {
     private final JwtAuthenticationFilter jwtAuthenticationFilter;
     private final CustomAuthenticationProvider customAuthenticationProvider;
+    private final MenuAuthorizationManager menuAuthorizationManager;
     @Bean
     public SecurityFilterChain securityFilterChain(HttpSecurity http) throws Exception {

View File

@@ -0,0 +1,27 @@
package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@Tag(name = "추론결과 데이터 생성", description = "추론결과 데이터 생성 API")
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/inference/shp")
public class InferenceResultShpApiController {
private final InferenceResultShpService inferenceResultShpService;
@Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장")
@PostMapping
public ApiResponseDto<Void> saveInferenceResultData() {
inferenceResultShpService.saveInferenceResultData();
return ApiResponseDto.createOK(null);
}
}
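For reference, a minimal client sketch for triggering this endpoint; the host, port, and token are hypothetical, and the request assumes the JWT authentication wired up in SecurityConfig:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class InferenceShpClientSketch {
    public static void main(String[] args) throws Exception {
        // POST /api/inference/shp takes no body; it only kicks off the export.
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8080/api/inference/shp")) // hypothetical host/port
            .header("Authorization", "Bearer <token>") // assumed JWT auth
            .POST(HttpRequest.BodyPublishers.noBody())
            .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body()); // expect the ApiResponseDto OK envelope
    }
}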

View File

@@ -0,0 +1,13 @@
package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import java.util.List;
public interface ShpWriter {
// SHP (.shp/.shx/.dbf)
void writeShp(String shpBasePath, List<InferenceResultShpDto.Basic> rows);
// GeoJSON (.geojson)
void writeGeoJson(String geoJsonPath, List<InferenceResultShpDto.Basic> rows);
}

View File

@@ -0,0 +1,86 @@
package com.kamco.cd.kamcoback.inference.dto;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.locationtech.jts.geom.Geometry;
public class InferenceResultShpDto {
@Getter
@Setter
public static class Basic {
private Long id;
private UUID uuid;
private Integer stage;
private Float cdProb;
private Integer input1;
private Integer input2;
private Long mapId;
private String beforeClass;
private Float beforeProbability;
private String afterClass;
private Float afterProbability;
private Float area;
private ZonedDateTime createdDttm;
private ZonedDateTime updatedDttm;
private Geometry geometry;
public Basic(
Long id,
UUID uuid,
Integer stage,
Float cdProb,
Integer input1,
Integer input2,
Long mapId,
String beforeClass,
Float beforeProbability,
String afterClass,
Float afterProbability,
Float area,
ZonedDateTime createdDttm,
ZonedDateTime updatedDttm,
Geometry geometry
) {
this.id = id;
this.uuid = uuid;
this.stage = stage;
this.cdProb = cdProb;
this.input1 = input1;
this.input2 = input2;
this.mapId = mapId;
this.beforeClass = beforeClass;
this.beforeProbability = beforeProbability;
this.afterClass = afterClass;
this.afterProbability = afterProbability;
this.area = area;
this.createdDttm = createdDttm;
this.updatedDttm = updatedDttm;
this.geometry = geometry;
}
public static Basic from(InferenceResultEntity e) {
return new Basic(
e.getId(),
e.getUuid(),
e.getStage(),
e.getCdProb(),
e.getInput1(),
e.getInput2(),
e.getMapId(),
e.getBeforeClass(),
e.getBeforeProbability(),
e.getAfterClass(),
e.getAfterProbability(),
e.getArea(),
e.getCreatedDttm(),
e.getUpdatedDttm(),
e.getGeometry()
);
}
}
}

View File

@@ -0,0 +1,435 @@
package com.kamco.cd.kamcoback.inference.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.kamco.cd.kamcoback.inference.ShpWriter;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.geotools.api.data.SimpleFeatureSource;
import org.geotools.api.data.SimpleFeatureStore;
import org.geotools.api.data.Transaction;
import org.geotools.api.feature.simple.SimpleFeature;
import org.geotools.api.feature.simple.SimpleFeatureType;
import org.geotools.api.referencing.crs.CoordinateReferenceSystem;
import org.geotools.data.shapefile.ShapefileDataStore;
import org.geotools.data.shapefile.ShapefileDataStoreFactory;
import org.geotools.feature.DefaultFeatureCollection;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.geojson.geom.GeometryJSON;
import org.geotools.referencing.CRS;
import org.locationtech.jts.geom.Geometry;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class GeoToolsShpWriter implements ShpWriter {
// Use EUC-KR so Korean text in the DBF file is not corrupted
private static final Charset DBF_CHARSET = Charset.forName("EUC-KR");
// GeoJSON output is UTF-8
private static final Charset GEOJSON_CHARSET = StandardCharsets.UTF_8;
// Coordinate reference system: Korea 2000 / Central Belt 2010
private static final String EPSG_5186 = "EPSG:5186";
/**
* Creates the SHP file set (.shp/.shx/.dbf/.prj).
* <p>
* - Files are derived from shpBasePath, e.g. /Users/kim/export/shp/1_map_2021_2022 → 1_map_2021_2022.shp → 1_map_2021_2022.shx → 1_map_2021_2022.dbf → 1_map_2021_2022.prj
* <p>
* - The schema is built from the concrete type of the first valid geometry. - The CRS is set to EPSG:5186, and the .prj file is written directly.
*
* @param shpBasePath base path of the SHP files, without the extension
* @param rows rows of one group (stage, mapId, input1, input2)
*/
@Override
public void writeShp(String shpBasePath, List<InferenceResultShpDto.Basic> rows) {
if (rows == null || rows.isEmpty()) {
return;
}
// Shapefiles do not allow Geometry.class as the schema type,
// so the schema is built from the concrete type of the first non-null geometry.
Geometry firstGeom = firstNonNullGeometry(rows);
if (firstGeom == null) {
throw new IllegalArgumentException(
"SHP 생성 실패: geometry가 전부 null 입니다. path=" + shpBasePath);
}
@SuppressWarnings("unchecked")
Class<? extends Geometry> geomType =
(Class<? extends Geometry>) firstGeom.getClass();
ShapefileDataStore dataStore = null;
try {
File shpFile = new File(shpBasePath + ".shp");
createDirectories(shpFile);
// Load the EPSG:5186 CRS
CoordinateReferenceSystem crs = CRS.decode(EPSG_5186, false);
// Build the FeatureType (schema)
SimpleFeatureType schema = createSchema(geomType, crs);
// Create the ShapefileDataStore
dataStore = createDataStore(shpFile, schema);
// Build the FeatureCollection
DefaultFeatureCollection collection =
buildFeatureCollection(schema, rows);
// Write the features to the actual SHP file
writeFeatures(dataStore, collection);
// Write the .prj file directly (EPSG:5186)
writePrjFile(shpBasePath, crs);
log.info(
"SHP 생성 완료: {} ({} features)",
shpFile.getAbsolutePath(),
collection.size()
);
} catch (Exception e) {
throw new RuntimeException("SHP 생성 실패: " + shpBasePath, e);
} finally {
if (dataStore != null) {
try {
dataStore.dispose();
} catch (Exception ignore) {
}
}
}
}
/**
* Creates the GeoJSON file (.geojson).
* <p>
* - Output is a FeatureCollection. - The top level carries name / crs / properties. - One Feature is created per polygon. - Geometries are serialized with GeoTools GeometryJSON.
* <p>
* Example structure: { "type": "FeatureCollection", "name": "stage_input1_input2_mapId", "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::5186" } },
* "properties": { ... }, "features": [ ... ] }
*
* @param geoJsonPath full path of the GeoJSON file to create (including .geojson)
* @param rows rows of one group (stage, mapId, input1, input2)
*/
@Override
public void writeGeoJson(String geoJsonPath, List<InferenceResultShpDto.Basic> rows) {
if (rows == null || rows.isEmpty()) {
return;
}
try {
File geoJsonFile = new File(geoJsonPath);
createDirectories(geoJsonFile);
// Group-level metadata comes from the first row
InferenceResultShpDto.Basic first = rows.get(0);
ObjectMapper om = new ObjectMapper();
GeometryJSON gj = new GeometryJSON(15);
// FeatureCollection root
ObjectNode root = om.createObjectNode();
root.put("type", "FeatureCollection");
// name: stage_input1_input2_mapId
String name = String.format(
"%d_%d_%d_%d",
first.getStage(),
first.getInput1(),
first.getInput2(),
first.getMapId()
);
root.put("name", name);
// CRS (EPSG:5186)
ObjectNode crs = om.createObjectNode();
crs.put("type", "name");
ObjectNode crsProps = om.createObjectNode();
crsProps.put("name", "urn:ogc:def:crs:EPSG::5186");
crs.set("properties", crsProps);
root.set("crs", crs);
// Group-level properties
ObjectNode groupProps = om.createObjectNode();
groupProps.put("stage", first.getStage());
groupProps.put("input1", first.getInput1());
groupProps.put("input2", first.getInput2());
groupProps.put("map_id", first.getMapId());
// Training-server model versions to be added later
// groupProps.put("m1", "v1.2222.251223121212");
// groupProps.put("m2", "v2.211.251223121213");
// groupProps.put("m3", "v3.233.251223121214");
root.set("properties", groupProps);
// features array
ArrayNode features = om.createArrayNode();
for (InferenceResultShpDto.Basic dto : rows) {
if (dto.getGeometry() == null) {
continue;
}
ObjectNode feature = om.createObjectNode();
feature.put("type", "Feature");
// feature properties
ObjectNode p = om.createObjectNode();
p.put("polygon_id",
dto.getUuid() != null ? dto.getUuid().toString() : null);
if (dto.getCdProb() != null) {
p.put("cd_prob", dto.getCdProb());
}
if (dto.getInput1() != null) {
p.put("input1", dto.getInput1());
}
if (dto.getInput2() != null) {
p.put("input2", dto.getInput2());
}
if (dto.getMapId() != null) {
p.put("map_id", dto.getMapId());
}
if (dto.getArea() != null) {
p.put("area", dto.getArea());
}
p.put("before_c", dto.getBeforeClass());
if (dto.getBeforeProbability() != null) {
p.put("before_p", dto.getBeforeProbability());
}
p.put("after_c", dto.getAfterClass());
if (dto.getAfterProbability() != null) {
p.put("after_p", dto.getAfterProbability());
}
feature.set("properties", p);
// geometry
String geomJson = gj.toString(dto.getGeometry());
JsonNode geomNode = om.readTree(geomJson);
feature.set("geometry", geomNode);
features.add(feature);
}
root.set("features", features);
// Write the file
try (OutputStreamWriter w =
new OutputStreamWriter(
new FileOutputStream(geoJsonFile),
GEOJSON_CHARSET
)) {
om.writerWithDefaultPrettyPrinter().writeValue(w, root);
}
log.info(
"GeoJSON 생성 완료: {} ({} features)",
geoJsonFile.getAbsolutePath(),
features.size()
);
} catch (Exception e) {
throw new RuntimeException("GeoJSON 생성 실패: " + geoJsonPath, e);
}
}
/**
* Returns the first non-null Geometry found in rows.
* <p>
* - Used to decide the geometry type when building the SHP schema.
*
* @param rows DTO list
* @return the first non-null Geometry, or null if there is none
*/
private Geometry firstNonNullGeometry(List<InferenceResultShpDto.Basic> rows) {
for (InferenceResultShpDto.Basic r : rows) {
if (r != null && r.getGeometry() != null) {
return r.getGeometry();
}
}
return null;
}
/**
* Builds the SimpleFeatureType (schema) used for the SHP file.
* <p>
* - The geometry column must come first. - DBF columns observe shapefile limits (10-character column names, length limits).
*
* @param geomType concrete geometry type (Polygon, MultiPolygon, ...)
* @param crs coordinate reference system (EPSG:5186)
* @return SimpleFeatureType
*/
private SimpleFeatureType createSchema(
Class<? extends Geometry> geomType,
CoordinateReferenceSystem crs
) {
SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();
b.setName("inference_result");
b.setCRS(crs);
// geometry must be the first column
b.add("the_geom", geomType);
// DBF column definitions (respecting the 10-character name limit)
b.length(36).add("poly_id", String.class);
b.add("cd_prob", Double.class);
b.add("input1", Integer.class);
b.add("input2", Integer.class);
b.add("map_id", Long.class);
b.add("area", Double.class);
b.length(20).add("before_c", String.class);
b.add("before_p", Double.class);
b.length(20).add("after_c", String.class);
b.add("after_p", Double.class);
return b.buildFeatureType();
}
/**
* Creates a ShapefileDataStore and registers the schema.
* <p>
* - The DBF file encoding is set to EUC-KR. - A spatial index (.qix) is created.
*
* @param shpFile SHP file
* @param schema SimpleFeatureType
* @return the created ShapefileDataStore
*/
private ShapefileDataStore createDataStore(
File shpFile,
SimpleFeatureType schema
) throws Exception {
Map<String, Serializable> params = new HashMap<>();
params.put("url", shpFile.toURI().toURL());
params.put("create spatial index", Boolean.TRUE);
ShapefileDataStoreFactory factory = new ShapefileDataStoreFactory();
ShapefileDataStore dataStore =
(ShapefileDataStore) factory.createNewDataStore(params);
dataStore.setCharset(DBF_CHARSET);
dataStore.createSchema(schema);
return dataStore;
}
/**
* Converts the DTO list into a SimpleFeatureCollection.
* <p>
* - One Feature is created per DTO - rows whose geometry is null are skipped.
*
* @param schema FeatureType
* @param rows DTO list
* @return DefaultFeatureCollection
*/
private DefaultFeatureCollection buildFeatureCollection(
SimpleFeatureType schema,
List<InferenceResultShpDto.Basic> rows
) {
DefaultFeatureCollection collection = new DefaultFeatureCollection();
SimpleFeatureBuilder builder = new SimpleFeatureBuilder(schema);
for (InferenceResultShpDto.Basic dto : rows) {
if (dto == null || dto.getGeometry() == null) {
continue;
}
builder.add(dto.getGeometry());
builder.add(dto.getUuid() != null ? dto.getUuid().toString() : null);
builder.add(dto.getCdProb() != null ? dto.getCdProb().doubleValue() : null);
builder.add(dto.getInput1());
builder.add(dto.getInput2());
builder.add(dto.getMapId());
builder.add(dto.getArea() != null ? dto.getArea().doubleValue() : null);
builder.add(dto.getBeforeClass());
builder.add(dto.getBeforeProbability() != null
? dto.getBeforeProbability().doubleValue()
: null);
builder.add(dto.getAfterClass());
builder.add(dto.getAfterProbability() != null
? dto.getAfterProbability().doubleValue()
: null);
SimpleFeature feature = builder.buildFeature(null);
collection.add(feature);
builder.reset();
}
return collection;
}
/**
* Writes the FeatureCollection to the actual SHP file.
*
* @param dataStore ShapefileDataStore
* @param collection FeatureCollection
*/
private void writeFeatures(
ShapefileDataStore dataStore,
DefaultFeatureCollection collection
) throws Exception {
String typeName = dataStore.getTypeNames()[0];
SimpleFeatureSource featureSource =
dataStore.getFeatureSource(typeName);
if (!(featureSource instanceof SimpleFeatureStore store)) {
throw new IllegalStateException("FeatureStore 생성 실패");
}
store.setTransaction(Transaction.AUTO_COMMIT);
store.addFeatures(collection);
store.getTransaction().commit();
}
/**
* Writes the .prj file holding the SHP coordinate reference system.
*
* @param shpBasePath base SHP path (without extension)
* @param crs coordinate reference system (EPSG:5186)
*/
private void writePrjFile(
String shpBasePath,
CoordinateReferenceSystem crs
) throws Exception {
File prjFile = new File(shpBasePath + ".prj");
createDirectories(prjFile);
Files.writeString(
prjFile.toPath(),
crs.toWKT(),
StandardCharsets.UTF_8
);
}
/**
* Creates the target file's parent directory if it does not exist.
*
* @param file the file about to be created
*/
private void createDirectories(File file) throws Exception {
File parent = file.getParentFile();
if (parent != null) {
Files.createDirectories(parent.toPath());
}
}
}
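A minimal sketch of driving the writer directly; the output path, attribute values, and coordinates below are hypothetical (the coordinates are assumed to already be in EPSG:5186):

import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.service.GeoToolsShpWriter;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.UUID;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.Polygon;

public class ShpWriterSketch {
    public static void main(String[] args) {
        // Closed triangular ring in (assumed) EPSG:5186 coordinates
        GeometryFactory gf = new GeometryFactory();
        Polygon polygon = gf.createPolygon(new Coordinate[] {
            new Coordinate(200000, 500000), new Coordinate(200100, 500000),
            new Coordinate(200100, 500100), new Coordinate(200000, 500000)
        });
        InferenceResultShpDto.Basic row = new InferenceResultShpDto.Basic(
            1L, UUID.randomUUID(), 1, 0.92f, 2021, 2022, 34602070L,
            "forest", 0.88f, "building", 0.95f, 10000f,
            ZonedDateTime.now(), ZonedDateTime.now(), polygon);
        GeoToolsShpWriter writer = new GeoToolsShpWriter();
        // Produces 1_2021_2022_34602070.shp/.shx/.dbf/.prj and the matching .geojson
        writer.writeShp("/tmp/export/shp/1_2021_2022_34602070", List.of(row));
        writer.writeGeoJson("/tmp/export/geojson/1_2021_2022_34602070.geojson", List.of(row));
    }
}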

View File

@@ -0,0 +1,58 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.inference.ShpWriter;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService.ShpKey;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@RequiredArgsConstructor
@Transactional(readOnly = true)
public class InferenceResultShpService {
private final InferenceResultShpCoreService coreService;
private final ShpWriter shpWriter;
public void saveInferenceResultData() {
coreService.streamGrouped(1000, (key, entities) -> {
// Entity -> DTO
List<InferenceResultShpDto.Basic> dtoList =
entities.stream().map(InferenceResultShpDto.Basic::from).toList();
flushGroup(key, dtoList);
});
}
/**
* Builds the file name and output paths for one group, then writes the files.
*
* @param key group key
* @param dtoList rows of the group
*/
private void flushGroup(ShpKey key, List<InferenceResultShpDto.Basic> dtoList) {
// TODO: update once the output path is decided
String baseDir = System.getProperty("user.home") + "/export";
// File name: stage_input1_input2_mapId (same order as the GeoJSON "name" field)
String baseName = String.format(
"%d_%d_%d_%d",
key.stage(), key.input1(), key.input2(), key.mapId()
);
String shpBasePath = baseDir + "/shp/" + baseName; // without extension
String geoJsonPath = baseDir + "/geojson/" + baseName + ".geojson";
// shp: writes .shp/.shx/.dbf (and .prj)
shpWriter.writeShp(shpBasePath, dtoList);
// geojson: writes .geojson
shpWriter.writeGeoJson(geoJsonPath, dtoList);
}
}

View File

@@ -6,7 +6,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheet;
 import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
-import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
+import com.kamco.cd.kamcoback.postgres.repository.Inference.MapSheetAnalDataRepository;
 import com.kamco.cd.kamcoback.postgres.repository.scene.MapInkx5kRepository;
 import jakarta.persistence.EntityNotFoundException;
 import jakarta.validation.constraints.NotNull;
@@ -20,7 +20,7 @@ import org.springframework.transaction.annotation.Transactional;
 @RequiredArgsConstructor
 public class InferenceResultCoreService {
-    private final InferenceResultRepository inferenceResultRepository;
+    private final MapSheetAnalDataRepository mapSheetAnalDataRepository;
     private final MapInkx5kRepository mapInkx5kRepository;
     /**
@@ -31,7 +31,7 @@ public class InferenceResultCoreService {
      */
     public Page<InferenceResultDto.AnalResList> getInferenceResultList(
         InferenceResultDto.SearchReq searchReq) {
-        return inferenceResultRepository.getInferenceResultList(searchReq);
+        return mapSheetAnalDataRepository.getInferenceResultList(searchReq);
     }
     /**
@@ -42,7 +42,7 @@ public class InferenceResultCoreService {
      */
     public InferenceResultDto.AnalResSummary getInferenceResultSummary(Long id) {
         InferenceResultDto.AnalResSummary summary =
-            inferenceResultRepository
+            mapSheetAnalDataRepository
                 .getInferenceResultSummary(id)
                 .orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
         return summary;
@@ -55,7 +55,7 @@ public class InferenceResultCoreService {
      * @return
      */
     public List<Dashboard> getDashboard(Long id) {
-        return inferenceResultRepository.getDashboard(id);
+        return mapSheetAnalDataRepository.getDashboard(id);
     }
     /**
@@ -66,7 +66,7 @@ public class InferenceResultCoreService {
      */
     public Page<InferenceResultDto.Geom> getInferenceResultGeomList(
         Long id, InferenceResultDto.SearchGeoReq searchGeoReq) {
-        return inferenceResultRepository.getInferenceGeomList(id, searchGeoReq);
+        return mapSheetAnalDataRepository.getInferenceGeomList(id, searchGeoReq);
     }
     /**
@@ -80,13 +80,13 @@ public class InferenceResultCoreService {
         @NotNull Long analyId, InferenceResultDto.SearchGeoReq searchReq) {
         // Fetch the data ids belonging to the analysis ID.
         List<Long> dataIds =
-            inferenceResultRepository.listAnalyGeom(analyId).stream()
+            mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
                 .mapToLong(MapSheetAnalDataEntity::getId)
                 .boxed()
                 .toList();
         // Fetch the polygon data for those rows
         Page<MapSheetAnalDataGeomEntity> mapSheetAnalDataGeomEntities =
-            inferenceResultRepository.listInferenceResultWithGeom(dataIds, searchReq);
+            mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
         return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataGeomEntity::toEntity);
     }
@@ -97,13 +97,13 @@ public class InferenceResultCoreService {
      * @return
      */
     public List<Long> getSheets(Long id) {
-        return inferenceResultRepository.getSheets(id);
+        return mapSheetAnalDataRepository.getSheets(id);
     }
     @Transactional(readOnly = true)
     public List<MapSheet> listGetScenes5k(Long analyId) {
         List<String> sceneCodes =
-            inferenceResultRepository.listAnalyGeom(analyId).stream()
+            mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
                 .mapToLong(MapSheetAnalDataEntity::getMapSheetNum)
                 .mapToObj(String::valueOf)
                 .toList();

View File

@@ -0,0 +1,59 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;
import lombok.RequiredArgsConstructor;
import org.hibernate.ScrollableResults;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@RequiredArgsConstructor
public class InferenceResultShpCoreService {
private final InferenceResultRepository inferenceResultRepository;
public record ShpKey(Integer stage, Long mapId, Integer input1, Integer input2) {
}
/**
* Streams the DB with a cursor and passes each completed group to the handler. handler: (key, groupRows)
*/
@Transactional(readOnly = true)
public void streamGrouped(int fetchSize, BiConsumer<ShpKey, List<InferenceResultEntity>> handler) {
ScrollableResults cursor = inferenceResultRepository.scrollAllOrdered(fetchSize);
ShpKey currentKey = null;
List<InferenceResultEntity> buffer = new ArrayList<>(2000);
try {
while (cursor.next()) {
InferenceResultEntity row = (InferenceResultEntity) cursor.get();
ShpKey key = new ShpKey(row.getStage(), row.getMapId(), row.getInput1(), row.getInput2());
// Key changed -> the previous group is complete
if (currentKey != null && !currentKey.equals(key)) {
handler.accept(currentKey, buffer);
buffer = new ArrayList<>(2000);
}
currentKey = key;
buffer.add(row);
}
// Flush the last group
if (currentKey != null && !buffer.isEmpty()) {
handler.accept(currentKey, buffer);
}
} finally {
cursor.close();
}
}
}
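The single-pass grouping above is only correct because scrollAllOrdered sorts by exactly the key columns (stage, mapId, input1, input2): once the key changes, that group can never reappear later in the stream. A self-contained sketch of the same contract over an in-memory, pre-sorted list:

import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;

public class GroupedStreamSketch {
    record Key(int stage, long mapId) {}
    record Row(int stage, long mapId, String payload) {}

    static void streamGrouped(List<Row> sortedRows, BiConsumer<Key, List<Row>> handler) {
        Key currentKey = null;
        List<Row> buffer = new ArrayList<>();
        for (Row row : sortedRows) {
            Key key = new Key(row.stage(), row.mapId());
            if (currentKey != null && !currentKey.equals(key)) {
                handler.accept(currentKey, buffer); // previous group is complete
                buffer = new ArrayList<>();
            }
            currentKey = key;
            buffer.add(row);
        }
        if (currentKey != null && !buffer.isEmpty()) {
            handler.accept(currentKey, buffer); // flush the last group
        }
    }

    public static void main(String[] args) {
        List<Row> rows = List.of(
            new Row(1, 100L, "a"), new Row(1, 100L, "b"), new Row(1, 200L, "c"));
        streamGrouped(rows, (k, g) -> System.out.println(k + " -> " + g.size() + " row(s)"));
        // Key[stage=1, mapId=100] -> 2 row(s)
        // Key[stage=1, mapId=200] -> 1 row(s)
    }
}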

View File

@@ -0,0 +1,79 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import org.locationtech.jts.geom.Geometry;
@Getter
@Setter
@Entity
@Table(name = "inference_results")
public class InferenceResultEntity {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "uid", nullable = false)
private Long id;
@NotNull
@ColumnDefault("uuid_generate_v4()")
@Column(name = "uuid", nullable = false)
private UUID uuid;
@Column(name = "stage")
private Integer stage;
@Column(name = "cd_prob")
private Float cdProb;
@Column(name = "input1")
private Integer input1;
@Column(name = "input2")
private Integer input2;
@Column(name = "map_id")
private Long mapId;
@Size(max = 20)
@Column(name = "before_class", length = 20)
private String beforeClass;
@Column(name = "before_probability")
private Float beforeProbability;
@Size(max = 20)
@Column(name = "after_class", length = 20)
private String afterClass;
@Column(name = "after_probability")
private Float afterProbability;
@ColumnDefault("st_area(geometry)")
@Column(name = "area")
private Float area;
@NotNull
@ColumnDefault("now()")
@Column(name = "created_dttm", nullable = false)
private ZonedDateTime createdDttm;
@NotNull
@ColumnDefault("now()")
@Column(name = "updated_dttm", nullable = false)
private ZonedDateTime updatedDttm;
@Column(name = "geometry", columnDefinition = "geometry not null")
private Geometry geometry;
}

View File

@@ -0,0 +1,164 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
@Getter
@Setter
@Entity
@Table(name = "tb_map_sheet_anal_data_inference")
public class MapSheetAnalDataInferenceEntity {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "data_uid", nullable = false)
private Long id;
@Size(max = 128)
@Column(name = "data_name", length = 128)
private String dataName;
@Size(max = 255)
@Column(name = "data_path")
private String dataPath;
@Size(max = 128)
@Column(name = "data_type", length = 128)
private String dataType;
@Size(max = 128)
@Column(name = "data_crs_type", length = 128)
private String dataCrsType;
@Size(max = 255)
@Column(name = "data_crs_type_name")
private String dataCrsTypeName;
@ColumnDefault("now()")
@Column(name = "created_dttm")
private ZonedDateTime createdDttm;
@Column(name = "created_uid")
private Long createdUid;
@ColumnDefault("now()")
@Column(name = "updated_dttm")
private ZonedDateTime updatedDttm;
@Column(name = "updated_uid")
private Long updatedUid;
@Column(name = "compare_yyyy")
private Integer compareYyyy;
@Column(name = "target_yyyy")
private Integer targetYyyy;
@Column(name = "data_json", length = Integer.MAX_VALUE)
private String dataJson;
@Size(max = 20)
@ColumnDefault("'0'")
@Column(name = "data_state", length = 20)
private String dataState;
@ColumnDefault("now()")
@Column(name = "data_state_dttm")
private ZonedDateTime dataStateDttm;
@Column(name = "anal_strt_dttm")
private ZonedDateTime analStrtDttm;
@Column(name = "anal_end_dttm")
private ZonedDateTime analEndDttm;
@ColumnDefault("0")
@Column(name = "anal_sec")
private Long analSec;
@Size(max = 20)
@Column(name = "anal_state", length = 20)
private String analState;
@Column(name = "anal_uid")
private Long analUid;
@Column(name = "map_sheet_num")
private Long mapSheetNum;
@ColumnDefault("0")
@Column(name = "detecting_cnt")
private Long detectingCnt;
@ColumnDefault("0")
@Column(name = "pnu")
private Long pnu;
@Size(max = 20)
@Column(name = "down_state", length = 20)
private String downState;
@Column(name = "down_state_dttm")
private ZonedDateTime downStateDttm;
@Size(max = 20)
@Column(name = "fit_state", length = 20)
private String fitState;
@Column(name = "fit_state_dttm")
private ZonedDateTime fitStateDttm;
@Column(name = "labeler_uid")
private Long labelerUid;
@Size(max = 20)
@ColumnDefault("NULL")
@Column(name = "label_state", length = 20)
private String labelState;
@Column(name = "label_state_dttm")
private ZonedDateTime labelStateDttm;
@Column(name = "tester_uid")
private Long testerUid;
@Size(max = 20)
@Column(name = "test_state", length = 20)
private String testState;
@Column(name = "test_state_dttm")
private ZonedDateTime testStateDttm;
@Column(name = "fit_state_cmmnt", length = Integer.MAX_VALUE)
private String fitStateCmmnt;
@Column(name = "ref_map_sheet_num")
private Long refMapSheetNum;
@Column(name = "stage")
private Integer stage;
@Column(name = "file_created_yn")
private Boolean fileCreatedYn;
@Size(max = 100)
@Column(name = "m1", length = 100)
private String m1;
@Size(max = 100)
@Column(name = "m2", length = 100)
private String m2;
@Size(max = 100)
@Column(name = "m3", length = 100)
private String m3;
}

View File

@@ -0,0 +1,150 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Size;
import java.time.OffsetDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import org.locationtech.jts.geom.Geometry;
@Getter
@Setter
@Entity
@Table(name = "tb_map_sheet_anal_data_inference_geom")
public class MapSheetAnalDataInferenceGeomEntity {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "geo_uid")
private Long geoUid;
@Column(name = "cd_prob")
private Float cdProb;
@Size(max = 40)
@Column(name = "class_before_cd", length = 40)
private String classBeforeCd;
@Column(name = "class_before_prob")
private Float classBeforeProb;
@Size(max = 40)
@Column(name = "class_after_cd", length = 40)
private String classAfterCd;
@Column(name = "class_after_prob")
private Float classAfterProb;
@Column(name = "map_sheet_num")
private Long mapSheetNum;
@Column(name = "compare_yyyy")
private Integer compareYyyy;
@Column(name = "target_yyyy")
private Integer targetYyyy;
@Column(name = "area")
private Float area;
@Size(max = 100)
@Column(name = "geo_type", length = 100)
private String geoType;
@Column(name = "data_uid")
private Long dataUid;
@ColumnDefault("now()")
@Column(name = "created_dttm")
private OffsetDateTime createdDttm;
@Column(name = "created_uid")
private Long createdUid;
@ColumnDefault("now()")
@Column(name = "updated_dttm")
private OffsetDateTime updatedDttm;
@Column(name = "updated_uid")
private Long updatedUid;
@ColumnDefault("0")
@Column(name = "geom_cnt")
private Long geomCnt;
@ColumnDefault("0")
@Column(name = "pnu")
private Long pnu;
@Size(max = 20)
@ColumnDefault("'0'")
@Column(name = "fit_state", length = 20)
private String fitState;
@ColumnDefault("now()")
@Column(name = "fit_state_dttm")
private OffsetDateTime fitStateDttm;
@Column(name = "labeler_uid")
private Long labelerUid;
@Size(max = 20)
@ColumnDefault("'0'")
@Column(name = "label_state", length = 20)
private String labelState;
@ColumnDefault("now()")
@Column(name = "label_state_dttm")
private OffsetDateTime labelStateDttm;
@Column(name = "tester_uid")
private Long testerUid;
@Size(max = 20)
@ColumnDefault("'0'")
@Column(name = "test_state", length = 20)
private String testState;
@ColumnDefault("now()")
@Column(name = "test_state_dttm")
private OffsetDateTime testStateDttm;
@Column(name = "fit_state_cmmnt", length = Integer.MAX_VALUE)
private String fitStateCmmnt;
@Column(name = "ref_map_sheet_num")
private Long refMapSheetNum;
@ColumnDefault("uuid_generate_v4()")
@Column(name = "uuid")
private UUID uuid;
@Column(name = "stage")
private Integer stage;
@Column(name = "map_5k_id")
private Long map5kId;
@Column(name = "file_created_yn")
private Boolean fileCreatedYn;
@Column(name = "geom", columnDefinition = "geometry")
private Geometry geom;
@Column(name = "geom_center", columnDefinition = "geometry")
private Geometry geomCenter;
@Column(name = "before_geom", columnDefinition = "geometry")
private Geometry beforeGeom;
}

View File

@@ -1,7 +1,7 @@
 package com.kamco.cd.kamcoback.postgres.repository.Inference;
-import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity;
+import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
 import org.springframework.data.jpa.repository.JpaRepository;
 public interface InferenceResultRepository
-    extends JpaRepository<MapSheetAnalEntity, Long>, InferenceResultRepositoryCustom {}
+    extends JpaRepository<InferenceResultEntity, Long>, InferenceResultRepositoryCustom {}

View File

@@ -1,31 +1,9 @@
 package com.kamco.cd.kamcoback.postgres.repository.Inference;
-import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
-import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
-import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
-import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
-import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
-import jakarta.validation.constraints.NotNull;
-import java.util.List;
-import java.util.Optional;
-import org.springframework.data.domain.Page;
+import org.hibernate.ScrollableResults;
 public interface InferenceResultRepositoryCustom {
-    Page<InferenceResultDto.AnalResList> getInferenceResultList(
-        InferenceResultDto.SearchReq searchReq);
-    Optional<InferenceResultDto.AnalResSummary> getInferenceResultSummary(Long id);
-    Page<InferenceResultDto.Geom> getInferenceGeomList(
-        Long id, InferenceResultDto.SearchGeoReq searchGeoReq);
-    Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom(
-        List<Long> dataIds, SearchGeoReq searchReq);
-    List<Long> getSheets(Long id);
-    List<Dashboard> getDashboard(Long id);
-    List<MapSheetAnalDataEntity> listAnalyGeom(@NotNull Long id);
+    ScrollableResults scrollAllOrdered(int fetchSize);
 }

View File

@@ -1,27 +1,13 @@
 package com.kamco.cd.kamcoback.postgres.repository.Inference;
-import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
-import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
-import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
-import com.kamco.cd.kamcoback.postgres.entity.*;
-import com.querydsl.core.BooleanBuilder;
-import com.querydsl.core.types.Order;
-import com.querydsl.core.types.OrderSpecifier;
-import com.querydsl.core.types.Projections;
-import com.querydsl.core.types.dsl.BooleanExpression;
-import com.querydsl.core.types.dsl.Expressions;
-import com.querydsl.jpa.JPAExpressions;
-import com.querydsl.jpa.JPQLQuery;
+import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
+import com.kamco.cd.kamcoback.postgres.entity.QInferenceResultEntity;
 import com.querydsl.jpa.impl.JPAQuery;
 import com.querydsl.jpa.impl.JPAQueryFactory;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Optional;
 import lombok.RequiredArgsConstructor;
-import org.springframework.data.domain.Page;
-import org.springframework.data.domain.PageImpl;
-import org.springframework.data.domain.Pageable;
-import org.springframework.data.domain.Sort;
+import org.hibernate.ScrollMode;
+import org.hibernate.ScrollableResults;
+import org.hibernate.query.Query;
 import org.springframework.stereotype.Repository;
 @Repository
@@ -29,335 +15,27 @@ import org.springframework.stereotype.Repository;
 public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {
     private final JPAQueryFactory queryFactory;
-    private final QModelMngBakEntity tmm = QModelMngBakEntity.modelMngBakEntity;
-    private final QModelVerEntity tmv = QModelVerEntity.modelVerEntity;
-    private final QMapSheetAnalEntity mapSheetAnalEntity = QMapSheetAnalEntity.mapSheetAnalEntity;
-    private final QMapSheetAnalDataEntity mapSheetAnalDataEntity =
-        QMapSheetAnalDataEntity.mapSheetAnalDataEntity;
-    private final QMapSheetAnalDataGeomEntity mapSheetAnalDataGeomEntity =
-        QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
-    private final QMapSheetAnalSttcEntity mapSheetAnalSttcEntity =
-        QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
-    /**
-     * Retrieves the analysis result list.
-     *
-     * @param searchReq
-     * @return
-     */
-    @Override
-    public Page<InferenceResultDto.AnalResList> getInferenceResultList(
-        InferenceResultDto.SearchReq searchReq) {
-        Pageable pageable = searchReq.toPageable();
-        // "0000" means fetch all
-        BooleanBuilder builder = new BooleanBuilder();
-        if (searchReq.getStatCode() != null && !"0000".equals(searchReq.getStatCode())) {
-            builder.and(mapSheetAnalEntity.analState.eq(searchReq.getStatCode()));
-        }
-        // Title
-        if (searchReq.getTitle() != null) {
-            builder.and(mapSheetAnalEntity.analTitle.like("%" + searchReq.getTitle() + "%"));
-        }
-        List<InferenceResultDto.AnalResList> content =
-            queryFactory
-                .select(
-                    Projections.constructor(
-                        InferenceResultDto.AnalResList.class,
-                        mapSheetAnalEntity.id,
-                        mapSheetAnalEntity.analTitle,
-                        mapSheetAnalEntity.analMapSheet,
-                        mapSheetAnalEntity.detectingCnt,
-                        mapSheetAnalEntity.analStrtDttm,
-                        mapSheetAnalEntity.analEndDttm,
-                        mapSheetAnalEntity.analSec,
-                        mapSheetAnalEntity.analPredSec,
-                        mapSheetAnalEntity.analState,
-                        Expressions.stringTemplate(
-                            "fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState),
-                        mapSheetAnalEntity.gukyuinUsed))
-                .from(mapSheetAnalEntity)
-                .where(builder)
-                .offset(pageable.getOffset())
-                .limit(pageable.getPageSize())
-                .orderBy(mapSheetAnalEntity.id.desc())
-                .fetch();
-        long total =
-            queryFactory
-                .select(mapSheetAnalEntity.id)
-                .from(mapSheetAnalEntity)
-                .where(builder)
-                .fetchCount();
-        return new PageImpl<>(content, pageable, total);
-    }
-    /**
-     * Analysis result summary.
-     *
-     * @param id
-     * @return
-     */
-    @Override
-    public Optional<InferenceResultDto.AnalResSummary> getInferenceResultSummary(Long id) {
-        // 1. Subquery that fetches the latest version UID
-        JPQLQuery<Long> latestVerUidSub =
-            JPAExpressions.select(tmv.id.max()).from(tmv).where(tmv.modelUid.eq(tmm.id));
-        Optional<InferenceResultDto.AnalResSummary> content =
-            Optional.ofNullable(
-                queryFactory
-                    .select(
-                        Projections.constructor(
-                            InferenceResultDto.AnalResSummary.class,
-                            mapSheetAnalEntity.id,
-                            mapSheetAnalEntity.analTitle,
-                            tmm.modelNm.concat(" ").concat(tmv.modelVer).as("modelInfo"),
-                            mapSheetAnalEntity.targetYyyy,
-                            mapSheetAnalEntity.compareYyyy,
-                            mapSheetAnalEntity.analMapSheet,
-                            mapSheetAnalEntity.analStrtDttm,
-                            mapSheetAnalEntity.analEndDttm,
-                            mapSheetAnalEntity.analSec,
-                            mapSheetAnalEntity.analPredSec,
-                            mapSheetAnalEntity.resultUrl,
-                            mapSheetAnalEntity.detectingCnt,
-                            mapSheetAnalEntity.accuracy,
-                            mapSheetAnalEntity.analState,
-                            Expressions.stringTemplate(
-                                "fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState)))
-                    .from(mapSheetAnalEntity)
-                    .leftJoin(tmm)
-                    .on(mapSheetAnalEntity.modelUid.eq(tmm.id))
-                    .leftJoin(tmv)
-                    .on(tmv.modelUid.eq(tmm.id).and(tmv.id.eq(latestVerUidSub)))
-                    .where(mapSheetAnalEntity.id.eq(id))
-                    .fetchOne());
-        return content;
-    }
-    /**
-     * Detection counts per class name for the analysis result detail.
-     *
-     * @param id
-     * @return
-     */
-    @Override
-    public List<Dashboard> getDashboard(Long id) {
-        return queryFactory
-            .select(
-                Projections.constructor(
-                    Dashboard.class,
-                    mapSheetAnalSttcEntity.id.classAfterCd,
-                    mapSheetAnalSttcEntity.classAfterCnt.sum()))
-            .from(mapSheetAnalSttcEntity)
-            .where(mapSheetAnalSttcEntity.id.analUid.eq(id))
-            .groupBy(mapSheetAnalSttcEntity.id.classAfterCd)
-            .orderBy(mapSheetAnalSttcEntity.id.classAfterCd.asc())
-            .fetch();
-    }
-    @Override
-    public List<MapSheetAnalDataEntity> listAnalyGeom(Long id) {
-        QMapSheetAnalDataEntity analy = QMapSheetAnalDataEntity.mapSheetAnalDataEntity;
-        return queryFactory.selectFrom(analy).where(analy.analUid.eq(id)).fetch();
-    }
-    /**
-     * Analysis result detail list.
-     *
-     * @param searchReq
-     * @return
-     */
-    @Override
-    public Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom(
-        List<Long> ids, SearchGeoReq searchReq) {
-        // Analysis round
-        QMapSheetAnalDataGeomEntity detectedEntity =
-            QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
-        Pageable pageable = searchReq.toPageable();
-        // Search conditions
-        JPAQuery<MapSheetAnalDataGeomEntity> query =
-            queryFactory
-                .selectFrom(detectedEntity)
-                .where(
-                    detectedEntity.dataUid.in(ids),
-                    eqTargetClass(detectedEntity, searchReq.getTargetClass()),
-                    eqCompareClass(detectedEntity, searchReq.getCompareClass()),
-                    containsMapSheetNum(detectedEntity, searchReq.getMapSheetNum()));
-        // count
-        long total = query.fetchCount();
-        // Take the sort from Pageable; fall back to createdDttm desc when absent
-        List<OrderSpecifier<?>> orders = getOrderSpecifiers(pageable.getSort());
-        if (orders.isEmpty()) {
-            orders.add(detectedEntity.createdDttm.desc());
-        }
-        List<MapSheetAnalDataGeomEntity> content =
-            query
-                .offset(pageable.getOffset())
-                .limit(pageable.getPageSize())
-                .orderBy(orders.toArray(new OrderSpecifier[0]))
-                .fetch();
-        return new PageImpl<>(content, pageable, total);
-    }
-    /**
-     * Analysis result detail list.
-     *
-     * @param searchGeoReq
-     * @return
-     */
-    @Override
-    public Page<InferenceResultDto.Geom> getInferenceGeomList(Long id, SearchGeoReq searchGeoReq) {
-        Pageable pageable = searchGeoReq.toPageable();
-        BooleanBuilder builder = new BooleanBuilder();
-        // Inference result id
-        builder.and(mapSheetAnalEntity.id.eq(id));
-        // Target-year class
-        if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")) {
-            builder.and(
-                mapSheetAnalDataGeomEntity
-                    .classAfterCd
-                    .toLowerCase()
-                    .eq(searchGeoReq.getTargetClass().toLowerCase()));
-        }
-        // Compare-year class
-        if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")) {
-            builder.and(
-                mapSheetAnalDataGeomEntity
-                    .classBeforeCd
-                    .toLowerCase()
-                    .eq(searchGeoReq.getCompareClass().toLowerCase()));
-        }
-        // Analysis map sheets
-        if (searchGeoReq.getMapSheetNum() != null && !searchGeoReq.getMapSheetNum().isEmpty()) {
-            List<Long> mapSheetNum = searchGeoReq.getMapSheetNum();
-            builder.and(mapSheetAnalDataGeomEntity.mapSheetNum.in(mapSheetNum));
-        }
-        List<InferenceResultDto.Geom> content =
-            queryFactory
-                .select(
-                    Projections.constructor(
-                        InferenceResultDto.Geom.class,
-                        mapSheetAnalDataGeomEntity.compareYyyy,
-                        mapSheetAnalDataGeomEntity.targetYyyy,
-                        mapSheetAnalDataGeomEntity.classBeforeCd,
-                        mapSheetAnalDataGeomEntity.classBeforeProb,
-                        mapSheetAnalDataGeomEntity.classAfterCd,
-                        mapSheetAnalDataGeomEntity.classAfterProb,
-                        mapSheetAnalDataGeomEntity.mapSheetNum,
-                        mapSheetAnalDataGeomEntity.geom,
-                        mapSheetAnalDataGeomEntity.geomCenter))
-                .from(mapSheetAnalEntity)
-                .join(mapSheetAnalDataEntity)
-                .on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
-                .join(mapSheetAnalDataGeomEntity)
-                .on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id))
-                .where(builder)
-                .offset(pageable.getOffset())
-                .limit(pageable.getPageSize())
-                .fetch();
-        long total =
-            queryFactory
-                .select(mapSheetAnalDataGeomEntity.id)
-                .from(mapSheetAnalEntity)
-                .join(mapSheetAnalDataEntity)
-                .on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
-                .join(mapSheetAnalDataGeomEntity)
-                .on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id))
-                .where(builder)
-                .fetchCount();
-        return new PageImpl<>(content, pageable, total);
-    }
-    /**
-     * List of inferred 1:5000 map sheets.
-     *
-     * @param id
-     * @return
-     */
-    @Override
-    public List<Long> getSheets(Long id) {
-        return queryFactory
-            .select(mapSheetAnalDataEntity.mapSheetNum)
-            .from(mapSheetAnalEntity)
-            .join(mapSheetAnalDataEntity)
-            .on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
-            .where(mapSheetAnalEntity.id.eq(id))
-            .groupBy(mapSheetAnalDataEntity.mapSheetNum)
-            .fetch();
-    }
-    /** Converts the Pageable Sort into QueryDSL OrderSpecifiers */
-    @SuppressWarnings({"unchecked", "rawtypes"})
-    private List<OrderSpecifier<?>> getOrderSpecifiers(Sort sort) {
-        List<OrderSpecifier<?>> orders = new ArrayList<>();
-        if (sort.isSorted()) {
-            QMapSheetAnalDataGeomEntity entity = QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
-            for (Sort.Order order : sort) {
-                Order direction = order.isAscending() ? Order.ASC : Order.DESC;
-                String property = order.getProperty();
-                // Only handle valid fields
-                switch (property) {
-                    case "classBeforeCd" -> orders.add(new OrderSpecifier(direction, entity.classBeforeCd));
-                    case "classBeforeProb" ->
-                        orders.add(new OrderSpecifier(direction, entity.classBeforeProb));
-                    case "classAfterCd" -> orders.add(new OrderSpecifier(direction, entity.classAfterCd));
-                    case "classAfterProb" -> orders.add(new OrderSpecifier(direction, entity.classAfterProb));
-                    case "mapSheetNum" -> orders.add(new OrderSpecifier(direction, entity.mapSheetNum));
-                    case "compareYyyy" -> orders.add(new OrderSpecifier(direction, entity.compareYyyy));
-                    case "targetYyyy" -> orders.add(new OrderSpecifier(direction, entity.targetYyyy));
-                    case "area" -> orders.add(new OrderSpecifier(direction, entity.area));
-                    case "createdDttm" -> orders.add(new OrderSpecifier(direction, entity.createdDttm));
-                    case "updatedDttm" -> orders.add(new OrderSpecifier(direction, entity.updatedDttm));
-                    // Ignore invalid fields
-                    default -> {}
-                }
-            }
-        }
-        return orders;
-    }
-    private BooleanExpression eqTargetClass(
-        QMapSheetAnalDataGeomEntity detectedEntity, String targetClass) {
-        return targetClass != null && !targetClass.isEmpty()
-            ? detectedEntity.classAfterCd.toLowerCase().eq(targetClass.toLowerCase())
-            : null;
-    }
-    private BooleanExpression eqCompareClass(
-        QMapSheetAnalDataGeomEntity detectedEntity, String compareClass) {
-        return compareClass != null && !compareClass.isEmpty()
-            ? detectedEntity.classBeforeCd.toLowerCase().eq(compareClass.toLowerCase())
-            : null;
-    }
-    private BooleanExpression containsMapSheetNum(
-        QMapSheetAnalDataGeomEntity detectedEntity, List<Long> mapSheet) {
-        if (mapSheet == null || mapSheet.isEmpty()) {
-            return null;
-        }
-        return detectedEntity.mapSheetNum.in(mapSheet);
+    public ScrollableResults scrollAllOrdered(int fetchSize) {
+        QInferenceResultEntity e = QInferenceResultEntity.inferenceResultEntity;
+        JPAQuery<InferenceResultEntity> q =
+            queryFactory
+                .selectFrom(e)
+                .orderBy(
+                    e.stage.asc(),
+                    e.mapId.asc(),
+                    e.input1.asc(),
+                    e.input2.asc(),
+                    e.id.asc());
+        // Unwrap the QueryDSL query to a Hibernate Query for cursor streaming
+        Query<?> hQuery = q.createQuery().unwrap(Query.class);
+        return hQuery
+            .setReadOnly(true)
+            .setFetchSize(fetchSize) // affects PostgreSQL cursor/streaming behavior
+            .scroll(ScrollMode.FORWARD_ONLY);
     }
 }
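One operational note on scrollAllOrdered, stated as a known PostgreSQL JDBC behavior: setFetchSize is only honored when the statement runs with autocommit off, which is what the @Transactional(readOnly = true) boundary around streamGrouped provides; without a transaction the driver buffers the entire result set. A minimal consuming sketch, assuming it runs as a Spring-managed bean:

import com.kamco.cd.kamcoback.postgres.entity.InferenceResultEntity;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import lombok.RequiredArgsConstructor;
import org.hibernate.ScrollableResults;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Service
@RequiredArgsConstructor
public class CursorConsumerSketch {
    private final InferenceResultRepository inferenceResultRepository;

    @Transactional(readOnly = true) // keeps autocommit off so fetchSize actually streams
    public long countRows() {
        ScrollableResults cursor = inferenceResultRepository.scrollAllOrdered(1000);
        long n = 0;
        try {
            while (cursor.next()) {
                InferenceResultEntity row = (InferenceResultEntity) cursor.get();
                n++; // process the row here instead of counting
            }
        } finally {
            cursor.close();
        }
        return n;
    }
}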

View File

@@ -0,0 +1,8 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface MapSheetAnalDataInferenceGeomRepository
extends JpaRepository<MapSheetAnalDataInferenceGeomEntity, Long>,
MapSheetAnalDataInferenceGeomRepositoryCustom {}

View File

@@ -0,0 +1,3 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
public interface MapSheetAnalDataInferenceGeomRepositoryCustom {}

View File

@@ -0,0 +1,11 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class MapSheetAnalDataInferenceGeomRepositoryImpl
implements MapSheetAnalDataInferenceGeomRepositoryCustom {
}

View File

@@ -0,0 +1,10 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface MapSheetAnalDataInferenceRepository
extends JpaRepository<MapSheetAnalDataInferenceEntity, Long>,
MapSheetAnalDataInferenceRepositoryCustom {
}

View File

@@ -0,0 +1,3 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
public interface MapSheetAnalDataInferenceRepositoryCustom {}

View File

@@ -0,0 +1,13 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.querydsl.jpa.impl.JPAQueryFactory;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class MapSheetAnalDataInferenceRepositoryImpl
implements MapSheetAnalDataInferenceRepositoryCustom {
private final JPAQueryFactory queryFactory;
}

View File

@@ -0,0 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface MapSheetAnalDataRepository
extends JpaRepository<MapSheetAnalEntity, Long>, MapSheetAnalDataRepositoryCustom {}

View File

@@ -0,0 +1,32 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResSummary;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import java.util.Optional;
import org.springframework.data.domain.Page;
public interface MapSheetAnalDataRepositoryCustom {
Page<AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq);
Optional<AnalResSummary> getInferenceResultSummary(Long id);
Page<InferenceResultDto.Geom> getInferenceGeomList(
Long id, InferenceResultDto.SearchGeoReq searchGeoReq);
Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom(
List<Long> dataIds, SearchGeoReq searchReq);
List<Long> getSheets(Long id);
List<Dashboard> getDashboard(Long id);
List<MapSheetAnalDataEntity> listAnalyGeom(@NotNull Long id);
}

View File

@@ -0,0 +1,371 @@
package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.AnalResSummary;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SearchGeoReq;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalSttcEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelMngBakEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelVerEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Order;
import com.querydsl.core.types.OrderSpecifier;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.JPQLQuery;
import com.querydsl.jpa.impl.JPAQuery;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Repository;
@Repository
@RequiredArgsConstructor
public class MapSheetAnalDataRepositoryImpl implements MapSheetAnalDataRepositoryCustom {
private final JPAQueryFactory queryFactory;
private final QModelMngBakEntity tmm = QModelMngBakEntity.modelMngBakEntity;
private final QModelVerEntity tmv = QModelVerEntity.modelVerEntity;
private final QMapSheetAnalEntity mapSheetAnalEntity = QMapSheetAnalEntity.mapSheetAnalEntity;
private final QMapSheetAnalDataEntity mapSheetAnalDataEntity =
QMapSheetAnalDataEntity.mapSheetAnalDataEntity;
private final QMapSheetAnalDataGeomEntity mapSheetAnalDataGeomEntity =
QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
private final QMapSheetAnalSttcEntity mapSheetAnalSttcEntity =
QMapSheetAnalSttcEntity.mapSheetAnalSttcEntity;
/**
* Retrieves the analysis result list.
*
* @param searchReq
* @return
*/
@Override
public Page<AnalResList> getInferenceResultList(InferenceResultDto.SearchReq searchReq) {
Pageable pageable = searchReq.toPageable();
// "0000" 전체조회
BooleanBuilder builder = new BooleanBuilder();
if (searchReq.getStatCode() != null && !"0000".equals(searchReq.getStatCode())) {
builder.and(mapSheetAnalEntity.analState.eq(searchReq.getStatCode()));
}
// Title
if (searchReq.getTitle() != null) {
builder.and(mapSheetAnalEntity.analTitle.like("%" + searchReq.getTitle() + "%"));
}
List<AnalResList> content =
queryFactory
.select(
Projections.constructor(
InferenceResultDto.AnalResList.class,
mapSheetAnalEntity.id,
mapSheetAnalEntity.analTitle,
mapSheetAnalEntity.analMapSheet,
mapSheetAnalEntity.detectingCnt,
mapSheetAnalEntity.analStrtDttm,
mapSheetAnalEntity.analEndDttm,
mapSheetAnalEntity.analSec,
mapSheetAnalEntity.analPredSec,
mapSheetAnalEntity.analState,
Expressions.stringTemplate(
"fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState),
mapSheetAnalEntity.gukyuinUsed))
.from(mapSheetAnalEntity)
.where(builder)
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(mapSheetAnalEntity.id.desc())
.fetch();
long total =
queryFactory
.select(mapSheetAnalEntity.id)
.from(mapSheetAnalEntity)
.where(builder)
.fetchCount();
return new PageImpl<>(content, pageable, total);
}
/**
* Analysis result summary.
*
* @param id
* @return
*/
@Override
public Optional<AnalResSummary> getInferenceResultSummary(Long id) {
// 1. Subquery that fetches the latest version UID
JPQLQuery<Long> latestVerUidSub =
JPAExpressions.select(tmv.id.max()).from(tmv).where(tmv.modelUid.eq(tmm.id));
Optional<InferenceResultDto.AnalResSummary> content =
Optional.ofNullable(
queryFactory
.select(
Projections.constructor(
InferenceResultDto.AnalResSummary.class,
mapSheetAnalEntity.id,
mapSheetAnalEntity.analTitle,
tmm.modelNm.concat(" ").concat(tmv.modelVer).as("modelInfo"),
mapSheetAnalEntity.targetYyyy,
mapSheetAnalEntity.compareYyyy,
mapSheetAnalEntity.analMapSheet,
mapSheetAnalEntity.analStrtDttm,
mapSheetAnalEntity.analEndDttm,
mapSheetAnalEntity.analSec,
mapSheetAnalEntity.analPredSec,
mapSheetAnalEntity.resultUrl,
mapSheetAnalEntity.detectingCnt,
mapSheetAnalEntity.accuracy,
mapSheetAnalEntity.analState,
Expressions.stringTemplate(
"fn_code_name({0}, {1})", "0002", mapSheetAnalEntity.analState)))
.from(mapSheetAnalEntity)
.leftJoin(tmm)
.on(mapSheetAnalEntity.modelUid.eq(tmm.id))
.leftJoin(tmv)
.on(tmv.modelUid.eq(tmm.id).and(tmv.id.eq(latestVerUidSub)))
.where(mapSheetAnalEntity.id.eq(id))
.fetchOne());
return content;
}
/**
* Detection counts per class name for the analysis result detail.
*
* @param id
* @return
*/
@Override
public List<Dashboard> getDashboard(Long id) {
return queryFactory
.select(
Projections.constructor(
Dashboard.class,
mapSheetAnalSttcEntity.id.classAfterCd,
mapSheetAnalSttcEntity.classAfterCnt.sum()))
.from(mapSheetAnalSttcEntity)
.where(mapSheetAnalSttcEntity.id.analUid.eq(id))
.groupBy(mapSheetAnalSttcEntity.id.classAfterCd)
.orderBy(mapSheetAnalSttcEntity.id.classAfterCd.asc())
.fetch();
}
@Override
public List<MapSheetAnalDataEntity> listAnalyGeom(Long id) {
QMapSheetAnalDataEntity analy = QMapSheetAnalDataEntity.mapSheetAnalDataEntity;
return queryFactory.selectFrom(analy).where(analy.analUid.eq(id)).fetch();
}
/**
* Analysis result detail list.
*
* @param searchReq
* @return
*/
@Override
public Page<MapSheetAnalDataGeomEntity> listInferenceResultWithGeom(
List<Long> ids, SearchGeoReq searchReq) {
// Analysis round
QMapSheetAnalDataGeomEntity detectedEntity =
QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
Pageable pageable = searchReq.toPageable();
// Search conditions
JPAQuery<MapSheetAnalDataGeomEntity> query =
queryFactory
.selectFrom(detectedEntity)
.where(
detectedEntity.dataUid.in(ids),
eqTargetClass(detectedEntity, searchReq.getTargetClass()),
eqCompareClass(detectedEntity, searchReq.getCompareClass()),
containsMapSheetNum(detectedEntity, searchReq.getMapSheetNum()));
// count
long total = query.fetchCount();
// Take the sort from Pageable; fall back to createdDttm desc when absent
List<OrderSpecifier<?>> orders = getOrderSpecifiers(pageable.getSort());
if (orders.isEmpty()) {
orders.add(detectedEntity.createdDttm.desc());
}
List<MapSheetAnalDataGeomEntity> content =
query
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.orderBy(orders.toArray(new OrderSpecifier[0]))
.fetch();
return new PageImpl<>(content, pageable, total);
}
/**
* Analysis result detail list.
*
* @param searchGeoReq
* @return
*/
@Override
public Page<InferenceResultDto.Geom> getInferenceGeomList(Long id, SearchGeoReq searchGeoReq) {
Pageable pageable = searchGeoReq.toPageable();
BooleanBuilder builder = new BooleanBuilder();
// Inference result id
builder.and(mapSheetAnalEntity.id.eq(id));
// Target-year class
if (searchGeoReq.getTargetClass() != null && !searchGeoReq.getTargetClass().equals("")) {
builder.and(
mapSheetAnalDataGeomEntity
.classAfterCd
.toLowerCase()
.eq(searchGeoReq.getTargetClass().toLowerCase()));
}
// Compare-year class
if (searchGeoReq.getCompareClass() != null && !searchGeoReq.getCompareClass().equals("")) {
builder.and(
mapSheetAnalDataGeomEntity
.classBeforeCd
.toLowerCase()
.eq(searchGeoReq.getCompareClass().toLowerCase()));
}
// Analysis map sheets
if (searchGeoReq.getMapSheetNum() != null && !searchGeoReq.getMapSheetNum().isEmpty()) {
List<Long> mapSheetNum = searchGeoReq.getMapSheetNum();
builder.and(mapSheetAnalDataGeomEntity.mapSheetNum.in(mapSheetNum));
}
List<InferenceResultDto.Geom> content =
queryFactory
.select(
Projections.constructor(
InferenceResultDto.Geom.class,
mapSheetAnalDataGeomEntity.compareYyyy,
mapSheetAnalDataGeomEntity.targetYyyy,
mapSheetAnalDataGeomEntity.classBeforeCd,
mapSheetAnalDataGeomEntity.classBeforeProb,
mapSheetAnalDataGeomEntity.classAfterCd,
mapSheetAnalDataGeomEntity.classAfterProb,
mapSheetAnalDataGeomEntity.mapSheetNum,
mapSheetAnalDataGeomEntity.geom,
mapSheetAnalDataGeomEntity.geomCenter))
.from(mapSheetAnalEntity)
.join(mapSheetAnalDataEntity)
.on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
.join(mapSheetAnalDataGeomEntity)
.on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id))
.where(builder)
.offset(pageable.getOffset())
.limit(pageable.getPageSize())
.fetch();
long total =
queryFactory
.select(mapSheetAnalDataGeomEntity.id)
.from(mapSheetAnalEntity)
.join(mapSheetAnalDataEntity)
.on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
.join(mapSheetAnalDataGeomEntity)
.on(mapSheetAnalDataGeomEntity.dataUid.eq(mapSheetAnalDataEntity.id))
.where(builder)
.fetchCount();
return new PageImpl<>(content, pageable, total);
}
/**
* List of inferred 1:5000 map sheets.
*
* @param id
* @return
*/
@Override
public List<Long> getSheets(Long id) {
return queryFactory
.select(mapSheetAnalDataEntity.mapSheetNum)
.from(mapSheetAnalEntity)
.join(mapSheetAnalDataEntity)
.on(mapSheetAnalDataEntity.analUid.eq(mapSheetAnalEntity.id))
.where(mapSheetAnalEntity.id.eq(id))
.groupBy(mapSheetAnalDataEntity.mapSheetNum)
.fetch();
}
/** Converts the Pageable Sort into QueryDSL OrderSpecifiers */
@SuppressWarnings({"unchecked", "rawtypes"})
private List<OrderSpecifier<?>> getOrderSpecifiers(Sort sort) {
List<OrderSpecifier<?>> orders = new ArrayList<>();
if (sort.isSorted()) {
QMapSheetAnalDataGeomEntity entity = QMapSheetAnalDataGeomEntity.mapSheetAnalDataGeomEntity;
for (Sort.Order order : sort) {
Order direction = order.isAscending() ? Order.ASC : Order.DESC;
String property = order.getProperty();
// Only handle valid fields
switch (property) {
case "classBeforeCd" -> orders.add(new OrderSpecifier(direction, entity.classBeforeCd));
case "classBeforeProb" ->
orders.add(new OrderSpecifier(direction, entity.classBeforeProb));
case "classAfterCd" -> orders.add(new OrderSpecifier(direction, entity.classAfterCd));
case "classAfterProb" -> orders.add(new OrderSpecifier(direction, entity.classAfterProb));
case "mapSheetNum" -> orders.add(new OrderSpecifier(direction, entity.mapSheetNum));
case "compareYyyy" -> orders.add(new OrderSpecifier(direction, entity.compareYyyy));
case "targetYyyy" -> orders.add(new OrderSpecifier(direction, entity.targetYyyy));
case "area" -> orders.add(new OrderSpecifier(direction, entity.area));
case "createdDttm" -> orders.add(new OrderSpecifier(direction, entity.createdDttm));
case "updatedDttm" -> orders.add(new OrderSpecifier(direction, entity.updatedDttm));
// Ignore invalid fields
default -> {}
}
}
}
return orders;
}
private BooleanExpression eqTargetClass(
QMapSheetAnalDataGeomEntity detectedEntity, String targetClass) {
return targetClass != null && !targetClass.isEmpty()
? detectedEntity.classAfterCd.toLowerCase().eq(targetClass.toLowerCase())
: null;
}
private BooleanExpression eqCompareClass(
QMapSheetAnalDataGeomEntity detectedEntity, String compareClass) {
return compareClass != null && !compareClass.isEmpty()
? detectedEntity.classBeforeCd.toLowerCase().eq(compareClass.toLowerCase())
: null;
}
private BooleanExpression containsMapSheetNum(
QMapSheetAnalDataGeomEntity detectedEntity, List<Long> mapSheet) {
if (mapSheet == null || mapSheet.isEmpty()) {
return null;
}
return detectedEntity.mapSheetNum.in(mapSheet);
}
}