Merge remote-tracking branch 'origin/feat/infer_dev_260107' into feat/infer_dev_260107
@@ -37,10 +37,10 @@ public class InferenceResultShpApiController {
         @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
         @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
       })
-  @PostMapping("/save/{id}")
+  @PostMapping("/save/{learnId}")
   public ApiResponseDto<InferenceResultShpDto.InferenceCntDto> saveInferenceData(
-      @PathVariable Long id) {
-    return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(id));
+      @PathVariable Long learnId) {
+    return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(learnId));
   }
 
   @Operation(summary = "shp 파일 생성", description = "shp 파일 생성")
@@ -56,8 +56,9 @@ public class InferenceResultShpApiController {
         @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
         @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
       })
-  @PostMapping("/create")
-  public ApiResponseDto<InferenceResultShpDto.FileCntDto> createShpFile() {
-    return ApiResponseDto.createOK(inferenceResultShpService.createShpFile());
+  @PostMapping("/create/{learnId}")
+  public ApiResponseDto<InferenceResultShpDto.FileCntDto> createShpFile(
+      @PathVariable Long learnId) {
+    return ApiResponseDto.createOK(inferenceResultShpService.createShpFile(learnId));
   }
 }
@@ -35,7 +35,7 @@ public class InferenceResultShpService {
    * <p>중간 실패 시 다음 실행에서 전체 재생성된다.
    */
   @Transactional
-  public InferenceResultShpDto.FileCntDto createShpFile() {
+  public InferenceResultShpDto.FileCntDto createShpFile(Long learnId) {
 
     // TODO 배치 실행으로 변경 필요
     int batchSize = 100;
@@ -43,7 +43,7 @@ public class InferenceResultShpService {
 
     WriteCnt total = WriteCnt.zero();
 
-    List<Long> dataUids = coreService.findPendingDataUids(batchSize);
+    List<Long> dataUids = coreService.findPendingDataUids(batchSize, learnId);
 
     for (Long dataUid : dataUids) {
 
@@ -414,4 +414,16 @@ public class InferenceResultCoreService {
   public Page<Geom> getInferenceGeomList(String uuid, SearchGeoReq searchGeoReq) {
     return mapSheetLearnRepository.getInferenceGeomList(uuid, searchGeoReq);
   }
+
+  /**
+   * geom 데이터 저장
+   *
+   * @param id learn 테이블 id
+   */
+  public void upsertGeomData(Long id) {
+    Long analId = inferenceResultRepository.upsertGroupsFromMapSheetAnal(id);
+    inferenceResultRepository.upsertGroupsFromInferenceResults(analId);
+    inferenceResultRepository.upsertGeomsFromInferenceResults(analId);
+    inferenceResultRepository.upsertSttcFromInferenceResults(analId);
+  }
 }
@@ -20,22 +20,19 @@ public class InferenceResultShpCoreService {
    */
   @Transactional
   public InferenceResultShpDto.InferenceCntDto buildInferenceData(Long id) {
-    // int sheetAnalDataCnt = repo.upsertGroupsFromMapSheetAnal();
-    // int inferenceCnt = repo.upsertGroupsFromInferenceResults();
-    int inferenceGeomCnt = repo.upsertGeomsFromInferenceResults(id);
+    Long analId = repo.upsertGroupsFromMapSheetAnal(id);
+    repo.upsertGroupsFromInferenceResults(analId);
+    repo.upsertGeomsFromInferenceResults(analId);
+    repo.upsertSttcFromInferenceResults(analId);
     InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto();
-    // cntDto.setSheetAnalDataCnt(sheetAnalDataCnt);
-    // cntDto.setInferenceCnt(inferenceCnt);
-    cntDto.setInferenceGeomCnt(inferenceGeomCnt);
 
     return cntDto;
   }
 
   /** 파일 생성이 완료되지 않은 분석 데이터(data_uid) 목록을 조회한다. */
   @Transactional(readOnly = true)
-  public List<Long> findPendingDataUids(int limit) {
-    return repo.findPendingDataUids(limit);
+  public List<Long> findPendingDataUids(int limit, Long learnId) {
+    return repo.findPendingDataUids(limit, learnId);
   }
 
   /**
@@ -23,25 +23,25 @@ public class MapSheetAnalDataInferenceEntity {
   @Column(name = "data_uid", nullable = false)
   private Long id;
 
-  @Size(max = 128)
-  @Column(name = "data_name", length = 128)
-  private String dataName;
-
-  @Size(max = 255)
-  @Column(name = "data_path")
-  private String dataPath;
-
-  @Size(max = 128)
-  @Column(name = "data_type", length = 128)
-  private String dataType;
-
-  @Size(max = 128)
-  @Column(name = "data_crs_type", length = 128)
-  private String dataCrsType;
-
-  @Size(max = 255)
-  @Column(name = "data_crs_type_name")
-  private String dataCrsTypeName;
+  // @Size(max = 128)
+  // @Column(name = "data_name", length = 128)
+  // private String dataName;
+  //
+  // @Size(max = 255)
+  // @Column(name = "data_path")
+  // private String dataPath;
+  //
+  // @Size(max = 128)
+  // @Column(name = "data_type", length = 128)
+  // private String dataType;
+  //
+  // @Size(max = 128)
+  // @Column(name = "data_crs_type", length = 128)
+  // private String dataCrsType;
+  //
+  // @Size(max = 255)
+  // @Column(name = "data_crs_type_name")
+  // private String dataCrsTypeName;
 
   @ColumnDefault("now()")
   @Column(name = "created_dttm")
@@ -63,27 +63,27 @@ public class MapSheetAnalDataInferenceEntity {
   @Column(name = "target_yyyy")
   private Integer targetYyyy;
 
-  @Column(name = "data_json", length = Integer.MAX_VALUE)
-  private String dataJson;
-
-  @Size(max = 20)
-  @ColumnDefault("'0'")
-  @Column(name = "data_state", length = 20)
-  private String dataState;
+  // @Column(name = "data_json", length = Integer.MAX_VALUE)
+  // private String dataJson;
+  //
+  // @Size(max = 20)
+  // @ColumnDefault("'0'")
+  // @Column(name = "data_state", length = 20)
+  // private String dataState;
 
-  @ColumnDefault("now()")
-  @Column(name = "data_state_dttm")
-  private ZonedDateTime dataStateDttm;
-
-  @Column(name = "anal_strt_dttm")
-  private ZonedDateTime analStrtDttm;
-
-  @Column(name = "anal_end_dttm")
-  private ZonedDateTime analEndDttm;
-
-  @ColumnDefault("0")
-  @Column(name = "anal_sec")
-  private Long analSec;
+  // @ColumnDefault("now()")
+  // @Column(name = "data_state_dttm")
+  // private ZonedDateTime dataStateDttm;
+  //
+  // @Column(name = "anal_strt_dttm")
+  // private ZonedDateTime analStrtDttm;
+  //
+  // @Column(name = "anal_end_dttm")
+  // private ZonedDateTime analEndDttm;
+  //
+  // @ColumnDefault("0")
+  // @Column(name = "anal_sec")
+  // private Long analSec;
 
   @Size(max = 20)
   @Column(name = "anal_state", length = 20)
@@ -95,20 +95,20 @@ public class MapSheetAnalDataInferenceEntity {
   @Column(name = "map_sheet_num")
   private Long mapSheetNum;
 
-  @ColumnDefault("0")
-  @Column(name = "detecting_cnt")
-  private Long detectingCnt;
+  // @ColumnDefault("0")
+  // @Column(name = "detecting_cnt")
+  // private Long detectingCnt;
 
-  @ColumnDefault("0")
-  @Column(name = "pnu")
-  private Long pnu;
+  // @ColumnDefault("0")
+  // @Column(name = "pnu")
+  // private Long pnu;
 
-  @Size(max = 20)
-  @Column(name = "down_state", length = 20)
-  private String downState;
-
-  @Column(name = "down_state_dttm")
-  private ZonedDateTime downStateDttm;
+  // @Size(max = 20)
+  // @Column(name = "down_state", length = 20)
+  // private String downState;
+  //
+  // @Column(name = "down_state_dttm")
+  // private ZonedDateTime downStateDttm;
 
   @Size(max = 20)
   @Column(name = "fit_state", length = 20)
@@ -150,18 +150,18 @@ public class MapSheetAnalDataInferenceEntity {
   @Column(name = "file_created_yn")
   private Boolean fileCreatedYn;
 
-  @Size(max = 100)
-  @Column(name = "m1", length = 100)
-  private String m1;
-
-  @Size(max = 100)
-  @Column(name = "m2", length = 100)
-  private String m2;
-
-  @Size(max = 100)
-  @Column(name = "m3", length = 100)
-  private String m3;
-
   @Column(name = "file_created_dttm")
   private ZonedDateTime fileCreatedDttm;
+
+  // @Size(max = 100)
+  // @Column(name = "m1", length = 100)
+  // private String m1;
+  //
+  // @Size(max = 100)
+  // @Column(name = "m2", length = 100)
+  // private String m2;
+  //
+  // @Size(max = 100)
+  // @Column(name = "m3", length = 100)
+  // private String m3;
 }
@@ -159,4 +159,7 @@ public class MapSheetAnalInferenceEntity {
   @ColumnDefault("'N'")
   @Column(name = "inspection_closed_yn", length = 1)
   private String inspectionClosedYn = "N";
+
+  @Column(name = "learn_id")
+  private Long learnId;
 }
@@ -5,13 +5,15 @@ import java.util.List;
 
 public interface InferenceResultRepositoryCustom {
 
-  int upsertGroupsFromMapSheetAnal();
+  Long upsertGroupsFromMapSheetAnal(Long id);
 
-  int upsertGroupsFromInferenceResults();
+  void upsertGroupsFromInferenceResults(Long analId);
 
-  int upsertGeomsFromInferenceResults(Long id);
+  void upsertGeomsFromInferenceResults(Long analId);
 
-  List<Long> findPendingDataUids(int limit);
+  void upsertSttcFromInferenceResults(Long analId);
+
+  List<Long> findPendingDataUids(int limit, Long learnId);
 
   int resetInferenceCreated(Long dataUid);
 
@@ -3,6 +3,7 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference;
 import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
 import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity;
 import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity;
+import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity;
 import com.querydsl.jpa.impl.JPAQueryFactory;
 import jakarta.persistence.EntityManager;
 import java.time.ZonedDateTime;
@@ -17,8 +18,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   private final JPAQueryFactory queryFactory;
   private final EntityManager em;
 
+  private final QMapSheetAnalInferenceEntity inferenceEntity =
+      QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
+
   /** tb_map_sheet_anal_data_inference */
-  private final QMapSheetAnalDataInferenceEntity inferenceEntity =
+  private final QMapSheetAnalDataInferenceEntity inferenceDataEntity =
       QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
 
   /** tb_map_sheet_anal_data_inference_geom */
@@ -30,7 +34,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   // ===============================
 
   @Override
-  public int upsertGroupsFromMapSheetAnal() {
+  public Long upsertGroupsFromMapSheetAnal(Long id) {
     String sql =
         """
         INSERT INTO tb_map_sheet_anal_inference (
@@ -40,26 +44,36 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
             anal_title,
             detecting_cnt,
             created_dttm,
-            updated_dttm
+            m1_model_batch_id,
+            m2_model_batch_id,
+            m3_model_batch_id,
+            learn_id
         )
         SELECT
             r.stage,
-            r.input1 AS compare_yyyy,
-            r.input2 AS target_yyyy,
-            CONCAT(r.stage, '_', r.input1, '_', r.input2) AS anal_title,
-            COUNT(*) AS detecting_cnt,
+            r.compare_yyyy,
+            r.target_yyyy,
+            CONCAT(r.stage, '_', r.compare_yyyy, '_', r.target_yyyy) AS anal_title,
+            r.detecting_cnt,
             now(),
-            now()
-        FROM inference_results r
-        GROUP BY r.stage, r.input1, r.input2
+            r.m1_model_batch_id,
+            r.m2_model_batch_id,
+            r.m3_model_batch_id,
+            r.id
+        FROM tb_map_sheet_learn r
+        WHERE r.id = :id
         ON CONFLICT (stage, compare_yyyy, target_yyyy)
         DO UPDATE SET
             detecting_cnt = EXCLUDED.detecting_cnt,
             anal_title = EXCLUDED.anal_title,
-            updated_dttm = now()
+            updated_dttm = now(),
+            learn_id = EXCLUDED.learn_id
+        RETURNING anal_uid
         """;
 
-    return em.createNativeQuery(sql).executeUpdate();
+    Object result = em.createNativeQuery(sql).setParameter("id", id).getSingleResult();
+
+    return ((Number) result).longValue();
   }
 
   /**
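Note on the hunk above: the switch from executeUpdate() to getSingleResult() works because PostgreSQL's INSERT ... ON CONFLICT ... DO UPDATE ... RETURNING runs the upsert and hands back the affected key as a one-row result set, so the caller obtains anal_uid in a single round trip. A minimal self-contained sketch of the same pattern (the demo table and UpsertSketch class are hypothetical placeholders, not part of this codebase):

    // Sketch only: upsert a row and read back its key in one statement.
    // Table and column names here are hypothetical.
    class UpsertSketch {
      Long upsertAndGetKey(jakarta.persistence.EntityManager em, String name) {
        String sql =
            """
            INSERT INTO demo (name, updated_dttm) VALUES (:name, now())
            ON CONFLICT (name) DO UPDATE SET updated_dttm = now()
            RETURNING id
            """;
        // PostgreSQL returns the RETURNING value as a single-row result; read it as a Number.
        Object key = em.createNativeQuery(sql).setParameter("name", name).getSingleResult();
        return ((Number) key).longValue();
      }
    }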
@@ -71,50 +85,51 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    * @return 반영된 행 수
    */
   @Override
-  public int upsertGroupsFromInferenceResults() {
+  public void upsertGroupsFromInferenceResults(Long analId) {
 
     String sql =
         """
         INSERT INTO tb_map_sheet_anal_data_inference (
             anal_uid,
-            stage,
             compare_yyyy,
             target_yyyy,
             map_sheet_num,
-            detecting_cnt,
-            file_created_yn,
+            stage,
             created_dttm,
-            updated_dttm
+            ref_map_sheet_num
         )
         SELECT
-            ai.id AS anal_uid,
-            r.stage,
-            r.input1 AS compare_yyyy,
-            r.input2 AS target_yyyy,
-            r.map_id AS map_sheet_num,
-            COUNT(*) AS detecting_cnt,
-            false AS file_created_yn,
+            msl.anal_uid,
+            msl.compare_yyyy,
+            msl.target_yyyy,
+            CASE
+                WHEN r.map_id ~ '^[0-9]+$' THEN r.map_id::bigint
+                ELSE NULL
+            END AS map_sheet_num,
+            msl.stage,
             now(),
-            now()
-        FROM inference_results r
-        JOIN tb_map_sheet_anal_inference ai
-            ON ai.stage = r.stage
-            AND ai.compare_yyyy = r.input1
-            AND ai.target_yyyy = r.input2
-        GROUP BY
-            ai.id,
-            r.stage,
-            r.input1,
-            r.input2,
-            r.map_id
+            ((map_id::INTEGER)/1000) as ref_map_sheet_num
+        FROM inference_results_testing r
+        JOIN tb_map_sheet_anal_inference msl
+            ON r.batch_id IN (
+                msl.m1_model_batch_id,
+                msl.m2_model_batch_id,
+                msl.m3_model_batch_id
+            )
+        WHERE msl.anal_uid = :analId
+        group by msl.anal_uid,
+            msl.compare_yyyy,
+            msl.target_yyyy,
+            r.map_id,
+            msl.stage
         ON CONFLICT (stage, compare_yyyy, target_yyyy, map_sheet_num)
         DO UPDATE SET
             anal_uid = EXCLUDED.anal_uid,
-            detecting_cnt = EXCLUDED.detecting_cnt,
+            ref_map_sheet_num = EXCLUDED.ref_map_sheet_num,
             updated_dttm = now()
         """;
 
-    return em.createNativeQuery(sql).executeUpdate();
+    em.createNativeQuery(sql).setParameter("analId", analId).executeUpdate();
   }
 
   /**
@@ -126,12 +141,12 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    * @return 반영된 행 수
    */
   @Override
-  public int upsertGeomsFromInferenceResults(Long id) {
+  public void upsertGeomsFromInferenceResults(Long analUid) {
 
     String sql =
         """
         INSERT INTO tb_map_sheet_anal_data_inference_geom (
-            uuid,
+            result_uid,
             stage,
             cd_prob,
             compare_yyyy,
@@ -145,10 +160,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
             geom_center,
             area,
             data_uid,
-            created_dttm
+            created_dttm,
+            ref_map_sheet_num
         )
         SELECT
-            x.uuid,
+            x.result_uid,
             x.stage,
             x.cd_prob,
             x.compare_yyyy,
@@ -162,11 +178,12 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
             ST_Centroid(x.geom),
             x.area,
             x.data_uid,
-            x.created_dttm
+            x.created_dttm,
+            x.ref_map_sheet_num
         FROM (
-            SELECT DISTINCT ON (r.uid)
-                r.uid AS uuid,
-                di.stage AS stage,
+            SELECT
+                r.uid AS result_uid,
+                msadi.stage,
                 r.cd_prob,
                 r.input1 AS compare_yyyy,
                 r.input2 AS target_yyyy,
@@ -185,36 +202,112 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
                 ELSE ST_SetSRID(ST_GeomFromText(r.geometry), 5186)
                 END AS geom,
                 r.area,
-                di.id AS data_uid,
-                r.created_date AS created_dttm
+                msadi.data_uid,
+                r.created_date AS created_dttm,
+                msadi.ref_map_sheet_num
             FROM inference_results_testing r
-            JOIN tb_map_sheet_learn di
+            JOIN tb_map_sheet_anal_inference msl
                 ON r.batch_id IN (
-                    di.m1_model_batch_id,
-                    di.m2_model_batch_id,
-                    di.m3_model_batch_id
+                    msl.m1_model_batch_id,
+                    msl.m2_model_batch_id,
+                    msl.m3_model_batch_id
                 )
-            where di.id = :id
+            inner join tb_map_sheet_anal_data_inference msadi
+                on msadi.anal_uid = msl.anal_uid
+                AND r.map_id ~ '^[0-9]+$'
+                AND r.map_id::bigint = msadi.map_sheet_num
+            where msl.anal_uid = :analUid
             ORDER BY r.uid, r.created_date DESC NULLS LAST
         ) x
-        ON CONFLICT (uuid)
+        ON CONFLICT (result_uid)
         DO UPDATE SET
-            stage = EXCLUDED.stage,
             cd_prob = EXCLUDED.cd_prob,
-            compare_yyyy = EXCLUDED.compare_yyyy,
-            target_yyyy = EXCLUDED.target_yyyy,
-            map_sheet_num = EXCLUDED.map_sheet_num,
             class_before_cd = EXCLUDED.class_before_cd,
             class_before_prob = EXCLUDED.class_before_prob,
             class_after_cd = EXCLUDED.class_after_cd,
             class_after_prob = EXCLUDED.class_after_prob,
             geom = EXCLUDED.geom,
             area = EXCLUDED.area,
-            data_uid = EXCLUDED.data_uid,
-            updated_dttm = now()
+            updated_dttm = now(),
+            ref_map_sheet_num = EXCLUDED.ref_map_sheet_num
         """;
 
-    return em.createNativeQuery(sql).setParameter("id", id).executeUpdate();
+    em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
+  }
+
+  @Override
+  public void upsertSttcFromInferenceResults(Long analUid) {
+
+    String sql =
+        """
+        INSERT INTO tb_map_sheet_anal_sttc
+        (
+            compare_yyyy
+            ,target_yyyy
+            ,map_sheet_num
+            ,class_before_cnt
+            ,class_after_cnt
+            ,created_dttm
+            ,created_uid
+            ,updated_dttm
+            ,updated_uid
+            ,ref_map_sheet_num
+            ,data_uid
+            ,class_before_cd
+            ,class_after_cd
+            ,class_after_prob_avg
+            ,anal_uid
+        )
+        SELECT
+            msadig.compare_yyyy
+            ,msadig.target_yyyy
+            ,msadig.map_sheet_num
+            ,0 AS class_before_cnt
+            ,COUNT(*) AS class_after_cnt
+            ,now() AS created_dttm
+            ,1 AS created_uid
+            ,now() AS updated_dttm
+            ,1 AS updated_uid
+            ,msadig.ref_map_sheet_num
+            ,msadig.data_uid
+            ,msadig.class_before_cd
+            ,msadig.class_after_cd
+            ,AVG(msadig.class_after_prob) AS class_after_prob_avg
+            ,msai.anal_uid
+        FROM tb_map_sheet_anal_inference msai
+        INNER JOIN tb_map_sheet_anal_data_inference msadi
+            ON msai.anal_uid = msadi.anal_uid
+        INNER JOIN tb_map_sheet_anal_data_inference_geom msadig
+            ON msadi.data_uid = msadig.data_uid
+        WHERE msai.anal_uid = :analUid
+        GROUP BY
+            msadig.compare_yyyy
+            ,msadig.target_yyyy
+            ,msadig.map_sheet_num
+            ,msadig.ref_map_sheet_num
+            ,msadig.class_before_cd
+            ,msadig.class_after_cd
+            ,msadig.data_uid
+            ,msai.anal_uid
+        ON CONFLICT (
+            anal_uid,
+            compare_yyyy,
+            target_yyyy,
+            map_sheet_num,
+            ref_map_sheet_num,
+            data_uid,
+            class_before_cd,
+            class_after_cd
+        )
+        DO UPDATE SET
+            class_before_cnt = EXCLUDED.class_before_cnt
+            ,class_after_cnt = EXCLUDED.class_after_cnt
+            ,class_after_prob_avg = EXCLUDED.class_after_prob_avg
+            ,updated_dttm = now()
+            ,updated_uid = EXCLUDED.updated_uid
+        """;
+
+    em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
   }
 
   // ===============================
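Note: PostgreSQL rejects ON CONFLICT with an explicit column list unless a unique index or constraint matches that conflict target, so the new statistics upsert assumes tb_map_sheet_anal_sttc carries a unique index over its eight grouping columns. A hypothetical migration sketch of that prerequisite (the index name is an assumption, not taken from this commit):

    // Assumed prerequisite for the eight-column ON CONFLICT target above.
    // The index name is hypothetical; the real definition would live in a DB migration.
    String requiredUniqueIndex =
        """
        CREATE UNIQUE INDEX ux_tb_map_sheet_anal_sttc_grain
            ON tb_map_sheet_anal_sttc (
                anal_uid, compare_yyyy, target_yyyy, map_sheet_num,
                ref_map_sheet_num, data_uid, class_before_cd, class_after_cd
            )
        """;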
@@ -228,12 +321,23 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    * @return data_uid 목록
    */
   @Override
-  public List<Long> findPendingDataUids(int limit) {
+  public List<Long> findPendingDataUids(int limit, Long learnId) {
 
     return queryFactory
-        .select(inferenceEntity.id)
+        .select(inferenceDataEntity.id)
         .from(inferenceEntity)
-        .where(inferenceEntity.fileCreatedYn.isFalse().or(inferenceEntity.fileCreatedYn.isNull()))
-        .orderBy(inferenceEntity.id.asc())
+        .innerJoin(inferenceDataEntity)
+        .on(inferenceEntity.id.eq(inferenceDataEntity.analUid))
+        .where(
+            inferenceEntity
+                .learnId
+                .eq(learnId)
+                .and(
+                    inferenceDataEntity
+                        .fileCreatedYn
+                        .isFalse()
+                        .or(inferenceDataEntity.fileCreatedYn.isNull())))
+        .orderBy(inferenceDataEntity.id.asc())
         .limit(limit)
         .fetch();
   }
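Note: the rewritten query now drives the pending-file scan from the analysis header, joining tb_map_sheet_anal_inference to its per-sheet rows in tb_map_sheet_anal_data_inference and filtering by learn_id. For readers tracing the QueryDSL chain, it corresponds roughly to the following SQL (a sketch of the intended query, not QueryDSL's generated output; aliases are illustrative):

    // Approximate SQL for the QueryDSL query above (sketch; generated aliases differ).
    String approximateSql =
        """
        SELECT d.data_uid
        FROM tb_map_sheet_anal_inference i
        JOIN tb_map_sheet_anal_data_inference d ON i.anal_uid = d.anal_uid
        WHERE i.learn_id = :learnId
          AND (d.file_created_yn = false OR d.file_created_yn IS NULL)
        ORDER BY d.data_uid ASC
        LIMIT :limit
        """;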
@@ -255,11 +359,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
 
     return (int)
         queryFactory
-            .update(inferenceEntity)
-            .set(inferenceEntity.fileCreatedYn, false)
-            .set(inferenceEntity.fileCreatedDttm, (ZonedDateTime) null)
-            .set(inferenceEntity.updatedDttm, now)
-            .where(inferenceEntity.id.eq(dataUid))
+            .update(inferenceDataEntity)
+            .set(inferenceDataEntity.fileCreatedYn, false)
+            .set(inferenceDataEntity.fileCreatedDttm, (ZonedDateTime) null)
+            .set(inferenceDataEntity.updatedDttm, now)
+            .where(inferenceDataEntity.id.eq(dataUid))
             .execute();
   }
 
@@ -274,11 +378,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
 
     return (int)
         queryFactory
-            .update(inferenceEntity)
-            .set(inferenceEntity.fileCreatedYn, true)
-            .set(inferenceEntity.fileCreatedDttm, now)
-            .set(inferenceEntity.updatedDttm, now)
-            .where(inferenceEntity.id.eq(dataUid))
+            .update(inferenceDataEntity)
+            .set(inferenceDataEntity.fileCreatedYn, true)
+            .set(inferenceDataEntity.fileCreatedDttm, now)
+            .set(inferenceDataEntity.updatedDttm, now)
+            .where(inferenceDataEntity.id.eq(dataUid))
             .execute();
   }
 
@@ -11,6 +11,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
 import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
 import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto;
+import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
 import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
 import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto;
 import jakarta.transaction.Transactional;
@@ -34,8 +35,8 @@ import org.springframework.stereotype.Service;
 public class MapSheetInferenceJobService {
 
   private final InferenceResultCoreService inferenceResultCoreService;
+  private final InferenceResultShpService inferenceResultShpService;
   private final ExternalHttpClient externalHttpClient;
-
   private final ObjectMapper objectMapper;
 
   @Value("${inference.batch-url}")
@@ -47,6 +48,9 @@ public class MapSheetInferenceJobService {
   @Value("${inference.url}")
   private String inferenceUrl;
 
+  @Value("${mapsheet.shp.baseurl}")
+  private String baseDir;
+
   /** 추론 진행 배치 1분 */
   @Scheduled(fixedDelay = 60_000)
   @Transactional
@@ -204,6 +208,13 @@ public class MapSheetInferenceJobService {
       save.setInferEndDttm(now);
       save.setType("M3"); // 마지막 모델 기준
       inferenceResultCoreService.update(save);
+
+      // 추론 종료일때 geom 데이터 저장
+      inferenceResultCoreService.upsertGeomData(sheet.getId());
+
+      // TODO jar로 생성하는걸로 변경
+      // 추론 종료일때 shp 파일 생성
+      // inferenceResultShpService.createShpFile(sheet.getId());
     }
 
     /**