Merge pull request '[KC-99] Fix inference-completed detail list' (#245) from feat/infer_dev_260107 into develop
Reviewed-on: https://kamco.gitea.gs.dabeeo.com/dabeeo/kamco-dabeeo-backoffice/pulls/245
This commit is contained in:
@@ -37,10 +37,10 @@ public class InferenceResultShpApiController {
        @ApiResponse(responseCode = "400", description = "Invalid search condition", content = @Content),
        @ApiResponse(responseCode = "500", description = "Server error", content = @Content)
      })
  @PostMapping("/save/{id}")
  @PostMapping("/save/{learnId}")
  public ApiResponseDto<InferenceResultShpDto.InferenceCntDto> saveInferenceData(
      @PathVariable Long id) {
    return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(id));
      @PathVariable Long learnId) {
    return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(learnId));
  }

  @Operation(summary = "Create shp file", description = "Create shp file")
@@ -56,8 +56,9 @@ public class InferenceResultShpApiController {
        @ApiResponse(responseCode = "400", description = "Invalid search condition", content = @Content),
        @ApiResponse(responseCode = "500", description = "Server error", content = @Content)
      })
  @PostMapping("/create")
  public ApiResponseDto<InferenceResultShpDto.FileCntDto> createShpFile() {
    return ApiResponseDto.createOK(inferenceResultShpService.createShpFile());
  @PostMapping("/create/{learnId}")
  public ApiResponseDto<InferenceResultShpDto.FileCntDto> createShpFile(
      @PathVariable Long learnId) {
    return ApiResponseDto.createOK(inferenceResultShpService.createShpFile(learnId));
  }
}
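Both endpoints are now scoped to a single learn run. A minimal sketch of calling the renamed routes (the controller's base path and the test client setup are assumptions, not shown in this diff):

```java
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.http.ResponseEntity;

class ShpEndpointSketch {
  private final TestRestTemplate rest = new TestRestTemplate(); // assumed test client

  void run(long learnId) {
    // Persist inference rows for one learn run, then build its shp files.
    ResponseEntity<String> saved =
        rest.postForEntity("/save/{learnId}", null, String.class, learnId);
    ResponseEntity<String> created =
        rest.postForEntity("/create/{learnId}", null, String.class, learnId);
  }
}
```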
@@ -35,7 +35,7 @@ public class InferenceResultShpService {
   * <p>If a run fails midway, the next run regenerates everything.
   */
  @Transactional
  public InferenceResultShpDto.FileCntDto createShpFile() {
  public InferenceResultShpDto.FileCntDto createShpFile(Long learnId) {

    // TODO: switch to batch execution
    int batchSize = 100;
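The recovery guarantee in the doc comment comes from rescanning pending rows rather than tracking progress. A sketch of that loop under the names in this diff (writeShp is hypothetical):

```java
void createPendingFiles(Long learnId) {
  int batchSize = 100;
  for (Long dataUid : coreService.findPendingDataUids(batchSize, learnId)) {
    try {
      writeShp(dataUid); // hypothetical per-sheet writer
      // on success the row's file_created_yn flips, so it leaves the scan
    } catch (RuntimeException e) {
      // leave file_created_yn false/null; the next run re-selects this uid
    }
  }
}
```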
@@ -43,7 +43,7 @@ public class InferenceResultShpService {

    WriteCnt total = WriteCnt.zero();

    List<Long> dataUids = coreService.findPendingDataUids(batchSize);
    List<Long> dataUids = coreService.findPendingDataUids(batchSize, learnId);

    for (Long dataUid : dataUids) {
@@ -414,4 +414,16 @@ public class InferenceResultCoreService {
  public Page<Geom> getInferenceGeomList(String uuid, SearchGeoReq searchGeoReq) {
    return mapSheetLearnRepository.getInferenceGeomList(uuid, searchGeoReq);
  }

  /**
   * Saves geom data.
   *
   * @param id learn table id
   */
  public void upsertGeomData(Long id) {
    Long analId = inferenceResultRepository.upsertGroupsFromMapSheetAnal(id);
    inferenceResultRepository.upsertGroupsFromInferenceResults(analId);
    inferenceResultRepository.upsertGeomsFromInferenceResults(analId);
    inferenceResultRepository.upsertSttcFromInferenceResults(analId);
  }
}
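upsertGeomData chains four upserts: the first returns the parent anal_uid (via RETURNING, see the repository changes below), and every later statement is scoped by that key, which is what makes re-running the chain after a partial failure idempotent. The pattern in miniature (method names generic, not the production API):

```java
// The parent row is upserted and its key recovered in the same statement;
// children are then upserted under that key, so repeating the chain converges.
long analId = upsertParent(learnId);   // INSERT ... ON CONFLICT ... RETURNING anal_uid
upsertChildRows(analId);               // per-map-sheet rows
upsertGeometries(analId);              // geometry rows
upsertStatistics(analId);              // aggregated statistics
```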
@@ -20,22 +20,19 @@ public class InferenceResultShpCoreService {
   */
  @Transactional
  public InferenceResultShpDto.InferenceCntDto buildInferenceData(Long id) {
    // int sheetAnalDataCnt = repo.upsertGroupsFromMapSheetAnal();
    // int inferenceCnt = repo.upsertGroupsFromInferenceResults();
    int inferenceGeomCnt = repo.upsertGeomsFromInferenceResults(id);

    Long analId = repo.upsertGroupsFromMapSheetAnal(id);
    repo.upsertGroupsFromInferenceResults(analId);
    repo.upsertGeomsFromInferenceResults(analId);
    repo.upsertSttcFromInferenceResults(analId);
    InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto();
    // cntDto.setSheetAnalDataCnt(sheetAnalDataCnt);
    // cntDto.setInferenceCnt(inferenceCnt);
    cntDto.setInferenceGeomCnt(inferenceGeomCnt);

    return cntDto;
  }

  /** Returns analysis data (data_uid) rows whose file creation has not completed. */
  @Transactional(readOnly = true)
  public List<Long> findPendingDataUids(int limit) {
    return repo.findPendingDataUids(limit);
  public List<Long> findPendingDataUids(int limit, Long learnId) {
    return repo.findPendingDataUids(limit, learnId);
  }

  /**
@@ -23,25 +23,25 @@ public class MapSheetAnalDataInferenceEntity {
  @Column(name = "data_uid", nullable = false)
  private Long id;

  @Size(max = 128)
  @Column(name = "data_name", length = 128)
  private String dataName;

  @Size(max = 255)
  @Column(name = "data_path")
  private String dataPath;

  @Size(max = 128)
  @Column(name = "data_type", length = 128)
  private String dataType;

  @Size(max = 128)
  @Column(name = "data_crs_type", length = 128)
  private String dataCrsType;

  @Size(max = 255)
  @Column(name = "data_crs_type_name")
  private String dataCrsTypeName;
  // @Size(max = 128)
  // @Column(name = "data_name", length = 128)
  // private String dataName;
  //
  // @Size(max = 255)
  // @Column(name = "data_path")
  // private String dataPath;
  //
  // @Size(max = 128)
  // @Column(name = "data_type", length = 128)
  // private String dataType;
  //
  // @Size(max = 128)
  // @Column(name = "data_crs_type", length = 128)
  // private String dataCrsType;
  //
  // @Size(max = 255)
  // @Column(name = "data_crs_type_name")
  // private String dataCrsTypeName;

  @ColumnDefault("now()")
  @Column(name = "created_dttm")
@@ -63,27 +63,27 @@ public class MapSheetAnalDataInferenceEntity {
  @Column(name = "target_yyyy")
  private Integer targetYyyy;

  @Column(name = "data_json", length = Integer.MAX_VALUE)
  private String dataJson;
  // @Column(name = "data_json", length = Integer.MAX_VALUE)
  // private String dataJson;
  //
  // @Size(max = 20)
  // @ColumnDefault("'0'")
  // @Column(name = "data_state", length = 20)
  // private String dataState;

  @Size(max = 20)
  @ColumnDefault("'0'")
  @Column(name = "data_state", length = 20)
  private String dataState;

  @ColumnDefault("now()")
  @Column(name = "data_state_dttm")
  private ZonedDateTime dataStateDttm;

  @Column(name = "anal_strt_dttm")
  private ZonedDateTime analStrtDttm;

  @Column(name = "anal_end_dttm")
  private ZonedDateTime analEndDttm;

  @ColumnDefault("0")
  @Column(name = "anal_sec")
  private Long analSec;
  // @ColumnDefault("now()")
  // @Column(name = "data_state_dttm")
  // private ZonedDateTime dataStateDttm;
  //
  // @Column(name = "anal_strt_dttm")
  // private ZonedDateTime analStrtDttm;
  //
  // @Column(name = "anal_end_dttm")
  // private ZonedDateTime analEndDttm;
  //
  // @ColumnDefault("0")
  // @Column(name = "anal_sec")
  // private Long analSec;

  @Size(max = 20)
  @Column(name = "anal_state", length = 20)
@@ -95,20 +95,20 @@ public class MapSheetAnalDataInferenceEntity {
  @Column(name = "map_sheet_num")
  private Long mapSheetNum;

  @ColumnDefault("0")
  @Column(name = "detecting_cnt")
  private Long detectingCnt;
  // @ColumnDefault("0")
  // @Column(name = "detecting_cnt")
  // private Long detectingCnt;

  @ColumnDefault("0")
  @Column(name = "pnu")
  private Long pnu;
  // @ColumnDefault("0")
  // @Column(name = "pnu")
  // private Long pnu;

  @Size(max = 20)
  @Column(name = "down_state", length = 20)
  private String downState;

  @Column(name = "down_state_dttm")
  private ZonedDateTime downStateDttm;
  // @Size(max = 20)
  // @Column(name = "down_state", length = 20)
  // private String downState;
  //
  // @Column(name = "down_state_dttm")
  // private ZonedDateTime downStateDttm;

  @Size(max = 20)
  @Column(name = "fit_state", length = 20)
@@ -150,18 +150,18 @@ public class MapSheetAnalDataInferenceEntity {
  @Column(name = "file_created_yn")
  private Boolean fileCreatedYn;

  @Size(max = 100)
  @Column(name = "m1", length = 100)
  private String m1;

  @Size(max = 100)
  @Column(name = "m2", length = 100)
  private String m2;

  @Size(max = 100)
  @Column(name = "m3", length = 100)
  private String m3;

  @Column(name = "file_created_dttm")
  private ZonedDateTime fileCreatedDttm;
  // @Size(max = 100)
  // @Column(name = "m1", length = 100)
  // private String m1;
  //
  // @Size(max = 100)
  // @Column(name = "m2", length = 100)
  // private String m2;
  //
  // @Size(max = 100)
  // @Column(name = "m3", length = 100)
  // private String m3;

}
@@ -159,4 +159,7 @@ public class MapSheetAnalInferenceEntity {
  @ColumnDefault("'N'")
  @Column(name = "inspection_closed_yn", length = 1)
  private String inspectionClosedYn = "N";

  @Column(name = "learn_id")
  private Long learnId;
}
@@ -5,13 +5,15 @@ import java.util.List;

public interface InferenceResultRepositoryCustom {

  int upsertGroupsFromMapSheetAnal();
  Long upsertGroupsFromMapSheetAnal(Long id);

  int upsertGroupsFromInferenceResults();
  void upsertGroupsFromInferenceResults(Long analId);

  int upsertGeomsFromInferenceResults(Long id);
  void upsertGeomsFromInferenceResults(Long analId);

  List<Long> findPendingDataUids(int limit);
  void upsertSttcFromInferenceResults(Long analId);

  List<Long> findPendingDataUids(int limit, Long learnId);

  int resetInferenceCreated(Long dataUid);
@@ -3,6 +3,7 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity;
import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import java.time.ZonedDateTime;
@@ -17,8 +18,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {
  private final JPAQueryFactory queryFactory;
  private final EntityManager em;

  private final QMapSheetAnalInferenceEntity inferenceEntity =
      QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;

  /** tb_map_sheet_anal_data_inference */
  private final QMapSheetAnalDataInferenceEntity inferenceEntity =
  private final QMapSheetAnalDataInferenceEntity inferenceDataEntity =
      QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;

  /** tb_map_sheet_anal_data_inference_geom */
@@ -30,7 +34,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {
  // ===============================

  @Override
  public int upsertGroupsFromMapSheetAnal() {
  public Long upsertGroupsFromMapSheetAnal(Long id) {
    String sql =
        """
        INSERT INTO tb_map_sheet_anal_inference (
@@ -40,26 +44,36 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {
          anal_title,
          detecting_cnt,
          created_dttm,
          updated_dttm
          m1_model_batch_id,
          m2_model_batch_id,
          m3_model_batch_id,
          learn_id
        )
        SELECT
          r.stage,
          r.input1 AS compare_yyyy,
          r.input2 AS target_yyyy,
          CONCAT(r.stage, '_', r.input1, '_', r.input2) AS anal_title,
          COUNT(*) AS detecting_cnt,
          r.compare_yyyy,
          r.target_yyyy,
          CONCAT(r.stage, '_', r.compare_yyyy, '_', r.target_yyyy) AS anal_title,
          r.detecting_cnt,
          now(),
          now()
        FROM inference_results r
        GROUP BY r.stage, r.input1, r.input2
          r.m1_model_batch_id,
          r.m2_model_batch_id,
          r.m3_model_batch_id,
          r.id
        FROM tb_map_sheet_learn r
        WHERE r.id = :id
        ON CONFLICT (stage, compare_yyyy, target_yyyy)
        DO UPDATE SET
          detecting_cnt = EXCLUDED.detecting_cnt,
          anal_title = EXCLUDED.anal_title,
          updated_dttm = now()
          updated_dttm = now(),
          learn_id = EXCLUDED.learn_id
        RETURNING anal_uid
        """;

    return em.createNativeQuery(sql).executeUpdate();
    Object result = em.createNativeQuery(sql).setParameter("id", id).getSingleResult();

    return ((Number) result).longValue();
  }

  /**
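The switch from executeUpdate() to getSingleResult() is what the new RETURNING clause requires: PostgreSQL's INSERT ... ON CONFLICT ... DO UPDATE ... RETURNING yields the key on both the insert and the update path, so a single round trip resolves anal_uid. A standalone sketch of the pattern with an illustrative table (not the production schema):

```java
import jakarta.persistence.EntityManager;

class UpsertReturningSketch {
  Long upsertAndGetKey(EntityManager em, long learnId) {
    String sql = """
        INSERT INTO demo_group (learn_id, updated_dttm)
        VALUES (:learnId, now())
        ON CONFLICT (learn_id)
        DO UPDATE SET updated_dttm = now()
        RETURNING group_uid
        """;
    Object key = em.createNativeQuery(sql).setParameter("learnId", learnId).getSingleResult();
    return ((Number) key).longValue(); // the driver may hand back Long or BigInteger
  }
}
```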
@@ -71,50 +85,51 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {
   * @return number of affected rows
   */
  @Override
  public int upsertGroupsFromInferenceResults() {
  public void upsertGroupsFromInferenceResults(Long analId) {

    String sql =
        """
        INSERT INTO tb_map_sheet_anal_data_inference (
          anal_uid,
          stage,
          compare_yyyy,
          target_yyyy,
          map_sheet_num,
          detecting_cnt,
          file_created_yn,
          stage,
          created_dttm,
          updated_dttm
          ref_map_sheet_num
        )
        SELECT
          ai.id AS anal_uid,
          r.stage,
          r.input1 AS compare_yyyy,
          r.input2 AS target_yyyy,
          r.map_id AS map_sheet_num,
          COUNT(*) AS detecting_cnt,
          false AS file_created_yn,
          msl.anal_uid,
          msl.compare_yyyy,
          msl.target_yyyy,
          CASE
            WHEN r.map_id ~ '^[0-9]+$' THEN r.map_id::bigint
            ELSE NULL
          END AS map_sheet_num,
          msl.stage,
          now(),
          now()
        FROM inference_results r
        JOIN tb_map_sheet_anal_inference ai
          ON ai.stage = r.stage
          AND ai.compare_yyyy = r.input1
          AND ai.target_yyyy = r.input2
        GROUP BY
          ai.id,
          r.stage,
          r.input1,
          r.input2,
          r.map_id
          ((map_id::INTEGER)/1000) AS ref_map_sheet_num
        FROM inference_results_testing r
        JOIN tb_map_sheet_anal_inference msl
          ON r.batch_id IN (
            msl.m1_model_batch_id,
            msl.m2_model_batch_id,
            msl.m3_model_batch_id
          )
        WHERE msl.anal_uid = :analId
        GROUP BY msl.anal_uid,
          msl.compare_yyyy,
          msl.target_yyyy,
          r.map_id,
          msl.stage
        ON CONFLICT (stage, compare_yyyy, target_yyyy, map_sheet_num)
        DO UPDATE SET
          anal_uid = EXCLUDED.anal_uid,
          detecting_cnt = EXCLUDED.detecting_cnt,
          ref_map_sheet_num = EXCLUDED.ref_map_sheet_num,
          updated_dttm = now()
        """;

    return em.createNativeQuery(sql).executeUpdate();
    em.createNativeQuery(sql).setParameter("analId", analId).executeUpdate();
  }

  /**
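Both new queries guard the map_id cast with a regex before converting. The reason: casting non-numeric text to bigint raises an error in PostgreSQL and aborts the whole INSERT ... SELECT, so the pattern match must gate the cast. An isolated illustration (sample values are made up):

```java
String castGuardSketch = """
    SELECT CASE
             WHEN map_id ~ '^[0-9]+$' THEN map_id::bigint  -- safe cast
             ELSE NULL                                     -- non-numeric ids become NULL
           END AS map_sheet_num
    FROM (VALUES ('35701'), ('35-701')) AS t(map_id)
    """;
// '35701' -> 35701; '35-701' -> NULL. Without the guard, the second row's
// cast would raise and roll back the statement.
```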
@@ -126,12 +141,12 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {
   * @return number of affected rows
   */
  @Override
  public int upsertGeomsFromInferenceResults(Long id) {
  public void upsertGeomsFromInferenceResults(Long analUid) {

    String sql =
        """
        INSERT INTO tb_map_sheet_anal_data_inference_geom (
          uuid,
          result_uid,
          stage,
          cd_prob,
          compare_yyyy,
@@ -145,10 +160,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {
          geom_center,
          area,
          data_uid,
          created_dttm
          created_dttm,
          ref_map_sheet_num
        )
        SELECT
          x.uuid,
          x.result_uid,
          x.stage,
          x.cd_prob,
          x.compare_yyyy,
@@ -162,59 +178,136 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {
          ST_Centroid(x.geom),
          x.area,
          x.data_uid,
          x.created_dttm
          x.created_dttm,
          x.ref_map_sheet_num
        FROM (
          SELECT DISTINCT ON (r.uid)
            r.uid AS uuid,
            di.stage AS stage,
            r.cd_prob,
            r.input1 AS compare_yyyy,
            r.input2 AS target_yyyy,
            CASE
              WHEN r.map_id ~ '^[0-9]+$' THEN r.map_id::bigint
              ELSE NULL
            END AS map_sheet_num,
            r.before_c AS class_before_cd,
            r.before_p AS class_before_prob,
            r.after_c AS class_after_cd,
            r.after_p AS class_after_prob,
            CASE
              WHEN r.geometry IS NULL THEN NULL
              WHEN LEFT(r.geometry, 2) = '01'
                THEN ST_SetSRID(ST_GeomFromWKB(decode(r.geometry, 'hex')), 5186)
              ELSE ST_SetSRID(ST_GeomFromText(r.geometry), 5186)
            END AS geom,
            r.area,
            di.id AS data_uid,
            r.created_date AS created_dttm
          FROM inference_results_testing r
          JOIN tb_map_sheet_learn di
            ON r.batch_id IN (
              di.m1_model_batch_id,
              di.m2_model_batch_id,
              di.m3_model_batch_id
            )
          WHERE di.id = :id
          ORDER BY r.uid, r.created_date DESC NULLS LAST
          SELECT
            r.uid AS result_uid,
            msadi.stage,
            r.cd_prob,
            r.input1 AS compare_yyyy,
            r.input2 AS target_yyyy,
            CASE
              WHEN r.map_id ~ '^[0-9]+$' THEN r.map_id::bigint
              ELSE NULL
            END AS map_sheet_num,
            r.before_c AS class_before_cd,
            r.before_p AS class_before_prob,
            r.after_c AS class_after_cd,
            r.after_p AS class_after_prob,
            CASE
              WHEN r.geometry IS NULL THEN NULL
              WHEN LEFT(r.geometry, 2) = '01'
                THEN ST_SetSRID(ST_GeomFromWKB(decode(r.geometry, 'hex')), 5186)
              ELSE ST_SetSRID(ST_GeomFromText(r.geometry), 5186)
            END AS geom,
            r.area,
            msadi.data_uid,
            r.created_date AS created_dttm,
            msadi.ref_map_sheet_num
          FROM inference_results_testing r
          JOIN tb_map_sheet_anal_inference msl
            ON r.batch_id IN (
              msl.m1_model_batch_id,
              msl.m2_model_batch_id,
              msl.m3_model_batch_id
            )
          INNER JOIN tb_map_sheet_anal_data_inference msadi
            ON msadi.anal_uid = msl.anal_uid
            AND r.map_id ~ '^[0-9]+$'
            AND r.map_id::bigint = msadi.map_sheet_num
          WHERE msl.anal_uid = :analUid
          ORDER BY r.uid, r.created_date DESC NULLS LAST
        ) x
        ON CONFLICT (uuid)
        ON CONFLICT (result_uid)
        DO UPDATE SET
          stage = EXCLUDED.stage,
          cd_prob = EXCLUDED.cd_prob,
          compare_yyyy = EXCLUDED.compare_yyyy,
          target_yyyy = EXCLUDED.target_yyyy,
          map_sheet_num = EXCLUDED.map_sheet_num,
          class_before_cd = EXCLUDED.class_before_cd,
          class_before_prob = EXCLUDED.class_before_prob,
          class_after_cd = EXCLUDED.class_after_cd,
          class_after_prob = EXCLUDED.class_after_prob,
          geom = EXCLUDED.geom,
          area = EXCLUDED.area,
          data_uid = EXCLUDED.data_uid,
          updated_dttm = now()
          updated_dttm = now(),
          ref_map_sheet_num = EXCLUDED.ref_map_sheet_num
        """;

    return em.createNativeQuery(sql).setParameter("id", id).executeUpdate();
    em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
  }

  @Override
  public void upsertSttcFromInferenceResults(Long analUid) {

    String sql =
        """
        INSERT INTO tb_map_sheet_anal_sttc
        (
          compare_yyyy
          ,target_yyyy
          ,map_sheet_num
          ,class_before_cnt
          ,class_after_cnt
          ,created_dttm
          ,created_uid
          ,updated_dttm
          ,updated_uid
          ,ref_map_sheet_num
          ,data_uid
          ,class_before_cd
          ,class_after_cd
          ,class_after_prob_avg
          ,anal_uid
        )
        SELECT
          msadig.compare_yyyy
          ,msadig.target_yyyy
          ,msadig.map_sheet_num
          ,0 AS class_before_cnt
          ,COUNT(*) AS class_after_cnt
          ,now() AS created_dttm
          ,1 AS created_uid
          ,now() AS updated_dttm
          ,1 AS updated_uid
          ,msadig.ref_map_sheet_num
          ,msadig.data_uid
          ,msadig.class_before_cd
          ,msadig.class_after_cd
          ,AVG(msadig.class_after_prob) AS class_after_prob_avg
          ,msai.anal_uid
        FROM tb_map_sheet_anal_inference msai
        INNER JOIN tb_map_sheet_anal_data_inference msadi
          ON msai.anal_uid = msadi.anal_uid
        INNER JOIN tb_map_sheet_anal_data_inference_geom msadig
          ON msadi.data_uid = msadig.data_uid
        WHERE msai.anal_uid = :analUid
        GROUP BY
          msadig.compare_yyyy
          ,msadig.target_yyyy
          ,msadig.map_sheet_num
          ,msadig.ref_map_sheet_num
          ,msadig.class_before_cd
          ,msadig.class_after_cd
          ,msadig.data_uid
          ,msai.anal_uid
        ON CONFLICT (
          anal_uid,
          compare_yyyy,
          target_yyyy,
          map_sheet_num,
          ref_map_sheet_num,
          data_uid,
          class_before_cd,
          class_after_cd
        )
        DO UPDATE SET
          class_before_cnt = EXCLUDED.class_before_cnt
          ,class_after_cnt = EXCLUDED.class_after_cnt
          ,class_after_prob_avg = EXCLUDED.class_after_prob_avg
          ,updated_dttm = now()
          ,updated_uid = EXCLUDED.updated_uid
        """;

    em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
  }

  // ===============================
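The geometry CASE above relies on a format heuristic: hex-encoded WKB begins with its byte-order flag ('01' for little-endian), while WKT begins with a type keyword such as POLYGON, so a two-character prefix test routes each value to the matching PostGIS parser. SRID 5186 is Korea 2000 / Central Belt 2010. A self-contained illustration (sample value only):

```java
String geometryRoutingSketch = """
    SELECT CASE
             WHEN g IS NULL THEN NULL
             WHEN LEFT(g, 2) = '01'
               THEN ST_SetSRID(ST_GeomFromWKB(decode(g, 'hex')), 5186)  -- hex WKB
             ELSE ST_SetSRID(ST_GeomFromText(g), 5186)                  -- WKT
           END AS geom
    FROM (VALUES ('POINT(127.1 37.5)')) AS t(g)
    """;
```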
@@ -228,12 +321,23 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {
   * @return list of data_uid
   */
  @Override
  public List<Long> findPendingDataUids(int limit) {
  public List<Long> findPendingDataUids(int limit, Long learnId) {

    return queryFactory
        .select(inferenceEntity.id)
        .select(inferenceDataEntity.id)
        .from(inferenceEntity)
        .where(inferenceEntity.fileCreatedYn.isFalse().or(inferenceEntity.fileCreatedYn.isNull()))
        .orderBy(inferenceEntity.id.asc())
        .innerJoin(inferenceDataEntity)
        .on(inferenceEntity.id.eq(inferenceDataEntity.analUid))
        .where(
            inferenceEntity
                .learnId
                .eq(learnId)
                .and(
                    inferenceDataEntity
                        .fileCreatedYn
                        .isFalse()
                        .or(inferenceDataEntity.fileCreatedYn.isNull())))
        .orderBy(inferenceDataEntity.id.asc())
        .limit(limit)
        .fetch();
  }
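The pending scan is now a two-table query: parent rows filtered by learn_id, child rows filtered by the file flag. Roughly the SQL the QueryDSL above should render to (aliases are assumptions based on the entity mappings in this diff):

```java
String pendingScanSketch = """
    SELECT d.data_uid
    FROM tb_map_sheet_anal_inference a
    INNER JOIN tb_map_sheet_anal_data_inference d
      ON a.anal_uid = d.anal_uid
    WHERE a.learn_id = :learnId
      AND (d.file_created_yn = false OR d.file_created_yn IS NULL)
    ORDER BY d.data_uid ASC
    LIMIT :limit
    """;
```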
@@ -255,11 +359,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {

    return (int)
        queryFactory
            .update(inferenceEntity)
            .set(inferenceEntity.fileCreatedYn, false)
            .set(inferenceEntity.fileCreatedDttm, (ZonedDateTime) null)
            .set(inferenceEntity.updatedDttm, now)
            .where(inferenceEntity.id.eq(dataUid))
            .update(inferenceDataEntity)
            .set(inferenceDataEntity.fileCreatedYn, false)
            .set(inferenceDataEntity.fileCreatedDttm, (ZonedDateTime) null)
            .set(inferenceDataEntity.updatedDttm, now)
            .where(inferenceDataEntity.id.eq(dataUid))
            .execute();
  }
@@ -274,11 +378,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom {

    return (int)
        queryFactory
            .update(inferenceEntity)
            .set(inferenceEntity.fileCreatedYn, true)
            .set(inferenceEntity.fileCreatedDttm, now)
            .set(inferenceEntity.updatedDttm, now)
            .where(inferenceEntity.id.eq(dataUid))
            .update(inferenceDataEntity)
            .set(inferenceDataEntity.fileCreatedYn, true)
            .set(inferenceDataEntity.fileCreatedDttm, now)
            .set(inferenceDataEntity.updatedDttm, now)
            .where(inferenceDataEntity.id.eq(dataUid))
            .execute();
  }
@@ -11,6 +11,7 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto;
import jakarta.transaction.Transactional;
@@ -34,8 +35,8 @@ import org.springframework.stereotype.Service;
public class MapSheetInferenceJobService {

  private final InferenceResultCoreService inferenceResultCoreService;
  private final InferenceResultShpService inferenceResultShpService;
  private final ExternalHttpClient externalHttpClient;

  private final ObjectMapper objectMapper;

  @Value("${inference.batch-url}")
|
||||
@Value("${inference.url}")
|
||||
private String inferenceUrl;
|
||||
|
||||
@Value("${mapsheet.shp.baseurl}")
|
||||
private String baseDir;
|
||||
|
||||
/** 추론 진행 배치 1분 */
|
||||
@Scheduled(fixedDelay = 60_000)
|
||||
@Transactional
|
||||
@@ -204,6 +208,13 @@ public class MapSheetInferenceJobService {
        save.setInferEndDttm(now);
        save.setType("M3"); // based on the last model
        inferenceResultCoreService.update(save);

        // When inference finishes, save geom data
        inferenceResultCoreService.upsertGeomData(sheet.getId());

        // TODO: switch to jar-based generation
        // When inference finishes, create shp files
        // inferenceResultShpService.createShpFile(sheet.getId());
      }

  /**
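One scheduling note on the hook above (a general Spring fact, not specific to this PR): @Scheduled(fixedDelay = 60_000) measures the minute from the end of the previous run, so a long upsertGeomData call delays the next poll rather than overlapping it.

```java
import org.springframework.scheduling.annotation.Scheduled;

class PollTimingSketch {
  @Scheduled(fixedDelay = 60_000) // next run starts 60s after the previous one returns
  void poll() {
    // check inference progress; on completion, persist geom data
    // (this PR calls inferenceResultCoreService.upsertGeomData(sheet.getId()))
  }
}
```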