Update training management

2026-01-05 10:17:01 +09:00
parent 0142da4f72
commit 4cea4bb2ed


@@ -17,15 +17,20 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  private final JPAQueryFactory queryFactory;

- @PersistenceContext private final EntityManager em;
+ @PersistenceContext
+ private final EntityManager em;

- /** tb_map_sheet_anal_data_inference */
+ /**
+  * tb_map_sheet_anal_data_inference
+  */
  private final QMapSheetAnalDataInferenceEntity inferenceEntity =
      QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;

- /** tb_map_sheet_anal_data_inference_geom */
+ /**
+  * tb_map_sheet_anal_data_inference_geom
+  */
  private final QMapSheetAnalDataInferenceGeomEntity inferenceGeomEntity =
      QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;

  // ===============================
  // Upsert (Native only)
@@ -34,33 +39,28 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  @Override
  public int upsertGroupsFromMapSheetAnal() {
    String sql =
        """
-       INSERT INTO tb_map_sheet_anal_inference (
-           compare_yyyy,
-           target_yyyy,
-           anal_map_sheet,
-           stage,
-           anal_title
-       )
-       SELECT
-           r.input1 AS compare_yyyy,
-           r.input2 AS target_yyyy,
-           r.map_id AS anal_map_sheet,
-           r.stage,
-           CONCAT(r.stage ,'_', r.input1 ,'_', r.input2 ,'_', r.map_id) as anal_title
-       FROM inference_results r
-       GROUP BY r.stage, r.input1, r.input2, r.map_id
-       ON CONFLICT (compare_yyyy, target_yyyy, anal_map_sheet, stage)
-       DO UPDATE SET
-           updated_dttm = now()
+       INSERT INTO tb_map_sheet_anal_inference (
+           compare_yyyy,
+           target_yyyy,
+           stage,
+           anal_title
+       )
+       SELECT
+           r.input1 AS compare_yyyy,
+           r.input2 AS target_yyyy,
+           r.stage,
+           CONCAT(r.stage ,'_', r.input1 ,'_', r.input2) AS anal_title
+       FROM inference_results r
+       GROUP BY r.stage, r.input1, r.input2;
        """;
    return em.createNativeQuery(sql).executeUpdate();
  }

  /**
   * Creates/updates analysis data units (stage, compare_yyyy, target_yyyy, map_sheet_num) based on the inference_results table.
   *
   * <p>- file_created_yn = false on first creation - detecting_cnt is based on the inference_results row count
   *
@@ -70,7 +70,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  public int upsertGroupsFromInferenceResults() {
    String sql =
        """
        INSERT INTO tb_map_sheet_anal_data_inference (
            stage,
            compare_yyyy,
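The Javadoc above describes this upsert only in outline, and the diff truncates the statement after its first few columns. Purely as orientation, one plausible full shape is sketched below; the conflict target, the r.map_id → map_sheet_num mapping, and the DO UPDATE column list are assumptions, not the committed SQL.

    // Hedged sketch, not the committed query: the diff cuts the SQL off above, so
    // the conflict key and DO UPDATE list below are assumptions based on the Javadoc.
    String sql =
        """
        INSERT INTO tb_map_sheet_anal_data_inference (
            stage,
            compare_yyyy,
            target_yyyy,
            map_sheet_num,
            detecting_cnt,
            file_created_yn
        )
        SELECT
            r.stage,
            r.input1 AS compare_yyyy,
            r.input2 AS target_yyyy,
            r.map_id AS map_sheet_num,   -- assumed mapping, mirroring the removed group query above
            COUNT(*) AS detecting_cnt,   -- detecting_cnt based on inference_results row count
            false    AS file_created_yn  -- false on first creation
        FROM inference_results r
        GROUP BY r.stage, r.input1, r.input2, r.map_id
        ON CONFLICT (stage, compare_yyyy, target_yyyy, map_sheet_num)  -- assumed unique key
        DO UPDATE SET
            detecting_cnt = EXCLUDED.detecting_cnt,
            updated_dttm  = now()
        """;
    return em.createNativeQuery(sql).executeUpdate();

In a shape like this, leaving file_created_yn out of the DO UPDATE list would let repeated runs refresh detecting_cnt without re-flagging units that were already exported, which is consistent with the flag-driven methods further down.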
@@ -113,7 +113,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  public int upsertGeomsFromInferenceResults() {
    String sql =
        """
        INSERT INTO tb_map_sheet_anal_data_inference_geom (
            uuid, stage, cd_prob, compare_yyyy, target_yyyy, map_sheet_num,
            class_before_cd, class_before_prob, class_after_cd, class_after_prob,
@@ -188,12 +188,12 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  @Override
  public List<Long> findPendingDataUids(int limit) {
    return queryFactory
        .select(inferenceEntity.id)
        .from(inferenceEntity)
        .where(inferenceEntity.fileCreatedYn.isFalse().or(inferenceEntity.fileCreatedYn.isNull()))
        .orderBy(inferenceEntity.id.asc())
        .limit(limit)
        .fetch();
  }

  // ===============================
@@ -212,13 +212,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    ZonedDateTime now = ZonedDateTime.now();
    return (int)
        queryFactory
            .update(inferenceEntity)
            .set(inferenceEntity.fileCreatedYn, false)
            .set(inferenceEntity.fileCreatedDttm, (ZonedDateTime) null)
            .set(inferenceEntity.updatedDttm, now)
            .where(inferenceEntity.id.eq(dataUid))
            .execute();
  }

  /**
@@ -231,13 +231,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    ZonedDateTime now = ZonedDateTime.now();
    return (int)
        queryFactory
            .update(inferenceEntity)
            .set(inferenceEntity.fileCreatedYn, true)
            .set(inferenceEntity.fileCreatedDttm, now)
            .set(inferenceEntity.updatedDttm, now)
            .where(inferenceEntity.id.eq(dataUid))
            .execute();
  }

  /**
@@ -250,13 +250,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    ZonedDateTime now = ZonedDateTime.now();
    return (int)
        queryFactory
            .update(inferenceGeomEntity)
            .set(inferenceGeomEntity.fileCreatedYn, false)
            .set(inferenceGeomEntity.fileCreatedDttm, (ZonedDateTime) null)
            .set(inferenceGeomEntity.updatedDttm, now)
            .where(inferenceGeomEntity.dataUid.eq(dataUid))
            .execute();
  }

  /**
@@ -274,13 +274,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    ZonedDateTime now = ZonedDateTime.now();
    return (int)
        queryFactory
            .update(inferenceGeomEntity)
            .set(inferenceGeomEntity.fileCreatedYn, true)
            .set(inferenceGeomEntity.fileCreatedDttm, now)
            .set(inferenceGeomEntity.updatedDttm, now)
            .where(inferenceGeomEntity.geoUid.in(geoUids))
            .execute();
  }

  // ===============================
@@ -294,18 +294,18 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   */
  @Override
  public List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(
      Long dataUid, int limit) {
    return queryFactory
        .selectFrom(inferenceGeomEntity)
        .where(
            inferenceGeomEntity.dataUid.eq(dataUid),
            inferenceGeomEntity.geom.isNotNull(),
            inferenceGeomEntity
                .fileCreatedYn
                .isFalse()
                .or(inferenceGeomEntity.fileCreatedYn.isNull()))
        .orderBy(inferenceGeomEntity.geoUid.asc())
        .limit(limit)
        .fetch();
  }
}
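Taken together, findPendingDataUids, findGeomEntitiesByDataUid, and the file_created_yn/file_created_dttm updates suggest a batch export loop along the following lines. This is a usage sketch only: the InferenceFileExportBatch class, the writeGeoJson step, the getGeoUid getter, and the method names markGeomsFileCreated / markDataFileCreated are hypothetical, since the diff truncates the signatures of the flag-update methods.

import java.util.List;

// Hypothetical caller combining the repository methods shown in this diff.
// Names marked "assumed" are illustrative; they do not appear in the commit.
public class InferenceFileExportBatch {

  private final InferenceResultRepositoryImpl repository; // in practice the custom repository interface would be injected

  public InferenceFileExportBatch(InferenceResultRepositoryImpl repository) {
    this.repository = repository;
  }

  public void exportPendingFiles(int dataBatchSize, int geomBatchSize) {
    // Data units whose files have not been generated yet (file_created_yn false or null).
    for (Long dataUid : repository.findPendingDataUids(dataBatchSize)) {
      // Geometries of that unit that still need to be written out.
      List<MapSheetAnalDataInferenceGeomEntity> geoms =
          repository.findGeomEntitiesByDataUid(dataUid, geomBatchSize);

      writeGeoJson(dataUid, geoms); // assumed export step, outside this repository

      List<Long> geoUids =
          geoms.stream().map(MapSheetAnalDataInferenceGeomEntity::getGeoUid).toList();
      repository.markGeomsFileCreated(geoUids); // assumed name for the geoUid.in(...) update
      repository.markDataFileCreated(dataUid);  // assumed name for the id.eq(dataUid) update
    }
  }

  private void writeGeoJson(Long dataUid, List<MapSheetAnalDataInferenceGeomEntity> geoms) {
    // placeholder for the actual file-generation logic
  }
}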