Update learning management

2026-01-05 10:17:01 +09:00
parent 0142da4f72
commit 4cea4bb2ed


@@ -17,15 +17,20 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   private final JPAQueryFactory queryFactory;
 
-  @PersistenceContext private final EntityManager em;
+  @PersistenceContext
+  private final EntityManager em;
 
-  /** tb_map_sheet_anal_data_inference */
+  /**
+   * tb_map_sheet_anal_data_inference
+   */
   private final QMapSheetAnalDataInferenceEntity inferenceEntity =
-      QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
+          QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
 
-  /** tb_map_sheet_anal_data_inference_geom */
+  /**
+   * tb_map_sheet_anal_data_inference_geom
+   */
   private final QMapSheetAnalDataInferenceGeomEntity inferenceGeomEntity =
-      QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
+          QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
 
   // ===============================
   // Upsert (Native only)
@@ -34,33 +39,28 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   @Override
   public int upsertGroupsFromMapSheetAnal() {
     String sql =
-        """
-        INSERT INTO tb_map_sheet_anal_inference (
-            compare_yyyy,
-            target_yyyy,
-            anal_map_sheet,
-            stage,
-            anal_title
-        )
-        SELECT
-            r.input1 AS compare_yyyy,
-            r.input2 AS target_yyyy,
-            r.map_id AS anal_map_sheet,
-            r.stage,
-            CONCAT(r.stage ,'_', r.input1 ,'_', r.input2 ,'_', r.map_id) as anal_title
-        FROM inference_results r
-        GROUP BY r.stage, r.input1, r.input2, r.map_id
-        ON CONFLICT (compare_yyyy, target_yyyy, anal_map_sheet, stage)
-        DO UPDATE SET
-            updated_dttm = now()
-        """;
+            """
+            INSERT INTO tb_map_sheet_anal_inference (
+                compare_yyyy,
+                target_yyyy,
+                stage,
+                anal_title
+            )
+            SELECT
+                r.input1 AS compare_yyyy,
+                r.input2 AS target_yyyy,
+                r.stage,
+                CONCAT(r.stage ,'_', r.input1 ,'_', r.input2) AS anal_title
+            FROM inference_results r
+            GROUP BY r.stage, r.input1, r.input2;
+            """;
     return em.createNativeQuery(sql).executeUpdate();
   }
   /**
-   * Creates/updates analysis data units (stage, compare_yyyy, target_yyyy, map_sheet_num)
-   * based on the inference_results table.
+   * Creates/updates analysis data units (stage, compare_yyyy, target_yyyy, map_sheet_num) based on the inference_results table.
    *
    * <p>- On initial creation, file_created_yn = false - detecting_cnt is based on the inference_results row count
 
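The rewritten statement above keeps the "upsert" name but is now a plain INSERT: the ON CONFLICT clause was removed along with the anal_map_sheet column. If tb_map_sheet_anal_inference still enforces a unique key over the remaining grouping columns, re-running upsertGroupsFromMapSheetAnal against existing groups will raise a duplicate-key error. A minimal sketch that keeps conflict handling for the reduced key set, assuming a unique constraint on (compare_yyyy, target_yyyy, stage) and the updated_dttm column from the previous version (neither is confirmed by this diff):

    // Sketch only: the conflict target (compare_yyyy, target_yyyy, stage) and
    // updated_dttm are assumptions carried over from the removed clause.
    public int upsertGroupsFromMapSheetAnal() {
      String sql =
          """
          INSERT INTO tb_map_sheet_anal_inference (
              compare_yyyy, target_yyyy, stage, anal_title
          )
          SELECT
              r.input1, r.input2, r.stage,
              CONCAT(r.stage, '_', r.input1, '_', r.input2)
          FROM inference_results r
          GROUP BY r.stage, r.input1, r.input2
          ON CONFLICT (compare_yyyy, target_yyyy, stage)
          DO UPDATE SET updated_dttm = now()
          """;
      return em.createNativeQuery(sql).executeUpdate();
    }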
@@ -70,7 +70,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   public int upsertGroupsFromInferenceResults() {
     String sql =
-        """
+            """
         INSERT INTO tb_map_sheet_anal_data_inference (
             stage,
             compare_yyyy,
@@ -113,7 +113,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   public int upsertGeomsFromInferenceResults() {
     String sql =
-        """
+            """
         INSERT INTO tb_map_sheet_anal_data_inference_geom (
             uuid, stage, cd_prob, compare_yyyy, target_yyyy, map_sheet_num,
             class_before_cd, class_before_prob, class_after_cd, class_after_prob,
@@ -188,12 +188,12 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   @Override
   public List<Long> findPendingDataUids(int limit) {
     return queryFactory
-        .select(inferenceEntity.id)
-        .from(inferenceEntity)
-        .where(inferenceEntity.fileCreatedYn.isFalse().or(inferenceEntity.fileCreatedYn.isNull()))
-        .orderBy(inferenceEntity.id.asc())
-        .limit(limit)
-        .fetch();
+            .select(inferenceEntity.id)
+            .from(inferenceEntity)
+            .where(inferenceEntity.fileCreatedYn.isFalse().or(inferenceEntity.fileCreatedYn.isNull()))
+            .orderBy(inferenceEntity.id.asc())
+            .limit(limit)
+            .fetch();
   }
 
   // ===============================
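findPendingDataUids pages through rows whose file_created_yn is false or NULL, ordered by id. A hypothetical caller, sketched here to show the intended drain loop (the service class, BATCH_SIZE, and generateFileFor are illustrative names, not part of this commit); the loop terminates only if the per-row handler eventually sets file_created_yn = true so rows drop out of the pending filter:

    import java.util.List;

    // Hypothetical service showing the intended drain loop around
    // findPendingDataUids; all names here are illustrative.
    class PendingFileService {
      private static final int BATCH_SIZE = 500;
      private final InferenceResultRepositoryImpl repository;

      PendingFileService(InferenceResultRepositoryImpl repository) {
        this.repository = repository;
      }

      void processAllPending() {
        List<Long> batch;
        // The finder filters on file_created_yn false/NULL, so this ends
        // only once generateFileFor has flipped the flag on every row.
        while (!(batch = repository.findPendingDataUids(BATCH_SIZE)).isEmpty()) {
          for (Long dataUid : batch) {
            generateFileFor(dataUid);
          }
        }
      }

      private void generateFileFor(Long dataUid) {
        // hypothetical: write the file, then mark file_created_yn = true
      }
    }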
@@ -212,13 +212,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     ZonedDateTime now = ZonedDateTime.now();
     return (int)
-        queryFactory
-            .update(inferenceEntity)
-            .set(inferenceEntity.fileCreatedYn, false)
-            .set(inferenceEntity.fileCreatedDttm, (ZonedDateTime) null)
-            .set(inferenceEntity.updatedDttm, now)
-            .where(inferenceEntity.id.eq(dataUid))
-            .execute();
+            queryFactory
+                .update(inferenceEntity)
+                .set(inferenceEntity.fileCreatedYn, false)
+                .set(inferenceEntity.fileCreatedDttm, (ZonedDateTime) null)
+                .set(inferenceEntity.updatedDttm, now)
+                .where(inferenceEntity.id.eq(dataUid))
+                .execute();
   }
 
   /**
@@ -231,13 +231,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     ZonedDateTime now = ZonedDateTime.now();
     return (int)
-        queryFactory
-            .update(inferenceEntity)
-            .set(inferenceEntity.fileCreatedYn, true)
-            .set(inferenceEntity.fileCreatedDttm, now)
-            .set(inferenceEntity.updatedDttm, now)
-            .where(inferenceEntity.id.eq(dataUid))
-            .execute();
+            queryFactory
+                .update(inferenceEntity)
+                .set(inferenceEntity.fileCreatedYn, true)
+                .set(inferenceEntity.fileCreatedDttm, now)
+                .set(inferenceEntity.updatedDttm, now)
+                .where(inferenceEntity.id.eq(dataUid))
+                .execute();
   }
 
   /**
@@ -250,13 +250,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     ZonedDateTime now = ZonedDateTime.now();
     return (int)
-        queryFactory
-            .update(inferenceGeomEntity)
-            .set(inferenceGeomEntity.fileCreatedYn, false)
-            .set(inferenceGeomEntity.fileCreatedDttm, (ZonedDateTime) null)
-            .set(inferenceGeomEntity.updatedDttm, now)
-            .where(inferenceGeomEntity.dataUid.eq(dataUid))
-            .execute();
+            queryFactory
+                .update(inferenceGeomEntity)
+                .set(inferenceGeomEntity.fileCreatedYn, false)
+                .set(inferenceGeomEntity.fileCreatedDttm, (ZonedDateTime) null)
+                .set(inferenceGeomEntity.updatedDttm, now)
+                .where(inferenceGeomEntity.dataUid.eq(dataUid))
+                .execute();
   }
 
   /**
@@ -274,13 +274,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     ZonedDateTime now = ZonedDateTime.now();
     return (int)
-        queryFactory
-            .update(inferenceGeomEntity)
-            .set(inferenceGeomEntity.fileCreatedYn, true)
-            .set(inferenceGeomEntity.fileCreatedDttm, now)
-            .set(inferenceGeomEntity.updatedDttm, now)
-            .where(inferenceGeomEntity.geoUid.in(geoUids))
-            .execute();
+            queryFactory
+                .update(inferenceGeomEntity)
+                .set(inferenceGeomEntity.fileCreatedYn, true)
+                .set(inferenceGeomEntity.fileCreatedDttm, now)
+                .set(inferenceGeomEntity.updatedDttm, now)
+                .where(inferenceGeomEntity.geoUid.in(geoUids))
+                .execute();
   }
 
   // ===============================
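The update above flags geometry rows through a single geoUid.in(geoUids) predicate. If callers ever pass very large ID lists, chunking the input keeps the IN parameter list bounded; a hedged sketch (the wrapper name and chunk size are illustrative, and markGeomsFileCreated stands in for the bulk-update method above, whose real name is outside this hunk):

    import java.util.List;

    // Illustrative chunking wrapper around the bulk update shown above.
    public int markGeomsFileCreatedInChunks(List<Long> geoUids, int chunkSize) {
      int updated = 0;
      for (int i = 0; i < geoUids.size(); i += chunkSize) {
        List<Long> chunk = geoUids.subList(i, Math.min(i + chunkSize, geoUids.size()));
        updated += markGeomsFileCreated(chunk); // stand-in name for the method above
      }
      return updated;
    }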
@@ -294,18 +294,18 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    */
   @Override
   public List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(
-      Long dataUid, int limit) {
+          Long dataUid, int limit) {
     return queryFactory
-        .selectFrom(inferenceGeomEntity)
-        .where(
-            inferenceGeomEntity.dataUid.eq(dataUid),
-            inferenceGeomEntity.geom.isNotNull(),
-            inferenceGeomEntity
-                .fileCreatedYn
-                .isFalse()
-                .or(inferenceGeomEntity.fileCreatedYn.isNull()))
-        .orderBy(inferenceGeomEntity.geoUid.asc())
-        .limit(limit)
-        .fetch();
+            .selectFrom(inferenceGeomEntity)
+            .where(
+                inferenceGeomEntity.dataUid.eq(dataUid),
+                inferenceGeomEntity.geom.isNotNull(),
+                inferenceGeomEntity
+                    .fileCreatedYn
+                    .isFalse()
+                    .or(inferenceGeomEntity.fileCreatedYn.isNull()))
+            .orderBy(inferenceGeomEntity.geoUid.asc())
+            .limit(limit)
+            .fetch();
   }
 }
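The pending predicate fileCreatedYn.isFalse().or(fileCreatedYn.isNull()) appears in both findPendingDataUids and findGeomEntitiesByDataUid. A possible follow-up, sketched here rather than part of this commit, is extracting it into a shared helper so the two queries cannot drift apart:

    import com.querydsl.core.types.dsl.BooleanExpression;
    import com.querydsl.core.types.dsl.BooleanPath;

    // Treats NULL and false identically, matching both finders above.
    private static BooleanExpression filePending(BooleanPath fileCreatedYn) {
      return fileCreatedYn.isFalse().or(fileCreatedYn.isNull());
    }

    // Usage: .where(filePending(inferenceEntity.fileCreatedYn))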