Apply spotless
@@ -19,15 +19,19 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  private final EntityManager em;

  private final QMapSheetAnalInferenceEntity inferenceEntity =
      QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;

- /** tb_map_sheet_anal_data_inference */
+ /**
+  * tb_map_sheet_anal_data_inference
+  */
  private final QMapSheetAnalDataInferenceEntity inferenceDataEntity =
      QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;

- /** tb_map_sheet_anal_data_inference_geom */
+ /**
+  * tb_map_sheet_anal_data_inference_geom
+  */
  private final QMapSheetAnalDataInferenceGeomEntity inferenceGeomEntity =
      QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;

  // ===============================
  // Upsert (Native only)
@@ -36,7 +40,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  @Override
  public Long upsertGroupsFromMapSheetAnal(Long id) {
    String sql =
        """
        INSERT INTO tb_map_sheet_anal_inference (
            stage,
            compare_yyyy,
@@ -77,8 +81,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  }

  /**
-  * Creates/updates analysis data units (stage, compare_yyyy, target_yyyy, map_sheet_num)
-  * based on the inference_results table.
+  * Creates/updates analysis data units (stage, compare_yyyy, target_yyyy, map_sheet_num) based on the inference_results table.
   *
   * <p>- file_created_yn = false at initial creation - detecting_cnt is based on the inference_results row count
   *
@@ -88,7 +91,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  public void upsertGroupsFromInferenceResults(Long analId) {

    String sql =
        """
        INSERT INTO tb_map_sheet_anal_data_inference (
            anal_uid,
            compare_yyyy,
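The hunks only show the opening lines of each native statement. For orientation, an upsert with the behaviour the Javadoc above describes (one row per analysis data unit, detecting_cnt derived from inference_results counts, file_created_yn set to false only when a row is first created) is typically written in PostgreSQL as INSERT ... SELECT ... ON CONFLICT DO UPDATE. The sketch below is illustrative only: the grouping, the source columns beyond those visible in the diff, and the conflict target (the Javadoc also lists stage as part of the key) are assumptions, not this repository's actual SQL.

// Hedged sketch of the upsert pattern only; not the project's real query.
import jakarta.persistence.EntityManager;

class InferenceGroupUpsertSketch {

  private final EntityManager em;

  InferenceGroupUpsertSketch(EntityManager em) {
    this.em = em;
  }

  void upsertGroups(Long analId) {
    String sql =
        """
        INSERT INTO tb_map_sheet_anal_data_inference (
            anal_uid, compare_yyyy, target_yyyy, map_sheet_num, detecting_cnt, file_created_yn)
        SELECT CAST(:analId AS bigint), r.input1, r.input2, r.map_id::bigint, COUNT(*), false
        FROM inference_results r
        WHERE r.map_id ~ '^[0-9]+$'
        GROUP BY r.input1, r.input2, r.map_id
        ON CONFLICT (anal_uid, compare_yyyy, target_yyyy, map_sheet_num)
        -- refresh only the count; file_created_yn keeps its value, so "false on first creation" survives re-runs
        DO UPDATE SET detecting_cnt = EXCLUDED.detecting_cnt
        """;
    em.createNativeQuery(sql).setParameter("analId", analId).executeUpdate();
  }
}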
@@ -144,7 +147,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  public void upsertGeomsFromInferenceResults(Long analUid) {

    String sql =
        """
        INSERT INTO tb_map_sheet_anal_data_inference_geom (
            result_uid,
            stage,
@@ -184,9 +187,9 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  SELECT
      r.uid AS result_uid,
      msadi.stage,
-     r.cd_prob,
-     r.input1 AS compare_yyyy,
-     r.input2 AS target_yyyy,
+     r.after_p as cd_prob,
+     msl.compare_yyyy,
+     msl.target_yyyy,
      CASE
          WHEN r.map_id ~ '^[0-9]+$' THEN r.map_id::bigint
          ELSE NULL
@@ -239,7 +242,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  public void upsertSttcFromInferenceResults(Long analUid) {

    String sql =
        """
        INSERT INTO tb_map_sheet_anal_sttc
        (
            compare_yyyy
@@ -324,22 +327,22 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
  public List<Long> findPendingDataUids(int limit, Long learnId) {

    return queryFactory
        .select(inferenceDataEntity.id)
        .from(inferenceEntity)
        .innerJoin(inferenceDataEntity)
        .on(inferenceEntity.id.eq(inferenceDataEntity.analUid))
        .where(
            inferenceEntity
                .learnId
                .eq(learnId)
                .and(
                    inferenceDataEntity
                        .fileCreatedYn
                        .isFalse()
                        .or(inferenceDataEntity.fileCreatedYn.isNull())))
        .orderBy(inferenceDataEntity.id.asc())
        .limit(limit)
        .fetch();
  }

  // ===============================
@@ -358,13 +361,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    ZonedDateTime now = ZonedDateTime.now();

    return (int)
        queryFactory
            .update(inferenceDataEntity)
            .set(inferenceDataEntity.fileCreatedYn, false)
            .set(inferenceDataEntity.fileCreatedDttm, (ZonedDateTime) null)
            .set(inferenceDataEntity.updatedDttm, now)
            .where(inferenceDataEntity.id.eq(dataUid))
            .execute();
  }

  /**
@@ -377,13 +380,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    ZonedDateTime now = ZonedDateTime.now();

    return (int)
        queryFactory
            .update(inferenceDataEntity)
            .set(inferenceDataEntity.fileCreatedYn, true)
            .set(inferenceDataEntity.fileCreatedDttm, now)
            .set(inferenceDataEntity.updatedDttm, now)
            .where(inferenceDataEntity.id.eq(dataUid))
            .execute();
  }

  /**
@@ -396,13 +399,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    ZonedDateTime now = ZonedDateTime.now();

    return (int)
        queryFactory
            .update(inferenceGeomEntity)
            .set(inferenceGeomEntity.fileCreatedYn, false)
            .set(inferenceGeomEntity.fileCreatedDttm, (ZonedDateTime) null)
            .set(inferenceGeomEntity.updatedDttm, now)
            .where(inferenceGeomEntity.dataUid.eq(dataUid))
            .execute();
  }

  /**
@@ -420,13 +423,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    ZonedDateTime now = ZonedDateTime.now();

    return (int)
        queryFactory
            .update(inferenceGeomEntity)
            .set(inferenceGeomEntity.fileCreatedYn, true)
            .set(inferenceGeomEntity.fileCreatedDttm, now)
            .set(inferenceGeomEntity.updatedDttm, now)
            .where(inferenceGeomEntity.geoUid.in(geoUids))
            .execute();
  }

  // ===============================
@@ -440,18 +443,18 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   */
  @Override
  public List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(
      Long dataUid, int limit) {
    return queryFactory
        .selectFrom(inferenceGeomEntity)
        .where(
            inferenceGeomEntity.dataUid.eq(dataUid),
            inferenceGeomEntity.geom.isNotNull(),
            inferenceGeomEntity
                .fileCreatedYn
                .isFalse()
                .or(inferenceGeomEntity.fileCreatedYn.isNull()))
        .orderBy(inferenceGeomEntity.geoUid.asc())
        .limit(limit)
        .fetch();
  }
}
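Taken together, the query methods visible in this diff form a small polling API: findPendingDataUids returns the data rows whose file_created_yn is still false or null for a given learnId, and findGeomEntitiesByDataUid pages through the not-yet-exported geometries of one such row. A minimal, hypothetical caller is sketched below; the batch sizes, the class name and the surrounding service and transaction handling are assumptions, not part of this repository.

// Hypothetical caller; only shows how the two read methods in this diff compose.
import java.util.List;

class PendingInferenceExportSketch {

  private final InferenceResultRepositoryImpl repository; // or the custom interface it implements

  PendingInferenceExportSketch(InferenceResultRepositoryImpl repository) {
    this.repository = repository;
  }

  void drainPending(Long learnId) {
    // data rows with file_created_yn false or null, ordered by id, at most 100 per pass
    List<Long> pendingDataUids = repository.findPendingDataUids(100, learnId);
    for (Long dataUid : pendingDataUids) {
      // geometries of one pending row that still need to be written out (limit 500 here)
      List<MapSheetAnalDataInferenceGeomEntity> geoms =
          repository.findGeomEntitiesByDataUid(dataUid, 500);
      // ... write the export file from geoms, then flip the file_created_yn flags
      //     using the update methods shown earlier in this class ...
    }
  }
}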