Apply Spotless formatting

2026-01-05 10:17:24 +09:00
parent 4cea4bb2ed
commit 829cb510e3
3 changed files with 81 additions and 84 deletions
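
The formatting visible in the hunks below (two-space indentation, arguments rewrapped at roughly 100 columns, short Javadoc collapsed onto a single line) is consistent with google-java-format as applied through the Spotless Gradle plugin. The build script is not part of this commit, so the following build.gradle.kts is only a sketch of a configuration that would produce this style; the plugin version, target pattern, and extra steps are assumptions, not taken from the repository.

// build.gradle.kts - hypothetical sketch, not the project's actual build script
plugins {
  java
  id("com.diffplug.spotless") version "6.25.0" // assumed version
}

spotless {
  java {
    target("src/**/*.java") // assumed source layout
    googleJavaFormat() // 2-space indent, ~100-column wrap, collapses short Javadoc to one line
    removeUnusedImports()
    trimTrailingWhitespace()
    endWithNewline()
  }
}

With a setup like this, ./gradlew spotlessApply rewrites the sources in place (the kind of change this commit records), and ./gradlew spotlessCheck can be run in CI to reject code that drifts from the configured style.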

LabelWorkerApiController.java

@@ -32,29 +32,26 @@ public class LabelWorkerApiController {
   @Operation(summary = "라벨링작업 관리 > 목록 조회", description = "라벨링작업 관리 > 목록 조회")
   @ApiResponses(
       value = {
         @ApiResponse(
             responseCode = "200",
             description = "조회 성공",
             content =
                 @Content(
                     mediaType = "application/json",
                     schema = @Schema(implementation = CommonCodeDto.Basic.class))),
         @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
         @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
       })
   @GetMapping("/label-work-mng-list")
   public ApiResponseDto<Page<LabelWorkMng>> labelWorkMngList(
-      @Schema(description = "변화탐지년도", example = "2024")
-          Integer detectYyyy,
-      @Schema(description = "시작", example = "20260101")
-          String strtDttm,
-      @Schema(description = "종료일", example = "20261201")
-          String endDttm,
+      @Schema(description = "변화탐지년도", example = "2024") Integer detectYyyy,
+      @Schema(description = "시작일", example = "20260101") String strtDttm,
+      @Schema(description = "종료", example = "20261201") String endDttm,
       @Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0")
           int page,
       @Parameter(description = "페이지 크기", example = "20") @RequestParam(defaultValue = "20")
           int size) {
     LabelWorkDto.LabelWorkMngSearchReq searchReq = new LabelWorkMngSearchReq();
     searchReq.setDetectYyyy(detectYyyy);
     searchReq.setStrtDttm(strtDttm);

MapSheetAnalInferenceEntity.java

@@ -25,8 +25,13 @@ import org.hibernate.type.SqlTypes;
 public class MapSheetAnalInferenceEntity {
   @Id
-  @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tb_map_sheet_anal_inference_id_gen")
-  @SequenceGenerator(name = "tb_map_sheet_anal_inference_id_gen", sequenceName = "tb_map_sheet_anal_inference_uid", allocationSize = 1)
+  @GeneratedValue(
+      strategy = GenerationType.SEQUENCE,
+      generator = "tb_map_sheet_anal_inference_id_gen")
+  @SequenceGenerator(
+      name = "tb_map_sheet_anal_inference_id_gen",
+      sequenceName = "tb_map_sheet_anal_inference_uid",
+      allocationSize = 1)
   @Column(name = "anal_uid", nullable = false)
   private Long id;
@@ -144,5 +149,4 @@ public class MapSheetAnalInferenceEntity {
   @Column(name = "stage")
   private Integer stage;
 }

InferenceResultRepositoryImpl.java

@@ -17,20 +17,15 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   private final JPAQueryFactory queryFactory;

-  @PersistenceContext
-  private final EntityManager em;
+  @PersistenceContext private final EntityManager em;

-  /**
-   * tb_map_sheet_anal_data_inference
-   */
+  /** tb_map_sheet_anal_data_inference */
   private final QMapSheetAnalDataInferenceEntity inferenceEntity =
       QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;

-  /**
-   * tb_map_sheet_anal_data_inference_geom
-   */
+  /** tb_map_sheet_anal_data_inference_geom */
   private final QMapSheetAnalDataInferenceGeomEntity inferenceGeomEntity =
       QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;

   // ===============================
   // Upsert (Native only)
@@ -39,7 +34,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   @Override
   public int upsertGroupsFromMapSheetAnal() {
     String sql =
         """
         INSERT INTO tb_map_sheet_anal_inference (
             compare_yyyy,
@@ -60,7 +55,8 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   }

   /**
-   * inference_results 테이블을 기준으로 분석 데이터 단위(stage, compare_yyyy, target_yyyy, map_sheet_num)를 생성/갱신한다.
+   * inference_results 테이블을 기준으로 분석 데이터 단위(stage, compare_yyyy, target_yyyy, map_sheet_num)를
+   * 생성/갱신한다.
    *
    * <p>- 최초 생성 시 file_created_yn = false - detecting_cnt는 inference_results 건수 기준
    *
@@ -70,7 +66,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   public int upsertGroupsFromInferenceResults() {
     String sql =
         """
         INSERT INTO tb_map_sheet_anal_data_inference (
             stage,
             compare_yyyy,
@@ -113,7 +109,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   public int upsertGeomsFromInferenceResults() {
     String sql =
         """
         INSERT INTO tb_map_sheet_anal_data_inference_geom (
             uuid, stage, cd_prob, compare_yyyy, target_yyyy, map_sheet_num,
             class_before_cd, class_before_prob, class_after_cd, class_after_prob,
@@ -188,12 +184,12 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
   @Override
   public List<Long> findPendingDataUids(int limit) {
     return queryFactory
         .select(inferenceEntity.id)
         .from(inferenceEntity)
         .where(inferenceEntity.fileCreatedYn.isFalse().or(inferenceEntity.fileCreatedYn.isNull()))
         .orderBy(inferenceEntity.id.asc())
         .limit(limit)
         .fetch();
   }

   // ===============================
@@ -212,13 +208,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     ZonedDateTime now = ZonedDateTime.now();
     return (int)
         queryFactory
             .update(inferenceEntity)
             .set(inferenceEntity.fileCreatedYn, false)
             .set(inferenceEntity.fileCreatedDttm, (ZonedDateTime) null)
             .set(inferenceEntity.updatedDttm, now)
             .where(inferenceEntity.id.eq(dataUid))
             .execute();
   }

   /**
@@ -231,13 +227,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     ZonedDateTime now = ZonedDateTime.now();
     return (int)
         queryFactory
             .update(inferenceEntity)
             .set(inferenceEntity.fileCreatedYn, true)
             .set(inferenceEntity.fileCreatedDttm, now)
             .set(inferenceEntity.updatedDttm, now)
             .where(inferenceEntity.id.eq(dataUid))
             .execute();
   }

   /**
@@ -250,13 +246,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     ZonedDateTime now = ZonedDateTime.now();
     return (int)
         queryFactory
             .update(inferenceGeomEntity)
             .set(inferenceGeomEntity.fileCreatedYn, false)
             .set(inferenceGeomEntity.fileCreatedDttm, (ZonedDateTime) null)
             .set(inferenceGeomEntity.updatedDttm, now)
             .where(inferenceGeomEntity.dataUid.eq(dataUid))
             .execute();
   }

   /**
@@ -274,13 +270,13 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
     ZonedDateTime now = ZonedDateTime.now();
     return (int)
         queryFactory
             .update(inferenceGeomEntity)
             .set(inferenceGeomEntity.fileCreatedYn, true)
             .set(inferenceGeomEntity.fileCreatedDttm, now)
             .set(inferenceGeomEntity.updatedDttm, now)
             .where(inferenceGeomEntity.geoUid.in(geoUids))
             .execute();
   }

   // ===============================
@@ -294,18 +290,18 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
    */
   @Override
   public List<MapSheetAnalDataInferenceGeomEntity> findGeomEntitiesByDataUid(
       Long dataUid, int limit) {
     return queryFactory
         .selectFrom(inferenceGeomEntity)
         .where(
             inferenceGeomEntity.dataUid.eq(dataUid),
             inferenceGeomEntity.geom.isNotNull(),
             inferenceGeomEntity
                 .fileCreatedYn
                 .isFalse()
                 .or(inferenceGeomEntity.fileCreatedYn.isNull()))
         .orderBy(inferenceGeomEntity.geoUid.asc())
         .limit(limit)
         .fetch();
   }
 }