diff --git a/src/main/java/com/kamco/cd/kamcoback/geojson/service/GeoJsonFileMonitorService.java b/src/main/java/com/kamco/cd/kamcoback/geojson/service/GeoJsonFileMonitorService.java index f493322c..43acf2b3 100644 --- a/src/main/java/com/kamco/cd/kamcoback/geojson/service/GeoJsonFileMonitorService.java +++ b/src/main/java/com/kamco/cd/kamcoback/geojson/service/GeoJsonFileMonitorService.java @@ -39,10 +39,10 @@ public class GeoJsonFileMonitorService { public void initializeDirectories() { try { log.info("GeoJSON 모니터링 시스템 초기화 중..."); - log.info("설정된 경로 - Watch: {}, Processed: {}, Error: {}, Temp: {}", - config.getWatchDirectory(), config.getProcessedDirectory(), + log.info("설정된 경로 - Watch: {}, Processed: {}, Error: {}, Temp: {}", + config.getWatchDirectory(), config.getProcessedDirectory(), config.getErrorDirectory(), config.getTempDirectory()); - + ensureDirectoriesExist(); log.info("GeoJSON 모니터링 시스템 초기화 완료"); } catch (Exception e) { @@ -55,26 +55,26 @@ public class GeoJsonFileMonitorService { * 스케줄러를 통한 파일 모니터링 * 설정된 cron 표현식에 따라 주기적으로 실행 */ - @Scheduled(cron = "#{@geoJsonMonitorConfig.cronExpression}") +// @Scheduled(cron = "#{@geoJsonMonitorConfig.cronExpression}") public void monitorFiles() { log.debug("파일 모니터링 시작"); - + try { // 모니터링 폴더 존재 확인 및 생성 ensureDirectoriesExist(); - + // 압축파일 검색 및 처리 processArchiveFiles(); - + // 미처리된 Geometry 변환 작업 수행 processUnprocessedGeometryData(); - + } catch (RuntimeException e) { log.error("파일 모니터링 중 치명적 오류 발생 - 이번 주기 건너뜀", e); } catch (Exception e) { log.error("파일 모니터링 중 오류 발생", e); } - + log.debug("파일 모니터링 완료"); } @@ -89,28 +89,28 @@ public class GeoJsonFileMonitorService { log.error("Watch 디렉토리 생성 실패: {} - {}", config.getWatchDirectory(), e.getMessage()); hasError = true; } - + try { createDirectoryIfNotExists(config.getProcessedDirectory()); } catch (IOException e) { log.error("Processed 디렉토리 생성 실패: {} - {}", config.getProcessedDirectory(), e.getMessage()); hasError = true; } - + try { 
createDirectoryIfNotExists(config.getErrorDirectory()); } catch (IOException e) { log.error("Error 디렉토리 생성 실패: {} - {}", config.getErrorDirectory(), e.getMessage()); hasError = true; } - + try { createDirectoryIfNotExists(config.getTempDirectory()); } catch (IOException e) { log.error("Temp 디렉토리 생성 실패: {} - {}", config.getTempDirectory(), e.getMessage()); hasError = true; } - + if (hasError) { log.warn("일부 디렉토리 생성에 실패했습니다. 해당 기능은 제한될 수 있습니다."); log.info("수동으로 다음 디렉토리들을 생성해주세요:"); @@ -130,14 +130,14 @@ public class GeoJsonFileMonitorService { if (directory == null || directory.trim().isEmpty()) { throw new IllegalArgumentException("디렉토리 경로가 비어있습니다."); } - + Path path = Paths.get(directory); - + if (!Files.exists(path)) { try { Files.createDirectories(path); log.info("디렉토리 생성 완료: {}", directory); - + // 디렉토리 권한 설정 (Unix/Linux 환경에서) try { if (!System.getProperty("os.name").toLowerCase().contains("windows")) { @@ -149,7 +149,7 @@ public class GeoJsonFileMonitorService { } catch (Exception permissionException) { log.debug("권한 설정 실패 (무시됨): {}", permissionException.getMessage()); } - + } catch (IOException e) { log.error("디렉토리 생성 실패: {} - {}", directory, e.getMessage()); throw new IOException("디렉토리를 생성할 수 없습니다: " + directory, e); @@ -168,29 +168,29 @@ public class GeoJsonFileMonitorService { */ private void processArchiveFiles() { Path watchDir = Paths.get(config.getWatchDirectory()); - + // 디렉토리 존재 확인 if (!Files.exists(watchDir)) { log.debug("Watch 디렉토리가 존재하지 않습니다: {}", watchDir); return; } - + if (!Files.isDirectory(watchDir)) { log.warn("Watch 경로가 디렉토리가 아닙니다: {}", watchDir); return; } - + if (!Files.isReadable(watchDir)) { log.warn("Watch 디렉토리에 읽기 권한이 없습니다: {}", watchDir); return; } - + try (Stream files = Files.list(watchDir)) { files.filter(Files::isRegularFile) .filter(archiveExtractorService::isSupportedArchive) .filter(archiveExtractorService::isFileSizeValid) .forEach(this::processArchiveFile); - + } catch (IOException e) { log.error("파일 목록 조회 실패: {}", watchDir, 
e); } @@ -202,42 +202,42 @@ public class GeoJsonFileMonitorService { private void processArchiveFile(Path archiveFile) { String fileName = archiveFile.getFileName().toString(); log.info("압축파일 처리 시작: {}", fileName); - + try { // 1. 압축파일에서 GeoJSON 파일들 추출 Map geoJsonContents = archiveExtractorService.extractGeoJsonFiles(archiveFile); - + if (geoJsonContents.isEmpty()) { log.warn("압축파일에서 GeoJSON 파일을 찾을 수 없습니다: {}", fileName); moveFileToError(archiveFile, "GeoJSON 파일 없음"); return; } - + // 2. 처리 가능한 파일 수인지 확인 if (!geoJsonDataService.isProcessable(geoJsonContents)) { log.warn("처리할 수 없는 파일입니다: {}", fileName); moveFileToError(archiveFile, "처리 불가능한 파일"); return; } - + // 3. GeoJSON 데이터를 데이터베이스에 저장 List savedLearnDataIds = geoJsonDataService.processGeoJsonFiles(geoJsonContents, fileName); - + if (savedLearnDataIds.isEmpty()) { log.warn("저장된 학습 데이터가 없습니다: {}", fileName); moveFileToError(archiveFile, "데이터 저장 실패"); return; } - + // 4. Geometry 데이터로 변환 List geometryIds = geometryConversionService.convertToGeometryData(savedLearnDataIds); - + // 5. 
처리 완료된 파일을 처리된 폴더로 이동 moveFileToProcessed(archiveFile); - - log.info("압축파일 처리 완료: {} (학습 데이터: {}개, Geometry: {}개)", + + log.info("압축파일 처리 완료: {} (학습 데이터: {}개, Geometry: {}개)", fileName, savedLearnDataIds.size(), geometryIds.size()); - + } catch (Exception e) { log.error("압축파일 처리 실패: {}", fileName, e); try { @@ -269,7 +269,7 @@ public class GeoJsonFileMonitorService { String fileName = sourceFile.getFileName().toString(); String timestampedFileName = addTimestamp(fileName); Path targetPath = Paths.get(config.getProcessedDirectory(), timestampedFileName); - + Files.move(sourceFile, targetPath, StandardCopyOption.REPLACE_EXISTING); log.info("파일을 처리된 폴더로 이동: {} -> {}", fileName, timestampedFileName); } @@ -281,16 +281,16 @@ public class GeoJsonFileMonitorService { String fileName = sourceFile.getFileName().toString(); String errorFileName = addTimestamp(fileName) + ".error"; Path targetPath = Paths.get(config.getErrorDirectory(), errorFileName); - + Files.move(sourceFile, targetPath, StandardCopyOption.REPLACE_EXISTING); - + // 오류 정보를 별도 파일로 저장 String errorInfoFileName = errorFileName + ".info"; Path errorInfoPath = Paths.get(config.getErrorDirectory(), errorInfoFileName); - String errorInfo = String.format("파일: %s%n오류 시간: %s%n오류 원인: %s%n", + String errorInfo = String.format("파일: %s%n오류 시간: %s%n오류 원인: %s%n", fileName, java.time.Instant.now(), errorReason); Files.write(errorInfoPath, errorInfo.getBytes()); - + log.warn("파일을 오류 폴더로 이동: {} (원인: {})", fileName, errorReason); } @@ -301,7 +301,7 @@ public class GeoJsonFileMonitorService { int lastDotIndex = fileName.lastIndexOf('.'); String name = (lastDotIndex > 0) ? fileName.substring(0, lastDotIndex) : fileName; String extension = (lastDotIndex > 0) ? 
fileName.substring(lastDotIndex) : ""; - + return String.format("%s_%d%s", name, System.currentTimeMillis(), extension); } @@ -310,17 +310,17 @@ public class GeoJsonFileMonitorService { */ public void processFileManually(String filePath) { Path archiveFile = Paths.get(filePath); - + if (!Files.exists(archiveFile)) { log.error("파일이 존재하지 않습니다: {}", filePath); return; } - + if (!archiveExtractorService.isSupportedArchive(archiveFile)) { log.error("지원하지 않는 압축파일 형식입니다: {}", filePath); return; } - + log.info("수동 파일 처리 시작: {}", filePath); processArchiveFile(archiveFile); } @@ -359,22 +359,22 @@ public class GeoJsonFileMonitorService { */ public Map getSystemStats() { Map stats = new HashMap<>(); - + try { // 데이터베이스 통계 long totalLearnData = learnDataRepository.count(); long totalGeomData = geomRepository.count(); long pendingAnalysis = learnDataRepository.countByAnalState("PENDING"); - + stats.put("database", Map.of( "totalLearnData", totalLearnData, "totalGeomData", totalGeomData, "pendingAnalysis", pendingAnalysis )); - + // 파일 시스템 통계 stats.put("fileSystem", getFileSystemStats()); - + // 모니터링 설정 stats.put("monitoring", Map.of( "isActive", true, @@ -383,39 +383,39 @@ public class GeoJsonFileMonitorService { "processedDirectory", config.getProcessedDirectory(), "errorDirectory", config.getErrorDirectory() )); - + } catch (Exception e) { log.error("통계 정보 조회 실패", e); stats.put("error", e.getMessage()); } - + return stats; } - + /** * 파일 시스템 통계 조회 */ private Map getFileSystemStats() { Map fileStats = new HashMap<>(); - + try { // 각 디렉토리의 파일 수 계산 Path watchDir = Paths.get(config.getWatchDirectory()); Path processedDir = Paths.get(config.getProcessedDirectory()); Path errorDir = Paths.get(config.getErrorDirectory()); - + fileStats.put("watchDirectoryCount", countFilesInDirectory(watchDir)); fileStats.put("processedDirectoryCount", countFilesInDirectory(processedDir)); fileStats.put("errorDirectoryCount", countFilesInDirectory(errorDir)); - + } catch (Exception e) { log.warn("파일 
시스템 통계 조회 실패: {}", e.getMessage()); fileStats.put("error", e.getMessage()); } - + return fileStats; } - + /** * 디렉토리 내 파일 개수 계산 */ @@ -423,7 +423,7 @@ public class GeoJsonFileMonitorService { if (!Files.exists(directory) || !Files.isDirectory(directory)) { return 0; } - + try (Stream files = Files.list(directory)) { return files.filter(Files::isRegularFile).count(); } catch (IOException e) { @@ -431,4 +431,4 @@ public class GeoJsonFileMonitorService { return 0; } } -} \ No newline at end of file +} diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultApiController.java b/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultApiController.java index 88493e20..0b208b5f 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultApiController.java @@ -1,10 +1,21 @@ package com.kamco.cd.kamcoback.inference; +import com.kamco.cd.kamcoback.config.api.ApiResponseDto; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; import com.kamco.cd.kamcoback.inference.service.InferenceResultService; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; import java.util.List; import lombok.RequiredArgsConstructor; +import org.springframework.data.domain.Page; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; @Tag(name = "분석결과", description = "추론관리 분석결과") @@ -15,4 +26,34 @@ public class InferenceResultApiController { private final 
InferenceResultService inferenceResultService; + @Operation( + summary = "추론관리 분석결과 목록 조회", + description = + "분석상태, 제목으로 분석결과를 조회 합니다.") + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "검색 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = Page.class))), + @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) + @GetMapping("/list") + public ApiResponseDto> getInferenceResultList( + @Parameter(description = "분석상태", example = "0000") + @RequestParam(required = false) + String statCode, + @Parameter(description = "검색", example = "2023_2024년도") @RequestParam(required = false) + String title, + @Parameter(description = "페이지 번호 (0부터 시작)", example = "0") @RequestParam(defaultValue = "0") + int page + ) { + InferenceResultDto.SearchReq searchReq = new InferenceResultDto.SearchReq(statCode, title, page, 20, null); + Page results = inferenceResultService.getInferenceResultList(searchReq); + return ApiResponseDto.ok(results); + } } diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java new file mode 100644 index 00000000..72c550d2 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java @@ -0,0 +1,80 @@ +package com.kamco.cd.kamcoback.inference.dto; + +import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm; +import io.swagger.v3.oas.annotations.media.Schema; +import java.time.ZonedDateTime; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; + +public class InferenceResultDto { + + @Schema(name = 
"InferenceResultBasic", description = "분석결과 기본 정보") + @Getter + public static class Basic { + + private Long id; + private String dataName; + private Long mapSheepNum; + private Long detectingCnt; + @JsonFormatDttm + private ZonedDateTime analStrtDttm; + @JsonFormatDttm + private ZonedDateTime analEndDttm; + private Long analSec; + private String analState; + + public Basic( + Long id, + String dataName, + Long mapSheepNum, + Long detectingCnt, + ZonedDateTime analStrtDttm, + ZonedDateTime analEndDttm, + Long analSec, + String analState + ) { + this.id = id; + this.dataName = dataName; + this.mapSheepNum = mapSheepNum; + this.detectingCnt = detectingCnt; + this.analStrtDttm = analStrtDttm; + this.analEndDttm = analEndDttm; + this.analSec = analSec; + this.analState = analState; + } + } + + + @Schema(name = "InferenceResultSearchReq", description = "분석결과 목록 요청 정보") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class SearchReq { + + // 검색 조건 + private String statCode; + private String title; + + // 페이징 파라미터 + private int page = 0; + private int size = 20; + private String sort; + + public Pageable toPageable() { + if (sort != null && !sort.isEmpty()) { + String[] sortParams = sort.split(","); + String property = sortParams[0]; + Sort.Direction direction = + sortParams.length > 1 ? 
Sort.Direction.fromString(sortParams[1]) : Sort.Direction.ASC; + return PageRequest.of(page, size, Sort.by(direction, property)); + } + return PageRequest.of(page, size); + } + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java index 86a7b7d9..86a92530 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java @@ -1,7 +1,10 @@ package com.kamco.cd.kamcoback.inference.service; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Basic; import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService; import lombok.RequiredArgsConstructor; +import org.springframework.data.domain.Page; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -12,6 +15,12 @@ public class InferenceResultService { private final InferenceResultCoreService inferenceResultCoreService; + public Page getInferenceResultList(InferenceResultDto.SearchReq searchReq) { + return inferenceResultCoreService.getInferenceResultList(searchReq); + } + + + } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java index 8384b388..85f3a5c3 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java @@ -1,7 +1,10 @@ package com.kamco.cd.kamcoback.postgres.core; -import com.kamco.cd.kamcoback.common.service.BaseCoreService; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Basic; +import 
com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity; import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository; +import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; @@ -17,4 +20,25 @@ public class InferenceResultCoreService { private final InferenceResultRepository inferenceResultRepository; + public Page getInferenceResultList(InferenceResultDto.SearchReq searchReq) { + Page list = inferenceResultRepository.getInferenceResultList(searchReq); + List result = + list.getContent().stream() + .map(infList ->toDto(infList)) + .collect(Collectors.toList()); + return new PageImpl<>(result, list.getPageable(), list.getTotalElements()); + } + + private Basic toDto(MapSheetAnalDataEntity entity) { + return new Basic( + entity.getId(), + entity.getDataName(), + entity.getMapSheepNum(), + entity.getDetectingCnt(), + entity.getAnalStrtDttm(), + entity.getAnalEndDttm(), + entity.getAnalSec(), + entity.getAnalState() + ); + } } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataEntity.java b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataEntity.java index 109bbd49..3a0886fe 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataEntity.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/MapSheetAnalDataEntity.java @@ -10,6 +10,7 @@ import jakarta.persistence.Table; import jakarta.validation.constraints.Size; import java.time.Instant; import java.time.LocalTime; +import java.time.ZonedDateTime; import java.util.Map; import lombok.Getter; import lombok.Setter; @@ -51,14 +52,14 @@ public class MapSheetAnalDataEntity { @ColumnDefault("now()") @Column(name = "created_dttm") - private Instant createdDttm; + private ZonedDateTime createdDttm; @Column(name = "created_uid") private Long createdUid; @ColumnDefault("now()") @Column(name = "updated_dttm") - private Instant updatedDttm; + 
private ZonedDateTime updatedDttm; @Column(name = "updated_uid") private Long updatedUid; @@ -79,13 +80,13 @@ public class MapSheetAnalDataEntity { @ColumnDefault("now()") @Column(name = "data_state_dttm") - private Instant dataStateDttm; + private ZonedDateTime dataStateDttm; @Column(name = "anal_strt_dttm") - private Instant analStrtDttm; + private ZonedDateTime analStrtDttm; @Column(name = "anal_end_dttm") - private LocalTime analEndDttm; + private ZonedDateTime analEndDttm; @Column(name = "anal_sec") private Long analSec; @@ -100,4 +101,7 @@ public class MapSheetAnalDataEntity { @Column(name = "map_sheep_num") private Long mapSheepNum; + @Column(name = "detecting_cnt") + private Long detectingCnt; + } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java index 45f2d0e4..a24bdbb7 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryCustom.java @@ -1,5 +1,9 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference; -public interface InferenceResultRepositoryCustom { +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; +import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity; +import org.springframework.data.domain.Page; +public interface InferenceResultRepositoryCustom { + Page getInferenceResultList(InferenceResultDto.SearchReq searchReq); } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java index cab90c2f..1ad4a5cc 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java +++ 
b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/InferenceResultRepositoryImpl.java @@ -1,9 +1,16 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference; -import com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnDataEntity; +import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto; +import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataEntity; +import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataEntity; +import com.querydsl.jpa.impl.JPAQuery; import com.querydsl.jpa.impl.JPAQueryFactory; +import java.util.List; import lombok.RequiredArgsConstructor; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Repository; @Repository @@ -11,7 +18,24 @@ import org.springframework.stereotype.Repository; public class InferenceResultRepositoryImpl implements InferenceResultRepositoryCustom { private final JPAQueryFactory queryFactory; - private final QMapSheetLearnDataEntity qSheetLearnData = QMapSheetLearnDataEntity.mapSheetLearnDataEntity; + private final QMapSheetAnalDataEntity mapSheetAnalData = QMapSheetAnalDataEntity.mapSheetAnalDataEntity; + @Override + public Page getInferenceResultList(InferenceResultDto.SearchReq searchReq) { + Pageable pageable = searchReq.toPageable(); + JPAQuery query = + queryFactory.selectFrom(mapSheetAnalData) + ; + + long total = query.fetchCount(); + + List content = + query + .offset(pageable.getOffset()) + .limit(pageable.getPageSize()) + .orderBy(mapSheetAnalData.createdDttm.desc()) + .fetch(); + return new PageImpl<>(content, pageable, total); + } }