학습데이터 업로드, unzip 로직 진행중

This commit is contained in:
2026-02-10 10:43:40 +09:00
parent b4a4486560
commit 89744d2aa1
19 changed files with 621 additions and 43 deletions

View File

@@ -5,6 +5,8 @@ import static java.lang.String.CASE_INSENSITIVE_ORDER;
import io.swagger.v3.oas.annotations.media.Schema;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
@@ -25,13 +27,17 @@ import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FilenameUtils;
import org.geotools.coverage.grid.GridCoverage2D;
import org.geotools.gce.geotiff.GeoTiffReader;
import org.springframework.util.FileSystemUtils;
import org.springframework.web.multipart.MultipartFile;
@Slf4j
public class FIleChecker {
static SimpleDateFormat dttmFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
@@ -492,16 +498,30 @@ public class FIleChecker {
// NOTE(review): this span is a marker-stripped diff — lines from the removed
// (old) implementation and the added (new) one are interleaved below, so it is
// not valid Java as rendered. Tags [old]/[new] mark provenance.
// Saves one multipart chunk as file "<targetPath>/<chunkIndex>"; returns false on IOException.
public static boolean multipartChunkSaveTo(
MultipartFile mfile, String targetPath, int chunkIndex) {
File dest = new File(targetPath, String.valueOf(chunkIndex)); // [old] removed by this commit
boolean fileUpload = true; // [old] flag was never read — dropped in the new version
try {
mfile.transferTo(dest); // [old]
} catch (IOException e) { // [old]
return false; // [old]
// [new] from here: ensure the chunk directory exists before saving
File dir = new File(targetPath);
if (!dir.exists()) {
dir.mkdirs(); // NOTE(review): mkdirs() result ignored — a failure surfaces later as IOException
}
File dest = new File(dir, String.valueOf(chunkIndex));
log.info("chunkIndex={}, uploadSize={}", chunkIndex, mfile.getSize());
log.info("savedSize={}", dest.length()); // 0 when the chunk file does not exist yet
// ⭐ key point: drop any stale partial chunk from a previous attempt before saving
if (dest.exists()) {
dest.delete();
}
log.info("after delete={}", dest.length());
mfile.transferTo(dest);
return true;
} catch (IOException e) {
log.error("chunk save error", e);
return false;
}
}
public static boolean deleteFolder(String path) {
@@ -680,4 +700,59 @@ public class FIleChecker {
this.lastModified = lastModified;
}
}
/**
 * Extracts the zip archive {@code destDirectory/fileName} into {@code destDirectory},
 * guarding every entry against zip-slip via {@link #newFile(File, ZipEntry)}.
 *
 * @param fileName      name of the zip file located inside {@code destDirectory}
 * @param destDirectory directory the archive is extracted into (created if missing)
 * @throws IOException when the archive is missing/unreadable or an entry cannot be written
 */
public static void unzip(String fileName, String destDirectory) throws IOException {
    File destDir = new File(destDirectory);
    // fail fast instead of silently continuing into a FileNotFoundException below
    if (!destDir.exists() && !destDir.mkdirs()) {
        throw new IOException("대상 디렉토리 생성 실패: " + destDir);
    }
    File zipFile = new File(destDir, fileName);
    if (!zipFile.isFile()) {
        throw new IOException("zip 파일이 존재하지 않습니다: " + zipFile);
    }
    try (ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFile))) {
        for (ZipEntry zipEntry = zis.getNextEntry();
                zipEntry != null;
                zipEntry = zis.getNextEntry()) {
            File newFile = newFile(destDir, zipEntry); // zip-slip guard
            if (zipEntry.isDirectory()) {
                if (!newFile.isDirectory() && !newFile.mkdirs()) {
                    throw new IOException("디렉토리 생성 실패: " + newFile);
                }
                continue;
            }
            // archives without explicit directory entries still need the parent created
            File parent = newFile.getParentFile();
            if (parent != null && !parent.exists() && !parent.mkdirs()) {
                throw new IOException("상위 디렉토리 생성 실패: " + parent);
            }
            try (FileOutputStream fos = new FileOutputStream(newFile)) {
                // stdlib bulk copy (Java 9+) — replaces the manual 1 KiB buffer loop
                zis.transferTo(fos);
            }
            zis.closeEntry();
        }
    }
}
/**
 * Resolves a zip entry against the destination directory and rejects entries
 * that would escape it (zip-slip / path-traversal defence).
 *
 * @param destinationDir root directory the archive is being extracted into
 * @param zipEntry       entry whose name may contain ".." components
 * @return the resolved target file, guaranteed to live under {@code destinationDir}
 * @throws IOException when the entry resolves outside the destination directory
 */
public static File newFile(File destinationDir, ZipEntry zipEntry) throws IOException {
File destFile = new File(destinationDir, zipEntry.getName());
// canonical paths resolve ".." and symlinks before the containment check
String destDirPath = destinationDir.getCanonicalPath();
String destFilePath = destFile.getCanonicalPath();
if (!destFilePath.startsWith(destDirPath + File.separator)) {
throw new IOException("엔트리가 대상 디렉토리를 벗어남: " + zipEntry.getName());
}
return destFile;
}
}

View File

@@ -14,10 +14,16 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.validation.Valid;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.core.io.Resource;
import org.springframework.core.io.UrlResource;
import org.springframework.data.domain.Page;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
@Tag(name = "학습데이터 관리", description = "어드민 홈 > 학습데이터관리 > 전체데이터 API")
@@ -235,4 +241,25 @@ public class DatasetApiController {
public ApiResponseDto<DatasetStorage> getUsableBytes() {
return ApiResponseDto.ok(datasetService.getUsableBytes());
}
/**
 * Registers a dataset from a previously uploaded zip archive. Delegates to
 * DatasetService.insertDataset, which unzips the file and persists the
 * dataset master row plus its per-feature objects.
 */
@Operation(summary = "데이터셋 등록", description = "데이터셋을 등록 합니다.")
@PostMapping
public ApiResponseDto<ApiResponseDto.ResponseObj> insertDataset(
@RequestBody @Valid DatasetDto.AddReq addReq) {
return ApiResponseDto.ok(datasetService.insertDataset(addReq));
}
/**
 * Streams the stored file (compare/target/label) for a dataset object.
 *
 * @param uuid     dataset-object uuid
 * @param pathType one of "compare", "target", "label" — selects which stored path to serve
 * @return the file as an octet-stream attachment, or 404 when no path/file exists
 */
@Operation(summary = "파일 Path 조회", description = "파일 Path 조회")
@GetMapping("/files")
public ResponseEntity<Resource> getFile(@RequestParam UUID uuid, @RequestParam String pathType)
        throws Exception {
    String path = datasetService.getFilePathByUUIDPathType(uuid, pathType);
    // the lookup returns null for an unknown uuid — avoid the NPE in Paths.get
    if (path == null || path.isBlank()) {
        return ResponseEntity.notFound().build();
    }
    Path filePath = Paths.get(path);
    Resource resource = new UrlResource(filePath.toUri());
    // do not stream dangling paths — 404 when the file is gone or unreadable
    if (!resource.exists() || !resource.isReadable()) {
        return ResponseEntity.notFound().build();
    }
    return ResponseEntity.ok()
            .contentType(MediaType.APPLICATION_OCTET_STREAM)
            .header("Content-Disposition", "attachment; filename=\"" + filePath.getFileName() + "\"")
            .body(resource);
}
}

View File

@@ -13,6 +13,7 @@ import java.time.ZonedDateTime;
import java.util.List;
import java.util.UUID;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
@@ -132,9 +133,7 @@ public class DatasetDto {
private int size = 20;
public Pageable toPageable() {
// The API pages from 1 while Spring Data's PageRequest pages from 0.
// NOTE(review): marker-stripped diff below — the next two lines are the OLD
// implementation removed by this commit; the final return is the NEW one.
// Dropping the "page - 1" conversion shifts every request by one page (a client
// asking for page 1 now receives the second page) — likely a regression; confirm.
int pageIndex = Math.max(0, page - 1);
return PageRequest.of(pageIndex, size, Sort.by(Sort.Direction.DESC, "createdDttm"));
return PageRequest.of(page, size, Sort.by(Sort.Direction.DESC, "createdDttm"));
}
}
@@ -308,4 +307,45 @@ public class DatasetDto {
Long id;
List<Long> ids;
}
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
// Request body for dataset registration (POST /datasets).
public static class AddReq {
private Long stage; // NOTE(review): not read by DatasetService.insertDataset (the stage is recomputed there) — confirm still needed
private String title;
private String fileName; // uploaded zip file name, including the ".zip" extension
private String filePath; // directory the zip was uploaded to; also the unzip target
private Long fileSize; // stored as the dataset's totalSize
private String memo;
}
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@Builder
// Simple file name/path pair.
// NOTE(review): not referenced anywhere in this change set — confirm it is used.
public static class DatasetFileDto {
private String fileName;
private String filePath;
}
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@Builder
// Registration payload for the tb_dataset master row
// (consumed by DatasetRepositoryImpl.insertDatasetMngData).
public static class DatasetMngRegDto {
private String uid; // identifier derived from the upload folder; also used to read the new row's PK back after insert
private String dataType; // LearnDataType id — uploads are registered as PRODUCTION
private Integer compareYyyy; // comparison (before) year parsed from the file name
private Integer targetYyyy; // target (after) year parsed from the file name
private Long roundNo; // stage: max existing round for the year pair + 1
private String title; // "<compareYyyy>-<targetYyyy>"
private String memo;
private Long totalSize; // uploaded zip size in bytes
private Long totalObjectCount; // NOTE(review): never populated in this change set — confirm
private String datasetPath; // directory the archive was uploaded/extracted to
}
}

View File

@@ -1,11 +1,13 @@
package com.kamco.cd.training.dataset.dto;
import com.fasterxml.jackson.databind.JsonNode;
import com.kamco.cd.training.common.enums.DetectionClassification;
import com.kamco.cd.training.common.utils.interfaces.JsonFormatDttm;
import io.swagger.v3.oas.annotations.media.Schema;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
@@ -38,6 +40,7 @@ public class DatasetObjDto {
private Long createdUid;
private Boolean deleted;
private UUID uuid;
private String geoJsonb;
public Basic(
Long objId,
@@ -54,7 +57,8 @@ public class DatasetObjDto {
ZonedDateTime createdDttm,
Long createdUid,
Boolean deleted,
UUID uuid) {
UUID uuid,
String geoJsonb) {
this.objId = objId;
this.datasetUid = datasetUid;
this.targetYyyy = targetYyyy;
@@ -70,6 +74,7 @@ public class DatasetObjDto {
this.createdUid = createdUid;
this.deleted = deleted;
this.uuid = uuid;
this.geoJsonb = geoJsonb;
}
}
@@ -104,9 +109,7 @@ public class DatasetObjDto {
private int size = 20;
public Pageable toPageable() {
// The API pages from 1 while Spring Data's PageRequest pages from 0.
// NOTE(review): marker-stripped diff below — the next two lines are the OLD
// implementation removed by this commit; the final return is the NEW one.
// Dropping the "page - 1" conversion shifts every request by one page (a client
// asking for page 1 now receives the second page) — likely a regression; confirm.
int pageIndex = Math.max(0, page - 1);
return PageRequest.of(pageIndex, size, Sort.by(Sort.Direction.DESC, "createdDttm"));
return PageRequest.of(page, size, Sort.by(Sort.Direction.DESC, "createdDttm"));
}
}
@@ -127,4 +130,23 @@ public class DatasetObjDto {
public static class DatasetStorage {
private String usableBytes;
}
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@Builder
// Registration payload for one tb_dataset_obj row — one per GeoJSON feature
// (consumed by DatasetObjRepositoryImpl.insertDatasetObj).
public static class DatasetObjRegDto {
private Long datasetUid; // FK to the tb_dataset master row
private Integer compareYyyy;
private String compareClassCd; // feature properties."before"
private Integer targetYyyy;
private String targetClassCd; // feature properties."after"
private String comparePath; // absolute path of the input1 image
private String targetPath; // absolute path of the input2 image
private String labelPath; // absolute path of the label file
private String mapSheetNum; // 4th "_"-separated token of the compare file name
private JsonNode geojson; // the full feature node, stored as jsonb
private String fileName; // NOTE(review): set by the service but not inserted by the native query — confirm
}
}

View File

@@ -1,18 +1,36 @@
package com.kamco.cd.training.dataset.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.training.common.enums.LearnDataType;
import com.kamco.cd.training.common.exception.CustomApiException;
import com.kamco.cd.training.common.service.FormatStorage;
import com.kamco.cd.training.common.utils.FIleChecker;
import com.kamco.cd.training.config.api.ApiResponseDto.ApiResponseCode;
import com.kamco.cd.training.config.api.ApiResponseDto.ResponseObj;
import com.kamco.cd.training.dataset.dto.DatasetDto;
import com.kamco.cd.training.dataset.dto.DatasetDto.AddReq;
import com.kamco.cd.training.dataset.dto.DatasetDto.DatasetMngRegDto;
import com.kamco.cd.training.dataset.dto.DatasetObjDto;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetClass;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetObjRegDto;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetStorage;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.SearchReq;
import com.kamco.cd.training.postgres.core.DatasetCoreService;
import jakarta.validation.Valid;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.stream.Stream;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service;
@@ -21,11 +39,16 @@ import org.springframework.transaction.annotation.Transactional;
@Slf4j
@Service
@RequiredArgsConstructor
@Transactional(readOnly = true)
@Transactional
public class DatasetService {
private final DatasetCoreService datasetCoreService;
@Value("${file.dataset-dir}")
private String datasetDir;
private static final List<String> LABEL_DIRS = List.of("label-json", "label", "input1", "input2");
/**
* 데이터셋 목록 조회
*
@@ -132,4 +155,162 @@ public class DatasetService {
throw new CustomApiException("NOT_FOUND", HttpStatus.NOT_FOUND);
}
}
/**
 * Registers an uploaded dataset: unzips the archive, scans the extracted
 * train/ folders, inserts one tb_dataset master row and one tb_dataset_obj
 * row per GeoJSON feature.
 *
 * @param addReq upload metadata: zip file name, upload directory, size, memo
 * @return OK on success, INTERNAL_SERVER_ERROR with the cause message on I/O failure
 */
public ResponseObj insertDataset(@Valid AddReq addReq) {
    Long datasetUid = 0L; // tb_dataset PK — assigned when the master row is inserted below
    try {
        // 1) extract the archive in place (filePath is the upload directory)
        FIleChecker.unzip(addReq.getFileName(), addReq.getFilePath());
        // 2) group the extracted files by base name across input1/input2/label/label-json
        //    (Paths.get is separator-safe, unlike raw string concatenation)
        String unzipRoot =
            Paths.get(addReq.getFilePath(), addReq.getFileName().replace(".zip", "")).toString();
        List<Map<String, Object>> list = getUnzipDatasetFiles(unzipRoot);
        int idx = 0;
        for (Map<String, Object> map : list) {
            String comparePath = (String) map.get("input1");
            String targetPath = (String) map.get("input2");
            String labelPath = (String) map.get("label");
            JsonNode json = (JsonNode) map.get("label-json");
            // guard against incomplete groups (missing image or label json) instead of NPE-ing
            if (comparePath == null || json == null) {
                log.warn("incomplete dataset group skipped: {}", map.get("baseName"));
                continue;
            }
            // file name convention: <prefix>_<compareYyyy>_<targetYyyy>_<mapSheetNum>_...
            String fileName = Paths.get(comparePath).getFileName().toString();
            String[] fileNameStr = fileName.split("_");
            if (fileNameStr.length < 4) {
                throw new IOException("파일명 형식 오류: " + fileName);
            }
            String compareYyyy = fileNameStr[1];
            String targetYyyy = fileNameStr[2];
            String mapSheetNum = fileNameStr[3];
            // the master row is created once, from the first complete group
            if (idx == 0) {
                String title = compareYyyy + "-" + targetYyyy;
                String dataType = LearnDataType.PRODUCTION.getId(); // manual uploads are always "production"
                Long stage =
                    datasetCoreService.getDatasetMaxStage(
                            Integer.parseInt(compareYyyy), Integer.parseInt(targetYyyy))
                        + 1;
                // NOTE(review): if filePath is "<datasetDir>/<uuid>/", getParent() yields the
                // datasetDir segment rather than the uuid — confirm the intended path shape.
                String uid = Paths.get(addReq.getFilePath()).getParent().getFileName().toString();
                DatasetMngRegDto mngRegDto =
                    DatasetMngRegDto.builder()
                        .uid(uid)
                        .dataType(dataType)
                        .compareYyyy(Integer.parseInt(compareYyyy))
                        .targetYyyy(Integer.parseInt(targetYyyy))
                        .title(title)
                        .memo(addReq.getMemo())
                        .roundNo(stage)
                        .totalSize(addReq.getFileSize())
                        .datasetPath(addReq.getFilePath())
                        .build();
                datasetUid = datasetCoreService.insertDatasetMngData(mngRegDto); // tb_dataset insert
            }
            // one tb_dataset_obj row per feature in the label GeoJSON
            for (JsonNode feature : json.path("features")) {
                JsonNode prop = feature.path("properties");
                String compareClassCd = prop.path("before").asText(null);
                String targetClassCd = prop.path("after").asText(null);
                DatasetObjRegDto objRegDto =
                    DatasetObjRegDto.builder()
                        .datasetUid(datasetUid)
                        .compareYyyy(Integer.parseInt(compareYyyy))
                        .compareClassCd(compareClassCd)
                        .targetYyyy(Integer.parseInt(targetYyyy))
                        .targetClassCd(targetClassCd)
                        .comparePath(comparePath)
                        .targetPath(targetPath)
                        .labelPath(labelPath)
                        .mapSheetNum(mapSheetNum)
                        .geojson(feature)
                        .fileName(fileName)
                        .build();
                datasetCoreService.insertDatasetObj(objRegDto);
            }
            idx++;
        }
    } catch (IOException e) {
        // keep the stack trace — log.error(e.getMessage()) discarded it
        log.error("dataset insert failed", e);
        return new ResponseObj(ApiResponseCode.INTERNAL_SERVER_ERROR, e.getMessage());
    }
    return new ResponseObj(ApiResponseCode.OK, "업로드 성공하였습니다.");
}
/**
 * Scans {@code unzipRootPath}/train/{label-json,label,input1,input2} and groups
 * the regular files by base name (file name without extension).
 *
 * @param unzipRootPath extracted dataset root (parent of the train/ directory)
 * @return one map per base name with keys "baseName", "input1", "input2",
 *     "label" (absolute path strings) and "label-json" (parsed JsonNode)
 * @throws IllegalStateException when an expected sub-directory is missing
 * @throws RuntimeException when a directory listing fails
 */
private List<Map<String, Object>> getUnzipDatasetFiles(String unzipRootPath) {
    Path root = Paths.get(unzipRootPath).resolve("train");
    Map<String, Map<String, Object>> grouped = new HashMap<>();
    for (String dirName : LABEL_DIRS) {
        Path dir = root.resolve(dirName);
        if (!Files.isDirectory(dir)) {
            throw new IllegalStateException("폴더가 존재하지 않습니다 : " + dir);
        }
        try (Stream<Path> stream = Files.list(dir)) {
            stream
                .filter(Files::isRegularFile)
                .forEach(
                    path -> {
                        String fileName = path.getFileName().toString();
                        String baseName = getBaseName(fileName);
                        // files from the four folders that share a base name form one group
                        Map<String, Object> data =
                            grouped.computeIfAbsent(baseName, k -> new HashMap<>());
                        data.put("baseName", baseName);
                        if ("label-json".equals(dirName)) {
                            // label-json entries are parsed up front so callers get a JsonNode
                            data.put("label-json", readJson(path));
                        } else {
                            // image/label entries are stored as absolute path strings
                            data.put(dirName, path.toAbsolutePath().toString());
                        }
                    });
        } catch (IOException e) {
            throw new RuntimeException("파일 목록 조회 실패: " + dir, e);
        }
    }
    return new ArrayList<>(grouped.values());
}
/** Strips the last extension from a file name ("a.b.txt" -> "a.b"; no dot, or a leading dot only, leaves the name unchanged). */
private String getBaseName(String fileName) {
    int dot = fileName.lastIndexOf('.');
    if (dot <= 0) {
        return fileName;
    }
    return fileName.substring(0, dot);
}
/**
 * Parses a JSON file into a Jackson tree.
 *
 * @param jsonPath file to parse
 * @return the root node of the parsed document
 * @throws RuntimeException wrapping the IOException when the file cannot be read or parsed
 */
private JsonNode readJson(Path jsonPath) {
    // NOTE(review): ObjectMapper is thread-safe and costly to build — a shared
    // static final instance would avoid per-call construction.
    ObjectMapper mapper = new ObjectMapper();
    try {
        return mapper.readTree(jsonPath.toFile());
    } catch (IOException e) {
        throw new RuntimeException("JSON 읽기 실패: " + jsonPath, e);
    }
}
/** Resolves the stored compare/target/label path for a dataset-object uuid (delegates to the core service). */
public String getFilePathByUUIDPathType(UUID uuid, String pathType) {
return datasetCoreService.getFilePathByUUIDPathType(uuid, pathType);
}
}

View File

@@ -6,13 +6,16 @@ import com.kamco.cd.training.common.enums.LearnDataType;
import com.kamco.cd.training.common.exception.NotFoundException;
import com.kamco.cd.training.common.service.BaseCoreService;
import com.kamco.cd.training.dataset.dto.DatasetDto;
import com.kamco.cd.training.dataset.dto.DatasetDto.DatasetMngRegDto;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.Basic;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetClass;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetObjRegDto;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.SearchReq;
import com.kamco.cd.training.postgres.entity.DatasetEntity;
import com.kamco.cd.training.postgres.entity.DatasetObjEntity;
import com.kamco.cd.training.postgres.repository.dataset.DatasetObjRepository;
import com.kamco.cd.training.postgres.repository.dataset.DatasetRepository;
import jakarta.transaction.Transactional;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.UUID;
@@ -208,4 +211,22 @@ public class DatasetCoreService
public List<DatasetClass> findDatasetObjClassByUuid(UUID uuid, String type) {
return datasetObjRepository.findDatasetObjClassByUuid(uuid, type);
}
/** Highest existing round_no for the compare/target year pair (0 when none). */
public Long getDatasetMaxStage(int compareYyyy, int targetYyyy) {
return datasetRepository.getDatasetMaxStage(compareYyyy, targetYyyy);
}
/** Inserts the tb_dataset master row and returns its generated PK. */
// NOTE(review): the @Transactional imported here is jakarta.transaction.Transactional
// while calling code uses Spring's annotation — confirm the two interoperate as intended.
@Transactional
public Long insertDatasetMngData(DatasetMngRegDto mngRegDto) {
return datasetRepository.insertDatasetMngData(mngRegDto);
}
/** Inserts one tb_dataset_obj row (native jsonb insert in the repository impl). */
@Transactional
public void insertDatasetObj(DatasetObjRegDto objRegDto) {
datasetObjRepository.insertDatasetObj(objRegDto);
}
/** Resolves the stored compare/target/label path for a dataset-object uuid. */
public String getFilePathByUUIDPathType(UUID uuid, String pathType) {
return datasetObjRepository.getFilePathByUUIDPathType(uuid, pathType);
}
}

View File

@@ -2,6 +2,7 @@ package com.kamco.cd.training.postgres.core;
import com.kamco.cd.training.postgres.repository.upload.UploadSessionRepository;
import com.kamco.cd.training.upload.dto.UploadDto;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
@@ -39,8 +40,8 @@ public class UploadSessionCoreService {
uploadSessionRepository.insertUploadSession(addReq);
}
public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
return uploadSessionRepository.findByDatasetUid(datasetId, uploadDivi);
public UploadDto.uploadDto findByDatasetUid(String uploadDivi, UUID uuid) {
return uploadSessionRepository.findByDatasetUid(uploadDivi, uuid);
}
public UploadDto.uploadDto findByUuid(String uuid) {

View File

@@ -16,6 +16,8 @@ import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
@Getter
@Setter
@@ -93,6 +95,10 @@ public class DatasetObjEntity {
@Column(precision = 5, scale = 2)
private BigDecimal afClsPro;
@JdbcTypeCode(SqlTypes.JSON)
@Column(name = "geo_jsonb", columnDefinition = "jsonb")
private String geoJsonb;
public Basic toDto() {
return new DatasetObjDto.Basic(
this.objId,
@@ -109,6 +115,7 @@ public class DatasetObjEntity {
this.createdDttm,
this.createdUid,
this.deleted,
this.uuid);
this.uuid,
this.geoJsonb);
}
}

View File

@@ -1,6 +1,7 @@
package com.kamco.cd.training.postgres.repository.dataset;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetClass;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetObjRegDto;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.SearchReq;
import com.kamco.cd.training.postgres.entity.DatasetObjEntity;
import java.util.List;
@@ -15,4 +16,8 @@ public interface DatasetObjRepositoryCustom {
Page<DatasetObjEntity> searchDatasetObjectList(SearchReq searchReq);
List<DatasetClass> findDatasetObjClassByUuid(UUID uuid, String type);
void insertDatasetObj(DatasetObjRegDto objRegDto);
String getFilePathByUUIDPathType(UUID uuid, String pathType);
}

View File

@@ -3,16 +3,23 @@ package com.kamco.cd.training.postgres.repository.dataset;
import static com.kamco.cd.training.postgres.entity.QDatasetEntity.datasetEntity;
import static com.kamco.cd.training.postgres.entity.QDatasetObjEntity.datasetObjEntity;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetClass;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetObjRegDto;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.SearchReq;
import com.kamco.cd.training.postgres.entity.DatasetEntity;
import com.kamco.cd.training.postgres.entity.DatasetObjEntity;
import com.kamco.cd.training.postgres.entity.QDatasetEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Expression;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.StringPath;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityNotFoundException;
import jakarta.persistence.PersistenceContext;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
@@ -31,6 +38,8 @@ public class DatasetObjRepositoryImpl implements DatasetObjRepositoryCustom {
private final JPAQueryFactory queryFactory;
private final QDatasetEntity dataset = datasetEntity;
@PersistenceContext EntityManager em;
@Override
public Optional<DatasetObjEntity> findByUuid(UUID id) {
return Optional.ofNullable(
@@ -91,7 +100,12 @@ public class DatasetObjRepositoryImpl implements DatasetObjRepositoryCustom {
queryFactory
.select(datasetObjEntity.count())
.from(datasetObjEntity)
.where(datasetObjEntity.deleted.isFalse().and(builder))
.where(
datasetObjEntity
.deleted
.isFalse()
.and(datasetObjEntity.datasetUid.eq(entity.getId()))
.and(builder))
.fetchOne())
.orElse(0L);
@@ -118,4 +132,76 @@ public class DatasetObjRepositoryImpl implements DatasetObjRepositoryCustom {
.groupBy(classCd)
.fetch();
}
/**
 * Inserts one tb_dataset_obj row via native SQL; the GeoJSON feature is
 * serialized to a string and cast to jsonb inside the statement (QueryDSL's
 * insert cannot cast a bind parameter to jsonb).
 *
 * @throws RuntimeException when the feature cannot be serialized to JSON
 */
@Override
public void insertDatasetObj(DatasetObjRegDto objRegDto) {
    String json;
    try {
        json = new ObjectMapper().writeValueAsString(objRegDto.getGeojson());
    } catch (JsonProcessingException e) {
        throw new RuntimeException("geojson 직렬화 실패", e);
    }
    em.createNativeQuery(
            """
            insert into tb_dataset_obj
            (dataset_uid, target_yyyy, target_class_cd,
            compare_yyyy, compare_class_cd,
            target_path, compare_path, label_path, geo_jsonb, map_sheet_num)
            values
            (?, ?, ?, ?, ?, ?, ?, ?, cast(? as jsonb), ?)
            """)
        .setParameter(1, objRegDto.getDatasetUid())
        .setParameter(2, objRegDto.getTargetYyyy())
        .setParameter(3, objRegDto.getTargetClassCd())
        .setParameter(4, objRegDto.getCompareYyyy())
        .setParameter(5, objRegDto.getCompareClassCd())
        .setParameter(6, objRegDto.getTargetPath())
        .setParameter(7, objRegDto.getComparePath())
        .setParameter(8, objRegDto.getLabelPath())
        .setParameter(9, json)
        .setParameter(10, objRegDto.getMapSheetNum())
        .executeUpdate();
}
/**
 * Returns the stored path column selected by {@code pathType} for the dataset
 * object with the given uuid; null when the uuid does not exist.
 */
@Override
public String getFilePathByUUIDPathType(UUID uuid, String pathType) {
// pick the column matching the requested path type
Expression<String> pathExpr =
switch (pathType) {
case "compare" -> datasetObjEntity.comparePath;
case "target" -> datasetObjEntity.targetPath;
case "label" -> datasetObjEntity.labelPath;
// NOTE(review): an unknown pathType silently selects a constant "" —
// consider throwing IllegalArgumentException instead.
default -> Expressions.constant("");
};
return queryFactory
.select(pathExpr)
.from(datasetObjEntity)
.where(datasetObjEntity.uuid.eq(uuid))
.fetchOne();
}
}

View File

@@ -1,6 +1,7 @@
package com.kamco.cd.training.postgres.repository.dataset;
import com.kamco.cd.training.dataset.dto.DatasetDto;
import com.kamco.cd.training.dataset.dto.DatasetDto.DatasetMngRegDto;
import com.kamco.cd.training.dataset.dto.DatasetDto.DatasetReq;
import com.kamco.cd.training.dataset.dto.DatasetDto.SelectDataSet;
import com.kamco.cd.training.postgres.entity.DatasetEntity;
@@ -17,4 +18,8 @@ public interface DatasetRepositoryCustom {
List<SelectDataSet> getDatasetSelectG1List(DatasetReq req);
List<SelectDataSet> getDatasetSelectG2G3List(DatasetReq req);
Long getDatasetMaxStage(int compareYyyy, int targetYyyy);
Long insertDatasetMngData(DatasetMngRegDto mngRegDto);
}

View File

@@ -3,6 +3,7 @@ package com.kamco.cd.training.postgres.repository.dataset;
import static com.kamco.cd.training.postgres.entity.QDatasetObjEntity.datasetObjEntity;
import com.kamco.cd.training.common.enums.ModelType;
import com.kamco.cd.training.dataset.dto.DatasetDto.DatasetMngRegDto;
import com.kamco.cd.training.dataset.dto.DatasetDto.DatasetReq;
import com.kamco.cd.training.dataset.dto.DatasetDto.SearchReq;
import com.kamco.cd.training.dataset.dto.DatasetDto.SelectDataSet;
@@ -196,4 +197,48 @@ public class DatasetRepositoryImpl implements DatasetRepositoryCustom {
.orderBy(dataset.createdDttm.desc())
.fetch();
}
/** Highest round_no registered for the given compare/target year pair; 0 when no rows match. */
@Override
public Long getDatasetMaxStage(int compareYyyy, int targetYyyy) {
    return queryFactory
        .select(dataset.roundNo.max().coalesce(0L))
        .from(dataset)
        .where(dataset.compareYyyy.eq(compareYyyy).and(dataset.targetYyyy.eq(targetYyyy)))
        .fetchOne();
}
/**
 * Inserts the tb_dataset master row, then reads its PK back via the uid column.
 *
 * NOTE(review): the read-back assumes uid is unique in tb_dataset — fetchOne()
 * throws when several rows share a uid, and a concurrent insert with the same
 * uid could return the wrong PK. A DB-generated key (e.g. INSERT ... RETURNING id)
 * would be safer — confirm uid uniqueness.
 */
@Override
public Long insertDatasetMngData(DatasetMngRegDto mngRegDto) {
queryFactory
.insert(dataset)
.columns(
dataset.uid,
dataset.dataType,
dataset.compareYyyy,
dataset.targetYyyy,
dataset.roundNo,
dataset.totalSize,
dataset.title,
dataset.memo,
dataset.datasetPath)
.values(
mngRegDto.getUid(),
mngRegDto.getDataType(),
mngRegDto.getCompareYyyy(),
mngRegDto.getTargetYyyy(),
mngRegDto.getRoundNo(),
mngRegDto.getTotalSize(),
mngRegDto.getTitle(),
mngRegDto.getMemo(),
mngRegDto.getDatasetPath())
.execute();
// read the generated PK back by uid (see NOTE above)
Long datasetUid =
queryFactory
.select(dataset.id)
.from(dataset)
.where(dataset.uid.eq(mngRegDto.getUid()))
.fetchOne();
return datasetUid;
}
}

View File

@@ -1,12 +1,13 @@
package com.kamco.cd.training.postgres.repository.upload;
import com.kamco.cd.training.upload.dto.UploadDto;
import java.util.UUID;
public interface UploadSessionRepositoryCustom {
void insertUploadSession(UploadDto.UploadAddReq addReq);
UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi);
UploadDto.uploadDto findByDatasetUid(String uploadDivi, UUID uuid);
UploadDto.uploadDto findByUuid(String uuid);

View File

@@ -61,7 +61,7 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
}
@Override
public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
public UploadDto.uploadDto findByDatasetUid(String uploadDivi, UUID uuid) {
UploadDto.uploadDto foundContent =
queryFactory
@@ -83,9 +83,9 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
.from(uploadSessionEntity)
.where(
uploadSessionEntity
.datasetId
.eq(datasetId)
.and(uploadSessionEntity.uploadDivi.eq(uploadDivi)))
.uploadDivi
.eq(uploadDivi)
.and(uploadSessionEntity.uuid.eq(uuid)))
.limit(1)
.fetchOne();

View File

@@ -9,6 +9,7 @@ import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
@@ -53,24 +54,26 @@ public class UploadApiController {
})
@PostMapping(value = "/chunk-upload-dataset", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public ApiResponseDto<UploadDto.UploadRes> uploadChunkDataSetFile(
@RequestParam("datasetUid") long datasetUid,
// @RequestParam("datasetUid") long datasetUid,
@RequestParam("fileName") String fileName,
@RequestParam("fileSize") long fileSize,
// @RequestParam("fileHash") String fileHash,
@RequestParam("chunkIndex") Integer chunkIndex,
@RequestParam("chunkTotalIndex") Integer chunkTotalIndex,
@RequestPart("chunkFile") MultipartFile chunkFile) {
@RequestPart("chunkFile") MultipartFile chunkFile,
@RequestParam("uuid") UUID uuid) {
String uploadDivi = "dataset";
UploadDto.UploadAddReq upAddReqDto = new UploadDto.UploadAddReq();
upAddReqDto.setDatasetId(datasetUid);
// upAddReqDto.setDatasetId(datasetUid);
upAddReqDto.setFileName(fileName);
upAddReqDto.setFileSize(fileSize);
upAddReqDto.setChunkIndex(chunkIndex);
upAddReqDto.setChunkTotalIndex(chunkTotalIndex);
upAddReqDto.setUploadDivi(uploadDivi);
// upAddReqDto.setFileHash(fileHash);
upAddReqDto.setUuid(uuid);
return ApiResponseDto.ok(uploadService.uploadChunk(upAddReqDto, chunkFile));
}

View File

@@ -195,6 +195,8 @@ public class UploadDto {
private String uuid;
private int chunkIndex;
private int chunkTotalIndex;
private String filePath;
private String fileName;
public double getUploadRate() {
if (this.chunkTotalIndex == 0) {

View File

@@ -10,6 +10,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Objects;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@@ -51,9 +52,11 @@ public class UploadService {
UploadDto.UploadRes upRes = new UploadDto.UploadRes();
long datasetId = upAddReqDto.getDatasetId();
// long datasetId = upAddReqDto.getDatasetId();
long datasetId = 0;
String uploadId = System.currentTimeMillis() + "";
UUID uuid = UUID.randomUUID();
// UUID uuid = UUID.randomUUID();
UUID uuid = upAddReqDto.getUuid();
String tmpDataSetDir = "";
String fianlDir = "";
String uploadDivi = upAddReqDto.getUploadDivi();
@@ -68,6 +71,10 @@ public class UploadService {
fianlDir = datasetDir + uuid + "/";
}
// 리턴용 파일 값
upRes.setFilePath(fianlDir);
upRes.setFileName(fileName);
upAddReqDto.setUuid(uuid);
upAddReqDto.setUploadId(uploadId);
upAddReqDto.setStatus(status);
@@ -94,13 +101,12 @@ public class UploadService {
}
// chunk완료시 merge 및 폴더에 저장
if (chunkIndex.equals(chunkTotalIndex)) {
if (Objects.equals(chunkIndex, chunkTotalIndex)) {
// upAddReqDto.setUploadId(dto.getUploadId());
// upAddReqDto.setStatus("MERGING");
// uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
upAddReqDto.setUploadId(dto != null ? dto.getUploadId() : upAddReqDto.getUploadId());
upAddReqDto.setStatus("MERGING");
uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
/*
try {
this.mergeChunks(tmpDataSetDir, fianlDir, fileName, chunkTotalIndex);
} catch (IOException e) {
@@ -109,9 +115,8 @@ public class UploadService {
upRes.setResMsg("파일 저장 완료(merge) 애러");
return upRes;
}
*/
upAddReqDto.setUploadId(dto.getUploadId());
upAddReqDto.setUploadId(dto != null ? dto.getUploadId() : upAddReqDto.getUploadId());
upAddReqDto.setStatus("COMPLETE");
uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
}
@@ -158,7 +163,7 @@ public class UploadService {
UploadDto.uploadDto dto =
uploadSessionCoreService.findByDatasetUid(
upAddReqDto.getDatasetId(), upAddReqDto.getUploadDivi());
upAddReqDto.getUploadDivi(), upAddReqDto.getUuid());
if (upAddReqDto.getChunkIndex() == 0) {
if (dto != null) {
@@ -190,28 +195,64 @@ public class UploadService {
return dto;
}
public void mergeChunks(String tmpDir, String fianlDir, String fileName, int chunkTotalIndex)
public void mergeChunks(String tmpDir, String finalDir, String fileName, int chunkTotalIndex)
throws IOException {
long start = System.currentTimeMillis();
Path outputPath = Paths.get(finalDir, fileName);
log.info(
"mergeChunks start: fileName={}, tmpDir={}, outputPath={}, lastChunkIndex={}",
fileName,
tmpDir,
outputPath,
chunkTotalIndex);
long totalBytes = 0;
Path outputPath = Paths.get(fianlDir, fileName);
try (FileChannel outChannel =
FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
FileChannel.open(
outputPath,
StandardOpenOption.CREATE,
StandardOpenOption.WRITE,
StandardOpenOption.TRUNCATE_EXISTING)) {
for (int i = 0; i <= chunkTotalIndex; i++) {
Path chunkPath = Paths.get(tmpDir, i + "");
Path chunkPath = Paths.get(tmpDir, String.valueOf(i));
try (FileChannel inChannel = FileChannel.open(chunkPath, StandardOpenOption.READ)) {
long transferred = 0;
long size = inChannel.size();
long transferred = 0;
while (transferred < size) {
transferred += inChannel.transferTo(transferred, size - transferred, outChannel);
}
}
// 병합 후 즉시 삭제하여 디스크 공간 확보
Files.delete(chunkPath);
}
totalBytes += size;
}
// 병합후 임시 폴더 삭제
Files.delete(chunkPath);
}
try {
FIleChecker.deleteFolder(tmpDir);
} catch (Exception e) {
log.warn("tmpDir delete failed (merge already succeeded): tmpDir={}", tmpDir, e);
}
} catch (IOException e) {
log.error(
"mergeChunks failed: fileName={}, tmpDir={}, outputPath={}, lastChunkIndex={}",
fileName,
tmpDir,
outputPath,
chunkTotalIndex,
e);
throw e;
}
log.info(
"mergeChunks done: fileName={}, outputPath={}, bytes={}, elapsedMs={}",
fileName,
outputPath,
totalBytes,
(System.currentTimeMillis() - start));
}
}

View File

@@ -49,3 +49,11 @@ member:
swagger:
local-port: 9080
file:
sync-root-dir: /app/original-images/
sync-tmp-dir: ${file.sync-root-dir}tmp/
sync-file-extention: tfw,tif
dataset-dir: /kamco-nfs/dataset/upload/
dataset-tmp-dir: ${file.dataset-dir}tmp/

View File

@@ -35,3 +35,11 @@ member:
swagger:
local-port: 9080
file:
sync-root-dir: /app/original-images/
sync-tmp-dir: ${file.sync-root-dir}tmp/
sync-file-extention: tfw,tif
dataset-dir: /kamco-nfs/dataset/upload/
dataset-tmp-dir: ${file.dataset-dir}tmp/