Common chunk upload module; model management fixes

Moon
2026-01-08 18:35:19 +09:00
parent 4d2135ec4c
commit eb88717791
19 changed files with 1335 additions and 23 deletions

View File

@@ -0,0 +1,28 @@
package com.kamco.cd.kamcoback.common.enums;
import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose;
import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
import lombok.AllArgsConstructor;
import lombok.Getter;
@CodeExpose
@Getter
@AllArgsConstructor
public enum FileUploadStatus implements EnumType {
INIT("Initialized"),
UPLOADING("Uploading"),
DONE("Upload complete"),
MERGED("Merge complete");
private final String desc;
@Override
public String getId() {
return name();
}
@Override
public String getText() {
return desc;
}
}

View File

@@ -29,6 +29,7 @@ import lombok.Getter;
import org.apache.commons.io.FilenameUtils;
import org.geotools.coverage.grid.GridCoverage2D;
import org.geotools.gce.geotiff.GeoTiffReader;
import org.springframework.util.FileSystemUtils;
import org.springframework.web.multipart.MultipartFile;
public class FIleChecker {
@@ -489,6 +490,23 @@ public class FIleChecker {
return true;
}
public static boolean multipartChunkSaveTo(MultipartFile mfile, String targetPath, int chunkIndex) {
// Each chunk is written under targetPath with its index as the file name.
File dest = new File(targetPath, String.valueOf(chunkIndex));
try {
mfile.transferTo(dest);
} catch (IOException | IllegalStateException e) {
return false;
}
return true;
}
public static boolean deleteFolder(String path) {
return FileSystemUtils.deleteRecursively(new File(path));
}
public static boolean validationMultipart(MultipartFile mfile) {
// Validate the incoming multipart file
if (mfile == null || mfile.isEmpty() || mfile.getSize() == 0) {

View File

@@ -44,6 +44,8 @@ public class SecurityConfig {
.authorizeHttpRequests(
auth ->
auth
// .requestMatchers("/chunk_upload_test.html").authenticated()
// Allow the entire mapsheet area (highest priority)
.requestMatchers("/api/mapsheet/**")
.permitAll()
@@ -77,7 +79,11 @@ public class SecurityConfig {
"/api/auth/logout",
"/swagger-ui/**",
"/api/members/*/password",
"/v3/api-docs/**")
"/v3/api-docs/**",
"/chunk_upload_test.html",
"/api/model/file-chunk-upload",
"/api/upload/file-chunk-upload",
"/api/upload/chunk-upload-complete")
.permitAll()
// IAM: authenticated users only
.requestMatchers("/api/user/**", "/api/my/menus", "/api/code/**")

View File

@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.common.utils.zip.ZipUtils;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.model.service.ModelMngService;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
@@ -11,16 +12,20 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.transaction.Transactional;
import jakarta.validation.Valid;
import java.io.IOException;
import java.time.LocalDate;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
@@ -36,6 +41,27 @@ public class ModelMngApiController {
private final ModelMngService modelMngService;
@Value("${file.sync-root-dir}")
private String syncRootDir;
@Value("${file.sync-tmp-dir}")
private String syncTmpDir;
@Value("${file.sync-file-extention}")
private String syncFileExtention;
@Value("${file.dataset-dir}")
private String datasetDir;
@Value("${file.dataset-tmp-dir}")
private String datasetTmpDir;
@Value("${file.model-dir}")
private String modelDir;
@Value("${file.model-tmp-dir}")
private String modelTmpDir;
@Autowired private ZipUtils zipUtils;
@Operation(summary = "모델관리 목록")
@@ -70,19 +96,59 @@ public class ModelMngApiController {
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@DeleteMapping("/{modelVer}")
@DeleteMapping("/{uuid}")
public ApiResponseDto<ApiResponseDto.ResponseObj> removeModel(
@io.swagger.v3.oas.annotations.Parameter(
description = "UUID of the model to delete",
required = true)
@PathVariable
String modelVer) {
return ApiResponseDto.okObject(modelMngService.removeModel(modelVer));
String uuid) {
return ApiResponseDto.okObject(modelMngService.removeModel(UUID.fromString(uuid)));
}
@Operation(summary = "모델 zip 파일 업로드", description = "모델 zip 파일 업로드")
@PostMapping(value = "/upload/zip", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public void upload(@RequestPart MultipartFile zipFilie) throws IOException {
zipUtils.processZip(zipFilie.getInputStream());
@Operation(summary = "모델등록")
@PostMapping
public ApiResponseDto<ApiResponseDto.ResponseObj> ModelMgmt(
@RequestBody @Valid ModelMngDto.AddReq addReq) {
return ApiResponseDto.ok(modelMngService.insertModel(addReq));
}
@Operation(summary = "데이터셋 대용량 파일 분할 전송", description = "데이터셋 파일 대용량 파일을 청크 단위로 전송합니다.")
@ApiResponses(
value = {
@ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping(value = "/file-chunk-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public ApiResponseDto<UploadDto.UploadRes> fileChunkUpload(
@RequestParam("uuid") UUID uuid,
@RequestParam("fileName") String fileName,
@RequestParam("fileSize") long fileSize,
// @RequestParam("fileHash") String fileHash,
@RequestParam("chunkIndex") Integer chunkIndex,
@RequestParam("chunkTotalIndex") Integer chunkTotalIndex,
@RequestPart("chunkFile") MultipartFile chunkFile) {
String uploadDivi = "model";
UploadDto.UploadAddReq upAddReqDto = new UploadDto.UploadAddReq();
upAddReqDto.setDatasetId(0L);
upAddReqDto.setUuid(uuid);
upAddReqDto.setFileName(fileName);
upAddReqDto.setFileSize(fileSize);
upAddReqDto.setChunkIndex(chunkIndex);
upAddReqDto.setChunkTotalIndex(chunkTotalIndex);
upAddReqDto.setUploadDivi(uploadDivi);
upAddReqDto.setFinalPath(modelDir);
upAddReqDto.setTempPath(modelTmpDir);
return ApiResponseDto.ok(modelMngService.uploadChunkModelFile(upAddReqDto, chunkFile));
}
}
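Note (reviewer sketch, not part of the commit): a minimal Java client for this chunk protocol might look like the following. It assumes the server runs at http://localhost:8080 without authentication (as permitted in SecurityConfig) and reuses Spring's RestTemplate, which is already on this project's classpath; the class name ChunkUploadClient is hypothetical. The parameter names mirror the endpoint above, and the 10 MB chunk size mirrors the bundled test page.

import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.file.Path;
import java.util.UUID;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class ChunkUploadClient {
    private static final int CHUNK_SIZE = 10 * 1024 * 1024; // 10 MB, as in the test page

    public static void upload(Path file) throws IOException {
        RestTemplate rest = new RestTemplate();
        UUID uuid = UUID.randomUUID(); // session key; the same value accompanies every chunk
        long fileSize = file.toFile().length();
        int chunkTotalIndex = (int) ((fileSize + CHUNK_SIZE - 1) / CHUNK_SIZE) - 1; // last index, not count

        try (RandomAccessFile raf = new RandomAccessFile(file.toFile(), "r")) {
            for (int i = 0; i <= chunkTotalIndex; i++) {
                byte[] buf = new byte[(int) Math.min(CHUNK_SIZE, fileSize - (long) i * CHUNK_SIZE)];
                raf.seek((long) i * CHUNK_SIZE);
                raf.readFully(buf);

                MultiValueMap<String, Object> form = new LinkedMultiValueMap<>();
                form.add("uuid", uuid.toString());
                form.add("fileName", file.getFileName().toString());
                form.add("fileSize", String.valueOf(fileSize));
                form.add("chunkIndex", String.valueOf(i));
                form.add("chunkTotalIndex", String.valueOf(chunkTotalIndex));
                final int idx = i; // loop variable is not effectively final, so copy it for the anonymous class
                // ByteArrayResource needs a filename override so Spring sends it as a file part
                form.add("chunkFile", new ByteArrayResource(buf) {
                    @Override public String getFilename() { return "chunk-" + idx; }
                });

                HttpHeaders headers = new HttpHeaders();
                headers.setContentType(MediaType.MULTIPART_FORM_DATA);
                rest.postForObject("http://localhost:8080/api/model/file-chunk-upload",
                        new HttpEntity<>(form, headers), String.class);
            }
        }
    }
}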

View File

@@ -1,11 +1,13 @@
package com.kamco.cd.kamcoback.model.dto;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose;
import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
import io.swagger.v3.oas.annotations.media.Schema;
import java.math.BigDecimal;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
@@ -96,7 +98,6 @@ public class ModelMngDto {
@AllArgsConstructor
public static class ModelList {
private Integer rowNum;
private String modelVer;
private String fileName;
private String modelType;
@@ -108,6 +109,7 @@ public class ModelMngDto {
private BigDecimal iou;
private String memo;
private Boolean deleted;
private UUID uuid;
}
@Schema(name = "ModelAddReq", description = "모델 등록 req")
@@ -118,10 +120,14 @@ public class ModelMngDto {
public static class AddReq {
private String modelType;
private String dockerFileNm;
private String modelVer;
private String hyperVer;
private String epochVer;
private String filePath;
private String fileName;
private String memo;
@JsonIgnore
private UUID uuid;
}
@Schema(name = "searchReq", description = "검색 요청")

View File

@@ -3,10 +3,15 @@ package com.kamco.cd.kamcoback.model.service;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import com.kamco.cd.kamcoback.upload.service.UploadService;
import java.time.LocalDate;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
@Service
@RequiredArgsConstructor
@@ -14,6 +19,29 @@ public class ModelMngService {
private final ModelMngCoreService modelMngCoreService;
private final UploadService uploadService;
@Value("${file.sync-root-dir}")
private String syncRootDir;
@Value("${file.sync-tmp-dir}")
private String syncTmpDir;
@Value("${file.sync-file-extention}")
private String syncFileExtention;
@Value("${file.dataset-dir}")
private String datasetDir;
@Value("${file.dataset-tmp-dir}")
private String datasetTmpDir;
@Value("${file.model-dir}")
private String modelDir;
@Value("${file.model-tmp-dir}")
private String modelTmpDir;
public Page<ModelMngDto.ModelList> findModelMgmtList(
ModelMngDto.searchReq searchReq,
LocalDate startDate,
@@ -24,7 +52,27 @@ public class ModelMngService {
searchReq, startDate, endDate, modelType, searchVal);
}
public ApiResponseDto.ResponseObj removeModel(String modelVer) {
return modelMngCoreService.removeModel(modelVer);
public ApiResponseDto.ResponseObj removeModel(UUID uuid) {
modelMngCoreService.removeModel(uuid);
return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "Deleted.");
}
public ApiResponseDto.ResponseObj insertModel(ModelMngDto.AddReq addReq) {
UUID uuid = UUID.randomUUID();
addReq.setUuid(uuid);
modelMngCoreService.insertModel(addReq);
return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "Registered.");
}
public UploadDto.UploadRes uploadChunkModelFile(
UploadDto.UploadAddReq upAddReqDto, MultipartFile chunkFile) {
return uploadService.uploadChunk(upAddReqDto, chunkFile);
}
}

View File

@@ -6,6 +6,7 @@ import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import com.kamco.cd.kamcoback.postgres.repository.model.ModelMngRepository;
import jakarta.persistence.EntityNotFoundException;
import java.time.LocalDate;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@@ -26,14 +27,24 @@ public class ModelMngCoreService {
searchReq, startDate, endDate, modelType, searchVal);
}
public ApiResponseDto.ResponseObj removeModel(String modelVer) {
public void removeModel(UUID uuid) {
/*
ModelMngEntity entity =
modelMngRepository
.findByModelUid(modelVer)
.orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다. ver: " + modelVer));
.findByModelUuid(uuid)
.orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다."));
*/
// id 코드 deleted = true 업데이트
entity.deleted();
return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "");
//entity.deleted();
modelMngRepository.deleteByModelUuid(uuid);
}
public void insertModel(ModelMngDto.AddReq addReq) {
//ModelMngEntity addEntity = new ModelMngEntity();
//addEntity.setModelType(addReq.getModelType());
modelMngRepository.insertModel(addReq);
}
}

View File

@@ -0,0 +1,56 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.repository.upload.UploadSessionRepository;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
public class UploadSessionCoreService {
private final UploadSessionRepository uploadSessionRepository;
public void createUploadSession(UploadDto.UploadAddReq addReq) {
/*
UUID newUuid = UUID.randomUUID();
UploadSessionEntity entity = new UploadSessionEntity();
entity.setUploadId(addReq.getUploadId());
entity.setDatasetId(addReq.getDatasetId());
entity.setFileName(addReq.getFileName());
entity.setFileSize(addReq.getFileSize());
entity.setFinalPath(addReq.getFinalPath());
entity.setStatus(addReq.getStatus());
entity.setTempPath(addReq.getTempPath());
entity.setChunkIndex(addReq.getChunkIndex());
entity.setChunkTotalIndex(addReq.getChunkTotalIndex());
entity.setUploadDivi(addReq.getUploadDivi());
entity.setFileHash(addReq.getFileHash());
entity.setUuid(newUuid);
//System.out.println("======================");
UploadSessionEntity saved = uploadSessionRepository.save(entity);
return String.valueOf(saved.getUuid());
*/
uploadSessionRepository.insertUploadSession(addReq);
}
public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
return uploadSessionRepository.findByDatasetUid(datasetId, uploadDivi);
}
public UploadDto.uploadDto findByUuid(UUID uuid) {
return uploadSessionRepository.findByUuid(uuid);
}
public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
uploadSessionRepository.updateUploadSessionStatus(addReq);
}
}

View File

@@ -8,6 +8,7 @@ import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
@@ -52,6 +53,9 @@ public class ModelMngEntity extends CommonDateEntity {
@Column(name = "memo")
private String memo;
@Column(name = "uuid")
private UUID uuid;
public void deleted() {
this.deleted = true;
}

View File

@@ -0,0 +1,90 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.OffsetDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
@Getter
@Setter
@Entity
@Table(name = "tb_upload_session")
public class UploadSessionEntity {
@Id
@Size(max = 100)
@Column(name = "upload_id", nullable = false, length = 100)
private String uploadId;
@Column(name = "completed_dttm")
private OffsetDateTime completedDttm;
@NotNull
@ColumnDefault("now()")
@Column(name = "created_dttm", nullable = false)
private OffsetDateTime createdDttm;
@Column(name = "dataset_id")
private Long datasetId;
@Column(name = "error_message", length = Integer.MAX_VALUE)
private String errorMessage;
@Size(max = 255)
@Column(name = "file_name")
private String fileName;
@ColumnDefault("0")
@Column(name = "file_size")
private Long fileSize;
@Size(max = 500)
@Column(name = "final_path", length = 500)
private String finalPath;
@Size(max = 20)
@Column(name = "status", length = 20)
private String status;
@Size(max = 500)
@Column(name = "temp_path", length = 500)
private String tempPath;
@Column(name = "chunk_total_index")
private Integer chunkTotalIndex;
@NotNull
@ColumnDefault("now()")
@Column(name = "updated_dttm", nullable = false)
private OffsetDateTime updatedDttm;
@Column(name = "chunk_index")
private Integer chunkIndex;
@Size(max = 50)
@Column(name = "upload_divi", length = 50)
private String uploadDivi;
@Size(max = 300)
@Column(name = "file_hash", length = 300)
private String fileHash;
@Column(name = "total_chunks")
private Integer totalChunks;
@Column(name = "uploaded_chunks")
private Integer uploadedChunks;
@NotNull
@ColumnDefault("uuid_generate_v4()")
@Column(name = "uuid", nullable = false)
private UUID uuid;
}

View File

@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import java.time.LocalDate;
import java.util.Optional;
import java.util.UUID;
import org.springframework.data.domain.Page;
public interface ModelMngRepositoryCustom {
@@ -16,4 +17,11 @@ public interface ModelMngRepositoryCustom {
String searchVal);
Optional<ModelMngEntity> findByModelUid(String modelVer);
Optional<ModelMngEntity> findByModelUuid(UUID uuid);
void insertModel(ModelMngDto.AddReq addReq);
void deleteByModelUuid(UUID uuid);
}

View File

@@ -14,6 +14,7 @@ import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberPath;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.validation.Valid;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.LocalDateTime;
@@ -22,6 +23,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
@@ -67,8 +69,6 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
.select(
Projections.constructor(
ModelMngDto.ModelList.class,
Expressions.numberTemplate(
Integer.class, "row_number() over(order by {0} desc)", sortColumn),
modelMngEntity.modelVer,
modelMngEntity.fileName,
modelMngEntity.modelType,
@@ -80,7 +80,8 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
roundNumericToPercent(modelResultMetricEntity.loss),
roundNumericToPercent(modelResultMetricEntity.iou),
modelMngEntity.memo,
modelMngEntity.deleted))
modelMngEntity.deleted,
modelMngEntity.uuid))
.from(modelMngEntity)
.innerJoin(modelResultMetricEntity)
.on(modelMngEntity.modelUid.eq(modelResultMetricEntity.modelUid))
@@ -116,6 +117,18 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
.fetchOne());
}
@Override
public Optional<ModelMngEntity> findByModelUuid(UUID uuid) {
return Optional.ofNullable(
queryFactory
.selectFrom(modelMngEntity)
.where(modelMngEntity.uuid.eq(uuid))
.fetchOne());
}
private BooleanExpression eventEndedAtBetween(
LocalDate startDate, LocalDate endDate, String sortColumn) {
if (Objects.isNull(startDate) || Objects.isNull(endDate)) {
@@ -155,4 +168,37 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
private Expression<BigDecimal> roundNumericToPercent(NumberPath<Double> ratio) {
return Expressions.numberTemplate(BigDecimal.class, "function('round', {0} * 100, 2)", ratio);
}
@Override
public void insertModel(@Valid ModelMngDto.AddReq addReq) {
queryFactory
.insert(modelMngEntity)
.columns(
modelMngEntity.modelVer,
modelMngEntity.modelType,
modelMngEntity.filePath,
modelMngEntity.fileName,
modelMngEntity.memo,
modelMngEntity.uuid)
.values(
addReq.getModelVer(),
addReq.getModelType(),
addReq.getFilePath(),
addReq.getFileName(),
addReq.getMemo(),
addReq.getUuid())
.execute();
}
@Override
public void deleteByModelUuid(UUID uuid) {
// Soft delete: flags the row rather than physically removing it.
queryFactory
.update(modelMngEntity)
.set(modelMngEntity.deleted, true)
.where(modelMngEntity.uuid.eq(uuid))
.execute();
}
}

View File

@@ -0,0 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.upload;
import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface UploadSessionRepository
extends JpaRepository<UploadSessionEntity, String>, UploadSessionRepositoryCustom {}

View File

@@ -0,0 +1,19 @@
package com.kamco.cd.kamcoback.postgres.repository.upload;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import java.util.UUID;
public interface UploadSessionRepositoryCustom {
void insertUploadSession(UploadDto.UploadAddReq addReq);
UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi);
UploadDto.uploadDto findByUuid(UUID uuid);
void updateUploadSessionStatus(UploadDto.UploadAddReq addReq);
}

View File

@@ -0,0 +1,142 @@
package com.kamco.cd.kamcoback.postgres.repository.upload;
import static com.kamco.cd.kamcoback.postgres.entity.QUploadSessionEntity.uploadSessionEntity;
import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import java.util.UUID;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
implements UploadSessionRepositoryCustom {
private final JPAQueryFactory queryFactory;
private final StringExpression NULL_STRING = Expressions.stringTemplate("cast(null as text)");
@PersistenceContext private EntityManager em;
public UploadSessionRepositoryImpl(JPAQueryFactory queryFactory) {
super(UploadSessionEntity.class);
this.queryFactory = queryFactory;
}
@Override
public void insertUploadSession(UploadDto.UploadAddReq addReq) {
queryFactory
.insert(uploadSessionEntity)
.columns(
uploadSessionEntity.uploadId,
uploadSessionEntity.datasetId,
uploadSessionEntity.fileName,
uploadSessionEntity.fileSize,
uploadSessionEntity.finalPath,
uploadSessionEntity.status,
uploadSessionEntity.tempPath,
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.uploadDivi,
uploadSessionEntity.fileHash,
uploadSessionEntity.uuid
)
.values(
addReq.getUploadId(),
addReq.getDatasetId(),
addReq.getFileName(),
addReq.getFileSize(),
addReq.getFinalPath(),
addReq.getStatus(),
addReq.getTempPath(),
addReq.getChunkIndex(),
addReq.getChunkTotalIndex(),
addReq.getUploadDivi(),
addReq.getFileHash(),
addReq.getUuid()
)
.execute();
}
@Override
public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
UploadDto.uploadDto foundContent =
queryFactory
.select(
Projections.constructor(
UploadDto.uploadDto.class,
uploadSessionEntity.uploadId,
uploadSessionEntity.datasetId,
uploadSessionEntity.fileName,
uploadSessionEntity.fileSize,
uploadSessionEntity.finalPath,
uploadSessionEntity.uploadDivi,
uploadSessionEntity.status,
uploadSessionEntity.tempPath,
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.fileHash,
uploadSessionEntity.uuid
))
.from(uploadSessionEntity)
.where(uploadSessionEntity.datasetId.eq(datasetId)
.and(uploadSessionEntity.uploadDivi.eq(uploadDivi)))
.limit(1)
.fetchOne();
return foundContent;
}
@Override
public UploadDto.uploadDto findByUuid(UUID uuid) {
UploadDto.uploadDto foundContent =
queryFactory
.select(
Projections.constructor(
UploadDto.uploadDto.class,
uploadSessionEntity.uploadId,
uploadSessionEntity.datasetId,
uploadSessionEntity.fileName,
uploadSessionEntity.fileSize,
uploadSessionEntity.finalPath,
uploadSessionEntity.uploadDivi,
uploadSessionEntity.status,
uploadSessionEntity.tempPath,
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.fileHash,
uploadSessionEntity.uuid
))
.from(uploadSessionEntity)
.where(uploadSessionEntity.uuid.eq(uuid))
.limit(1)
.fetchOne();
return foundContent;
}
@Override
public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
queryFactory
.update(uploadSessionEntity)
.set(uploadSessionEntity.chunkIndex, addReq.getChunkIndex())
.set(uploadSessionEntity.status, addReq.getStatus())
.where(uploadSessionEntity.uploadId.eq(addReq.getUploadId()))
.execute();
}
}

View File

@@ -0,0 +1,144 @@
package com.kamco.cd.kamcoback.upload;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import com.kamco.cd.kamcoback.upload.service.UploadService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
@Tag(name = "파일 업로드", description = "대용량 파일 업로드 API")
@RestController
@RequestMapping("/api/upload")
@RequiredArgsConstructor
public class UploadApiController {
private final UploadService uploadService;
@Value("${file.sync-root-dir}")
private String syncRootDir;
@Value("${file.sync-tmp-dir}")
private String syncTmpDir;
@Value("${file.sync-file-extention}")
private String syncFileExtention;
@Value("${file.dataset-dir}")
private String datasetDir;
@Value("${file.dataset-tmp-dir}")
private String datasetTmpDir;
@Value("${file.model-dir}")
private String modelDir;
@Value("${file.model-tmp-dir}")
private String modelTmpDir;
/*
@Operation(summary = "데이터셋 대용량 업로드 세션 시작", description = "데이터셋 대용량 파일 업로드 세션을 시작합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "세션 생성 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = UploadDto.InitRes.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/chunk-upload-init")
public ApiResponseDto<DmlReturn> initUpload(
@RequestBody @Valid UploadDto.InitReq initReq) {
return ApiResponseDto.createOK(uploadService.initUpload(initReq));
}
*/
@Operation(summary = "데이터셋 대용량 파일 분할 전송", description = "데이터셋 파일 대용량 파일을 청크 단위로 전송합니다.")
@ApiResponses(
value = {
@ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping(value = "/file-chunk-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public ApiResponseDto<UploadDto.UploadRes> fileChunkUpload(
@RequestParam("uuid") UUID uuid,
@RequestParam("fileName") String fileName,
@RequestParam("fileSize") long fileSize,
// @RequestParam("fileHash") String fileHash,
@RequestParam("chunkIndex") Integer chunkIndex,
@RequestParam("chunkTotalIndex") Integer chunkTotalIndex,
@RequestPart("chunkFile") MultipartFile chunkFile) {
String uploadDivi = "dataset";
UploadDto.UploadAddReq upAddReqDto = new UploadDto.UploadAddReq();
upAddReqDto.setDatasetId(0L);
upAddReqDto.setUuid(uuid);
upAddReqDto.setFileName(fileName);
upAddReqDto.setFileSize(fileSize);
upAddReqDto.setChunkIndex(chunkIndex);
upAddReqDto.setChunkTotalIndex(chunkTotalIndex);
upAddReqDto.setUploadDivi(uploadDivi);
upAddReqDto.setFinalPath(datasetDir);
upAddReqDto.setTempPath(datasetTmpDir);
return ApiResponseDto.ok(uploadService.uploadChunk(upAddReqDto, chunkFile));
}
@Operation(summary = "업로드 완료된 파일 병합", description = "업로드 완료 및 파일 병합을 요청합니다.")
@ApiResponses(
value = {
@ApiResponse(responseCode = "200", description = "업로드 완료 성공", content = @Content),
@ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
@ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PutMapping("/chunk-upload-complete/{uuid}")
public ApiResponseDto<UploadDto.UploadRes> completeUpload(
@PathVariable UUID uuid) {
return ApiResponseDto.ok(uploadService.completeUpload(uuid));
}
/*
@Operation(summary = "업로드 상태 조회", description = "업로드 진행 상태를 조회합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = UploadDto.Status.class))),
@ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/status")
public ApiResponseDto<UploadDto.Status> getUploadStatus(
@RequestBody @Valid UploadDto.StatusReq statusReq) {
return ApiResponseDto.ok(uploadService.getUploadStatus(statusReq));
}
*/
}
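A matching completion/merge call, under the same assumptions as the client sketch after the model controller above (local server, no auth, Spring's RestTemplate; uuid is the session key that accompanied the chunks):

RestTemplate rest = new RestTemplate();
// PUT with the session UUID in the path; no request body is required.
rest.put("http://localhost:8080/api/upload/chunk-upload-complete/" + uuid, null);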

View File

@@ -0,0 +1,224 @@
package com.kamco.cd.kamcoback.upload.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotBlank;
import java.util.UUID;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
public class UploadDto {
@Schema(name = "InitReq", description = "업로드(Chunk) 세션 초기화")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class InitReq {
@Schema(description = "파일명", example = "data.zip")
private String fileName;
@Schema(description = "파일 크기 (bytes)", example = "10737418240")
private Long fileSize;
@Schema(description = "총 청크 수", example = "100")
private Integer chunkTotalIndex;
/*
@Schema(
description = "파일해쉬",
example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
private String fileHash;
*/
@Schema(description = "업로드구분", example = "model")
private String uploadDivi;
}
@Schema(name = "UploadAddReq", description = "업로드 요청")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class UploadAddReq {
@Schema(description = "업로드 ID", example = "각데이터의 식별키")
private String uploadId;
@Schema(description = "데이터식별키", example = "129227333")
private Long datasetId;
@Schema(description = "파일명", example = "data.zip")
private String fileName;
@Schema(description = "파일 크기 (bytes)", example = "10737418240")
private Long fileSize;
@Schema(description = "파일명", example = "data.zip")
private String finalPath;
@Schema(description = "업로드구분", example = "dataset")
private String uploadDivi;
@Schema(description = "상태", example = "UPLOADING")
private String status;
@Schema(description = "임시저장경로")
private String tempPath;
@Schema(description = "업로드 청크 Index", example = "50")
private Integer chunkIndex;
@Schema(description = "총 청크 수", example = "100")
private Integer chunkTotalIndex;
@Schema(
description = "파일해쉬",
example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
private String fileHash;
@Schema(description = "uuid", example = "303d4e24-1726-4272-bbc7-01ab85692b80")
private UUID uuid;
}
@Schema(name = "UploadCompleteReq", description = "업로드 완료 요청")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class CompleteReq {
@NotBlank(message = "Upload ID is required")
@Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123")
private String uploadId;
}
@Schema(name = "UploadStatusReq", description = "업로드 상태 조회 요청")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class StatusReq {
@NotBlank(message = "Upload ID is required")
@Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123")
private String uploadId;
}
@Schema(name = "UploadStatus", description = "업로드 상태 정보")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class Status {
@Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123")
private String uploadId;
@Schema(description = "파일명", example = "data.zip")
private String fileName;
@Schema(description = "파일 크기 (bytes)", example = "10737418240")
private Long fileSize;
@Schema(description = "상태", example = "UPLOADING")
private String status;
@Schema(description = "총 청크 수", example = "100")
private Integer totalChunks;
@Schema(description = "업로드된 청크 수", example = "50")
private Integer uploadedChunks;
@Schema(description = "진행률 (%)", example = "50.0")
private Double progress;
@Schema(description = "에러 메시지", example = "")
private String errorMessage;
}
@Schema(name = "UploadAddReq", description = "업로드 요청")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class uploadDto {
@Schema(description = "업로드 ID", example = "각데이터의 식별키")
private String uploadId;
@Schema(description = "데이터식별키", example = "129227333")
private Long datasetId;
@Schema(description = "파일명", example = "data.zip")
private String fileName;
@Schema(description = "파일 크기 (bytes)", example = "10737418240")
private Long fileSize;
@Schema(description = "파일명", example = "data.zip")
private String finalPath;
@Schema(description = "업로드구분", example = "dataset")
private String uploadDivi;
@Schema(description = "상태", example = "UPLOADING")
private String status;
@Schema(description = "임시저장경로")
private String tempPath;
@Schema(description = "업로드 청크 Index", example = "50")
private Integer chunkIndex;
@Schema(description = "총 청크 Index", example = "100")
private Integer chunkTotalIndex;
@Schema(
description = "파일해쉬",
example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
private String fileHash;
@Schema(description = "uuid")
private UUID uuid;
}
@Schema(name = "UploadRes", description = "업로드 수행 후 리턴")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class UploadRes {
private String res;
private String resMsg;
private UUID uuid;
private String filePath;
private String fileName;
private int chunkIndex;
private int chunkTotalIndex;
public double getUploadRate() {
// chunkTotalIndex is the last chunk index, so the chunk count is chunkTotalIndex + 1;
// a single-chunk upload (chunkTotalIndex == 0) should report 100%, not 0.
if (this.chunkTotalIndex < 0) {
return 0.0;
}
return (double) (this.chunkIndex + 1) / (this.chunkTotalIndex + 1) * 100.0;
}
}
@Schema(name = "DmlReturn", description = "수행 후 리턴")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class DmlReturn {
private String flag;
private String message;
}
}

View File

@@ -0,0 +1,252 @@
package com.kamco.cd.kamcoback.upload.service;
import com.kamco.cd.kamcoback.common.enums.FileUploadStatus;
import com.kamco.cd.kamcoback.common.utils.FIleChecker;
import com.kamco.cd.kamcoback.postgres.core.UploadSessionCoreService;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import com.kamco.cd.kamcoback.upload.dto.UploadDto.DmlReturn;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
@Slf4j
@Service
@RequiredArgsConstructor
public class UploadService {
private final UploadSessionCoreService uploadSessionCoreService;
@Value("${file.sync-root-dir}")
private String syncRootDir;
@Value("${file.sync-tmp-dir}")
private String syncTmpDir;
@Value("${file.sync-file-extention}")
private String syncFileExtention;
@Value("${file.dataset-dir}")
private String datasetDir;
@Value("${file.dataset-tmp-dir}")
private String datasetTmpDir;
@Value("${file.model-dir}")
private String modelDir;
@Value("${file.model-tmp-dir}")
private String modelTmpDir;
@Transactional
public DmlReturn initUpload(UploadDto.InitReq initReq) {
return new DmlReturn("success", "UPLOAD CHUNK INIT");
}
@Transactional
public UploadDto.UploadRes uploadChunk(UploadDto.UploadAddReq upAddReqDto, MultipartFile file) {
UploadDto.UploadRes upRes = new UploadDto.UploadRes();
String uploadId = String.valueOf(System.currentTimeMillis());
UUID uuid = upAddReqDto.getUuid();
String tmpDataSetDir = upAddReqDto.getTempPath() + uuid;
String finalDir = upAddReqDto.getFinalPath() + uuid;
String fileName = upAddReqDto.getFileName();
Integer chunkIndex = upAddReqDto.getChunkIndex();
Integer chunkTotalIndex = upAddReqDto.getChunkTotalIndex();
upRes.setUuid(uuid);
upRes.setFilePath(finalDir);
upRes.setFileName(fileName);
upAddReqDto.setUploadId(uploadId);
upAddReqDto.setStatus(FileUploadStatus.INIT.name());
upAddReqDto.setTempPath(tmpDataSetDir);
upAddReqDto.setFinalPath(finalDir);
// Create or re-validate the upload session (handles duplicate and missing sessions).
UploadDto.uploadDto dto = this.checkUploadSession(upAddReqDto, upRes);
if (!upRes.getRes().equals("success")) return upRes;
upAddReqDto.setStatus(FileUploadStatus.UPLOADING.name());
if (dto != null) {
tmpDataSetDir = dto.getTempPath();
finalDir = dto.getFinalPath();
}
// Create and verify the temp and final directories.
if (!checkChunkFolder(upRes, tmpDataSetDir, finalDir)) return upRes;
// Persist this chunk to the temp directory.
if (!FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex)) {
upRes.setRes("fail");
upRes.setResMsg("chunkIndex:" + chunkIndex + " upload error");
return upRes;
}
// Compare values, not references: Integer == fails above the boxing cache (127).
if (chunkIndex.intValue() == chunkTotalIndex.intValue()) {
// dto is null when the file fits in a single chunk and the session was just created.
if (dto != null) upAddReqDto.setUploadId(dto.getUploadId());
upAddReqDto.setStatus(FileUploadStatus.DONE.name());
uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
try {
this.mergeChunks(tmpDataSetDir, finalDir, fileName, chunkTotalIndex);
upAddReqDto.setStatus(FileUploadStatus.MERGED.name());
uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
} catch (IOException e) {
upRes.setRes("fail");
upRes.setResMsg("chunk merge error");
return upRes;
}
}
return upRes;
}
@Transactional
public UploadDto.UploadRes completeUpload(UUID uuid) {
UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(uuid);
UploadDto.UploadRes upRes = new UploadDto.UploadRes();
if (dto == null) {
upRes.setRes("nosession");
upRes.setResMsg("Upload session not found.");
return upRes;
}
upRes.setRes("success");
upRes.setResMsg("Merge completed successfully.");
upRes.setUuid(uuid);
upRes.setFilePath(dto.getFinalPath());
upRes.setFileName(dto.getFileName());
upRes.setChunkIndex(dto.getChunkIndex());
upRes.setChunkTotalIndex(dto.getChunkTotalIndex());
try {
this.mergeChunks(dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex());
} catch (IOException e) {
upRes.setRes("fail");
upRes.setResMsg("merge error");
return upRes;
}
return upRes;
}
public boolean checkChunkFolder(UploadDto.UploadRes upRes, String tmpDataSetDir, String finalDir) {
if (!FIleChecker.mkDir(tmpDataSetDir)) {
upRes.setRes("fail");
upRes.setResMsg("Failed to create chunk temp directory");
return false;
}
if (!FIleChecker.mkDir(finalDir)) {
upRes.setRes("fail");
upRes.setResMsg("Failed to create final upload directory");
return false;
}
return true;
}
public UploadDto.uploadDto checkUploadSession(UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) {
upRes.setRes("success");
upRes.setResMsg("Processed successfully.");
UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(upAddReqDto.getUuid());
if (upAddReqDto.getChunkIndex() == 0) {
// First chunk: a session with this UUID must not already exist.
if (dto != null) {
upRes.setRes("duplicate");
upRes.setResMsg("An upload session is already in progress.");
return dto;
}
upAddReqDto.setStatus(FileUploadStatus.UPLOADING.name());
upRes.setUuid(upAddReqDto.getUuid());
uploadSessionCoreService.createUploadSession(upAddReqDto);
} else {
// Subsequent chunks: the session must already exist.
if (dto == null) {
upRes.setRes("nosession");
upRes.setResMsg("Upload session does not exist.");
return dto;
}
upAddReqDto.setStatus(FileUploadStatus.UPLOADING.name());
upAddReqDto.setUploadId(dto.getUploadId());
uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
}
if (dto != null) upRes.setUuid(dto.getUuid());
upRes.setChunkIndex(upAddReqDto.getChunkIndex());
upRes.setChunkTotalIndex(upAddReqDto.getChunkTotalIndex());
return dto;
}
public void mergeChunks(String tmpDir, String finalDir, String fileName, int chunkTotalIndex) throws IOException {
Path outputPath = Paths.get(finalDir, fileName);
try (FileChannel outChannel = FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
for (int i = 0; i <= chunkTotalIndex; i++) {
Path chunkPath = Paths.get(tmpDir, String.valueOf(i));
try (FileChannel inChannel = FileChannel.open(chunkPath, StandardOpenOption.READ)) {
long transferred = 0;
long size = inChannel.size();
while (transferred < size) {
transferred += inChannel.transferTo(transferred, size - transferred, outChannel);
}
}
// Delete each chunk immediately after merging to free disk space.
Files.delete(chunkPath);
}
}
// Remove the temp directory once the merge completes.
FIleChecker.deleteFolder(tmpDir);
}
}
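Reviewer sketch (not part of the commit): the merge contract above can be sanity-checked with a small round-trip that writes numbered chunk files (the same "0".."N" naming uploadChunk uses), merges them, and compares bytes. UploadService's only constructor dependency is unused by mergeChunks, so null is passed here purely for illustration; the class name MergeChunksSmokeTest is hypothetical.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Random;

public class MergeChunksSmokeTest {
    public static void main(String[] args) throws IOException {
        Path tmpDir = Files.createTempDirectory("chunks");
        Path finalDir = Files.createTempDirectory("merged");
        byte[] original = new byte[3 * 1024];
        new Random(42).nextBytes(original);

        // Split into three 1 KB chunks named by index, exactly as uploadChunk stores them.
        for (int i = 0; i < 3; i++) {
            Files.write(tmpDir.resolve(String.valueOf(i)),
                    Arrays.copyOfRange(original, i * 1024, (i + 1) * 1024));
        }

        // chunkTotalIndex is the LAST chunk index (2), not the chunk count (3).
        new UploadService(null).mergeChunks(tmpDir.toString(), finalDir.toString(), "data.bin", 2);

        byte[] merged = Files.readAllBytes(finalDir.resolve("data.bin"));
        System.out.println("round-trip ok: " + Arrays.equals(original, merged)); // expect true
    }
}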

View File

@@ -0,0 +1,137 @@
<!DOCTYPE html>
<html lang="ko">
<head>
<meta charset="UTF-8">
<title>Chunk Upload Test</title>
</head>
<body>
<h2>Large File Chunk Upload Test</h2>
* Chunk test size: 10 MB (10 * 1024 * 1024) - adjustable depending on performance<br><br>
* Select the upload API<br><br>
<select name="apiUrl" id="apiUrl" style="width:600px;height:40px;">
<option value="/api/model/file-chunk-upload">Model file chunk upload ( /api/model/file-chunk-upload )</option>
<option value="/api/upload/file-chunk-upload">File chunk upload (common) ( /api/upload/file-chunk-upload )</option>
</select>
<br><br>
* Attach a file<br><br>
<input type="file" id="chunkFile" style="height:40px;"><br><br>
<button onclick="startUpload()" style="height:40px;">Start upload</button>
<br><br><br><br>
* A UUID is generated per upload and sent with every chunk so the upload history can be tracked (also used when merging the file); see the script example<br><br>
UUID : <input id="uuid" name="uuid" value="" style="width:300px;height:30px;" readonly><br><br>
* These values must be extracted from the file and assigned automatically when calling the API; see the script example<br><br>
chunkIndex : <input style="height:30px;" id="chunkIndex" placeholder="chunkIndex" readonly><br><br>
chunkTotalIndex : <input style="height:30px;" id="chunkTotalIndex" placeholder="chunkTotalIndex" readonly ><br><br>
fileSize : <input style="height:30px;" id="fileSize" placeholder="fileSize" readonly><br><br>
<!--
fileHash : <input id="fileHash" placeholder="fileHash"><br><br> -->
<br><br>
* Progress (%)<br><br>
<div style="width:500px;height:30px;border:1px solid #cccccc;"><div id="prgssbar" style="width:100%;height:30px;background:#eeeeee;"></div></div>
<br><br>
* Result message<br><br>
<div id="status" style="padding:20px;width:800px;height:300px;border:1px solid #000000;"></div>
<script>
async function startUpload() {
const apiUrl = document.getElementById('apiUrl').value;
const file = document.getElementById('chunkFile').files[0];
// Check for a file before touching file.name, otherwise this throws on an empty input.
if (!file) return alert("Please select a file.");
const fileName = file.name;
const CHUNK_SIZE = 10 * 1024 * 1024; // 10 MB
const fileSize = file.size;
const totalChunks = Math.ceil(fileSize / CHUNK_SIZE);
const chunkTotalIndex = totalChunks - 1; // last chunk index, not the count
const uuid = crypto.randomUUID(); // unique session ID
document.getElementById('uuid').value = uuid;
document.getElementById('fileSize').value = fileSize;
document.getElementById('chunkTotalIndex').value = chunkTotalIndex;
for (let i = 0; i < totalChunks; i++) {
const start = i * CHUNK_SIZE;
const end = Math.min(start + CHUNK_SIZE, file.size);
const chunk = file.slice(start, end);
document.getElementById('chunkIndex').value = i;
const formData = new FormData();
formData.append("uuid", uuid);
formData.append("fileSize", fileSize);
formData.append("fileName", fileName);
formData.append("chunkIndex", i);
formData.append("chunkTotalIndex", chunkTotalIndex);
formData.append("chunkFile", chunk);
try {
const response = await fetch(apiUrl, { method: 'POST', body: formData });
// Check the response status (200 OK, etc.)
if (!response.ok) {
throw new Error(`Server error: ${response.status}`);
}
// Read the response body (assumed to be JSON)
const result = await response.json();
document.getElementById('status').innerText = JSON.stringify(result, null, 2);
if (result.data.res != "success") {
// Show an error alert here as needed.
break;
}
document.getElementById('prgssbar').style.width = result.data.uploadRate + "%";
} catch (error) {
console.error(`Chunk ${i} upload failed:`, error);
break; // stop on error
}
}
// After all chunks are sent, optionally request the final merge:
//var mergeResult = await completeUpload(uuid);
//document.getElementById('status').innerText = JSON.stringify(mergeResult, null, 2);
}
async function completeUpload(uuid) {
try {
const response = await fetch(`/api/upload/chunk-upload-complete/${uuid}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json'
},
});
if (!response.ok) {
throw new Error(`Server response error: ${response.status}`);
}
const result = await response.json();
return result;
} catch (error) {
console.error("완료 요청 중 오류 발생:", error);
}
}
</script>
</body>
</html>