2 Commits

SHA1        Message                                                                              Date
37f8d728fa  Merge pull request 'init spotless 적용' (#1) from feat/training_260202 into develop   2026-02-02 15:49:14 +09:00
            Reviewed-on: #1
a1ffad1c4e  init spotless 적용                                                                     2026-02-02 15:48:23 +09:00
153 changed files with 12870 additions and 12931 deletions

View File

@@ -490,8 +490,8 @@ public class FIleChecker {
return true;
}
public static boolean multipartChunkSaveTo(MultipartFile mfile, String targetPath, int chunkIndex) {
public static boolean multipartChunkSaveTo(
MultipartFile mfile, String targetPath, int chunkIndex) {
File dest = new File(targetPath, String.valueOf(chunkIndex));
boolean fileUpload = true;
@@ -508,8 +508,6 @@ public class FIleChecker {
return FileSystemUtils.deleteRecursively(new File(path));
}
public static boolean validationMultipart(MultipartFile mfile) {
// 파일 유효성 검증
if (mfile == null || mfile.isEmpty() || mfile.getSize() == 0) {
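
The FIleChecker hunks above only re-wrap the multipartChunkSaveTo signature and the start of validationMultipart; the method bodies fall outside the diff context. As orientation, a minimal sketch of a chunk-save helper with that signature, assuming the standard MultipartFile.transferTo call (only the signature, the dest construction, and the null/empty guard appear in the diff; everything else here is an assumption):

// Sketch only; imports and error handling are assumptions, not the repository's code.
import java.io.File;
import java.io.IOException;
import org.springframework.web.multipart.MultipartFile;

public final class ChunkSaveSketch {

  // Writes one uploaded chunk as <targetPath>/<chunkIndex>, mirroring the
  // FIleChecker.multipartChunkSaveTo signature from the diff.
  public static boolean multipartChunkSaveTo(
      MultipartFile mfile, String targetPath, int chunkIndex) {
    if (mfile == null || mfile.isEmpty() || mfile.getSize() == 0) {
      return false; // same null/empty guard style as validationMultipart above
    }
    File dest = new File(targetPath, String.valueOf(chunkIndex));
    try {
      mfile.transferTo(dest); // assumption: the real helper may copy streams manually instead
      return true;
    } catch (IOException | IllegalStateException e) {
      return false;
    }
  }
}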

View File

@@ -56,7 +56,7 @@ public class AuthController {
@ApiResponses({
@ApiResponse(
responseCode = "200",
description = "로그인 성공",
description = "로그인 성공1",
content = @Content(schema = @Schema(implementation = TokenResponse.class))),
@ApiResponse(
responseCode = "401",
@@ -118,7 +118,7 @@ public class AuthController {
// 사용자 상태 조회
String status = authService.getUserStatus(request);
if(StatusType.INACTIVE.getId().equals(status)) {
if (StatusType.INACTIVE.getId().equals(status)) {
throw new CustomApiException("INACTIVE_ID", HttpStatus.UNAUTHORIZED);
}

View File

@@ -144,7 +144,8 @@ public class MembersCoreService {
MemberEntity memberEntity =
membersRepository
.findByEmployeeNo(request.getUsername())
.orElseThrow(() -> new CustomApiException("LOGIN_ID_NOT_FOUND", HttpStatus.UNAUTHORIZED));
.orElseThrow(
() -> new CustomApiException("LOGIN_ID_NOT_FOUND", HttpStatus.UNAUTHORIZED));
return memberEntity.getStatus();
}

View File

@@ -1,9 +1,7 @@
package com.kamco.cd.training.postgres.core;
import com.kamco.cd.training.postgres.entity.UploadSessionEntity;
import com.kamco.cd.training.postgres.repository.upload.UploadSessionRepository;
import com.kamco.cd.training.upload.dto.UploadDto;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
@@ -13,8 +11,7 @@ public class UploadSessionCoreService {
private final UploadSessionRepository uploadSessionRepository;
public void createUploadSession(UploadDto.UploadAddReq addReq)
{
public void createUploadSession(UploadDto.UploadAddReq addReq) {
/*
UUID newUuid = UUID.randomUUID();
@@ -40,18 +37,17 @@ public class UploadSessionCoreService {
*/
uploadSessionRepository.insertUploadSession(addReq);
}
public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi){
public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
return uploadSessionRepository.findByDatasetUid(datasetId, uploadDivi);
}
public UploadDto.uploadDto findByUuid(String uuid){
public UploadDto.uploadDto findByUuid(String uuid) {
return uploadSessionRepository.findByUuid(uuid);
}
public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq){
public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
uploadSessionRepository.updateUploadSessionStatus(addReq);
}
}

View File

@@ -1,6 +1,5 @@
package com.kamco.cd.training.postgres.entity;
import com.kamco.cd.training.upload.dto.UploadDto;
import jakarta.persistence.*;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
@@ -95,8 +94,4 @@ public class UploadSessionEntity {
this.chunkIndex = 0;
}
}
}

View File

@@ -1,22 +1,14 @@
package com.kamco.cd.training.postgres.repository.upload;
import com.kamco.cd.training.upload.dto.UploadDto;
import jakarta.validation.Valid;
import java.util.List;
import java.util.Optional;
import org.springframework.data.domain.Page;
public interface UploadSessionRepositoryCustom {
void insertUploadSession(UploadDto.UploadAddReq addReq);
UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi);
UploadDto.uploadDto findByUuid(String uuid);
void updateUploadSessionStatus(UploadDto.UploadAddReq addReq);
}
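
UploadSessionRepositoryCustom is the usual Spring Data "custom fragment" half of the repository: the main interface extends both JpaRepository and this fragment, and UploadSessionRepositoryImpl in the next file supplies the QueryDSL implementation. A minimal sketch of the assumed wiring (the ID type and the exact declaration of UploadSessionRepository are not shown in the diff, only its import and the Impl class name):

import com.kamco.cd.training.postgres.entity.UploadSessionEntity;
import org.springframework.data.jpa.repository.JpaRepository;

// Assumed composition: Spring Data picks up UploadSessionRepositoryImpl automatically
// because its name matches the "<CustomInterface>Impl" convention.
public interface UploadSessionRepository
    extends JpaRepository<UploadSessionEntity, Long>, UploadSessionRepositoryCustom {
  // Derived queries can still live here; the QueryDSL methods come from the custom fragment.
}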

View File

@@ -1,31 +1,16 @@
package com.kamco.cd.training.postgres.repository.upload;
import static com.kamco.cd.training.postgres.entity.QUploadSessionEntity.uploadSessionEntity;
import com.kamco.cd.training.postgres.entity.UploadSessionEntity;
import com.kamco.cd.training.upload.dto.UploadDto;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.BooleanExpression;
import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberExpression;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import jakarta.validation.Valid;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import org.hibernate.query.Query;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
@@ -58,8 +43,7 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.uploadDivi,
uploadSessionEntity.fileHash,
uploadSessionEntity.uuid
)
uploadSessionEntity.uuid)
.values(
addReq.getUploadId(),
addReq.getDatasetId(),
@@ -72,14 +56,10 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
addReq.getChunkTotalIndex(),
addReq.getUploadDivi(),
addReq.getFileHash(),
addReq.getUuid()
)
addReq.getUuid())
.execute();
}
@Override
public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
@@ -99,19 +79,19 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.fileHash,
uploadSessionEntity.uuid
))
uploadSessionEntity.uuid))
.from(uploadSessionEntity)
.where(uploadSessionEntity.datasetId.eq(datasetId)
.where(
uploadSessionEntity
.datasetId
.eq(datasetId)
.and(uploadSessionEntity.uploadDivi.eq(uploadDivi)))
.limit(1)
.fetchOne();
return foundContent;
}
@Override
public UploadDto.uploadDto findByUuid(String uuid) {
@@ -131,19 +111,16 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.fileHash,
uploadSessionEntity.uuid
))
uploadSessionEntity.uuid))
.from(uploadSessionEntity)
.where(uploadSessionEntity.uuid.eq(UUID.fromString(uuid)))
.limit(1)
.fetchOne();
return foundContent;
}
public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq){
public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
long fileCount =
queryFactory
.update(uploadSessionEntity)
@@ -152,5 +129,4 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
.where(uploadSessionEntity.uploadId.eq(addReq.getUploadId()))
.execute();
}
}
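
The findBy* methods above follow one QueryDSL pattern: select a DTO projection from uploadSessionEntity, filter, limit to one row, and fetchOne. A rough self-contained sketch of that pattern, assuming queryFactory is a JPAQueryFactory and using Projections.fields (the diff imports Projections but does not show which factory method is used, and the field list here is abbreviated):

import static com.kamco.cd.training.postgres.entity.QUploadSessionEntity.uploadSessionEntity;

import com.kamco.cd.training.upload.dto.UploadDto;
import com.querydsl.core.types.Projections;
import com.querydsl.jpa.impl.JPAQueryFactory;

public class UploadSessionQuerySketch {

  private final JPAQueryFactory queryFactory;

  public UploadSessionQuerySketch(JPAQueryFactory queryFactory) {
    this.queryFactory = queryFactory;
  }

  // Same shape as findByDatasetUid above, with the projection trimmed to a few fields.
  public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
    return queryFactory
        .select(
            Projections.fields(
                UploadDto.uploadDto.class,
                uploadSessionEntity.uploadId,
                uploadSessionEntity.datasetId,
                uploadSessionEntity.uuid))
        .from(uploadSessionEntity)
        .where(
            uploadSessionEntity
                .datasetId
                .eq(datasetId)
                .and(uploadSessionEntity.uploadDivi.eq(uploadDivi)))
        .limit(1)
        .fetchOne();
  }
}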

View File

@@ -75,7 +75,6 @@ public class UploadApiController {
return ApiResponseDto.ok(uploadService.uploadChunk(upAddReqDto, chunkFile));
}
@Operation(summary = "업로드 완료된 파일 병합", description = "업로드 완료 및 파일 병합을 요청합니다.")
@ApiResponses(
value = {
@@ -85,8 +84,7 @@ public class UploadApiController {
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PutMapping("/chunk-upload-complete/{uuid}")
public ApiResponseDto<DmlReturn> completeUpload(
@PathVariable String uuid) {
public ApiResponseDto<DmlReturn> completeUpload(@PathVariable String uuid) {
return ApiResponseDto.ok(uploadService.completeUpload(uuid));
}
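
For context, this endpoint is the final step of the chunked upload: after every chunk has been sent through uploadChunk, the client issues PUT /chunk-upload-complete/{uuid} with the session uuid handed back during upload. A rough client sketch using java.net.http; the host, port, any class-level request-mapping prefix, and the uuid value are placeholders, not taken from the project:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CompleteUploadClientSketch {

  public static void main(String[] args) throws Exception {
    // Hypothetical session uuid that earlier chunk-upload responses would have returned.
    String uuid = "00000000-0000-0000-0000-000000000000";
    HttpRequest request =
        HttpRequest.newBuilder()
            // Base URL is a placeholder; the controller may sit behind a class-level prefix.
            .uri(URI.create("http://localhost:8080/chunk-upload-complete/" + uuid))
            .PUT(HttpRequest.BodyPublishers.noBody())
            .build();
    HttpResponse<String> response =
        HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.statusCode() + " " + response.body());
  }
}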

View File

@@ -154,7 +154,6 @@ public class UploadDto {
@Schema(description = "파일명", example = "data.zip")
private String fileName;
@Schema(description = "파일 크기 (bytes)", example = "10737418240")
private Long fileSize;
@@ -176,7 +175,6 @@ public class UploadDto {
@Schema(description = "총 청크 Index", example = "100")
private Integer chunkTotalIndex;
@Schema(
description = "파일해쉬",
example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
@@ -184,7 +182,6 @@ public class UploadDto {
@Schema(description = "uuid")
private UUID uuid;
}
@Schema(name = "UploadRes", description = "업로드 수행 후 리턴")
@@ -203,9 +200,8 @@ public class UploadDto {
if (this.chunkTotalIndex == 0) {
return 0.0;
}
return (double) (this.chunkIndex+1) / (this.chunkTotalIndex+1) * 100.0;
return (double) (this.chunkIndex + 1) / (this.chunkTotalIndex + 1) * 100.0;
}
}
@Schema(name = "DmlReturn", description = "수행 후 리턴")
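
The getProgress change above is whitespace only, but the formula deserves a note: chunkIndex and chunkTotalIndex are zero-based, so both are shifted by one before dividing. For example, chunk 49 of a 100-chunk upload (chunkTotalIndex = 99) reports (49 + 1) / (99 + 1) * 100 = 50.0. A standalone sketch of the same computation:

// Mirrors UploadRes.getProgress from the diff; the guard and zero-based indexes are as shown there.
public class ProgressSketch {

  public static double progress(int chunkIndex, int chunkTotalIndex) {
    if (chunkTotalIndex == 0) {
      return 0.0; // same guard as the DTO; note a one-chunk upload therefore always reports 0.0
    }
    return (double) (chunkIndex + 1) / (chunkTotalIndex + 1) * 100.0;
  }

  public static void main(String[] args) {
    System.out.println(progress(49, 99)); // 50.0
    System.out.println(progress(99, 99)); // 100.0
  }
}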

View File

@@ -4,9 +4,6 @@ import com.kamco.cd.training.common.utils.FIleChecker;
import com.kamco.cd.training.postgres.core.UploadSessionCoreService;
import com.kamco.cd.training.upload.dto.UploadDto;
import com.kamco.cd.training.upload.dto.UploadDto.DmlReturn;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
@@ -55,21 +52,20 @@ public class UploadService {
UploadDto.UploadRes upRes = new UploadDto.UploadRes();
long datasetId = upAddReqDto.getDatasetId();
String uploadId = System.currentTimeMillis()+"";
String uploadId = System.currentTimeMillis() + "";
UUID uuid = UUID.randomUUID();
String tmpDataSetDir = "";
String fianlDir = "";
String uploadDivi = upAddReqDto.getUploadDivi();
//String fileName = file.getOriginalFilename();
// String fileName = file.getOriginalFilename();
String fileName = upAddReqDto.getFileName();
Integer chunkIndex = upAddReqDto.getChunkIndex();
Integer chunkTotalIndex = upAddReqDto.getChunkTotalIndex();
String status = "UPLOADING";
if( uploadDivi.equals("dataset"))
{
tmpDataSetDir = datasetTmpDir+uuid+"/";
fianlDir = datasetDir+uuid+"/";
if (uploadDivi.equals("dataset")) {
tmpDataSetDir = datasetTmpDir + uuid + "/";
fianlDir = datasetDir + uuid + "/";
}
upAddReqDto.setUuid(uuid);
@@ -79,32 +75,30 @@ public class UploadService {
upAddReqDto.setTempPath(tmpDataSetDir);
upAddReqDto.setFinalPath(fianlDir);
//세션 신규,중복체크(초기화 포함)
// 세션 신규,중복체크(초기화 포함)
UploadDto.uploadDto dto = this.checkUploadSession(upAddReqDto, upRes);
if( !upRes.getRes().equals("success") )return upRes;
if (!upRes.getRes().equals("success")) return upRes;
if( dto != null )
{
if (dto != null) {
tmpDataSetDir = dto.getTempPath();
fianlDir = dto.getFinalPath();
}
//폴더 생성 및 체크
if( ! checkChunkFoler(upRes, tmpDataSetDir, fianlDir) )return upRes;
// 폴더 생성 및 체크
if (!checkChunkFoler(upRes, tmpDataSetDir, fianlDir)) return upRes;
//chunk저장하기
if( ! FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex ) )
{
// chunk저장하기
if (!FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex)) {
upRes.setRes("fail");
upRes.setResMsg("chunkIndex:"+chunkIndex+" 업로드 애러");
upRes.setResMsg("chunkIndex:" + chunkIndex + " 업로드 애러");
}
//chunk완료시 merge 및 폴더에 저장
if( chunkIndex == chunkTotalIndex ) {
// chunk완료시 merge 및 폴더에 저장
if (chunkIndex == chunkTotalIndex) {
//upAddReqDto.setUploadId(dto.getUploadId());
//upAddReqDto.setStatus("MERGING");
//uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
// upAddReqDto.setUploadId(dto.getUploadId());
// upAddReqDto.setStatus("MERGING");
// uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
/*
try {
@@ -120,7 +114,6 @@ public class UploadService {
upAddReqDto.setUploadId(dto.getUploadId());
upAddReqDto.setStatus("COMPLETE");
uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
}
return upRes;
@@ -132,26 +125,23 @@ public class UploadService {
UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(uuid);
try {
this.mergeChunks(dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex());
this.mergeChunks(
dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex());
} catch (IOException e) {
return new DmlReturn("mergingfail", "chunk파일 merge 애러");
}
return new DmlReturn("success", "병합(merge) 정상처리되었습니다.");
}
public boolean checkChunkFoler(UploadDto.UploadRes upRes, String tmpDataSetDir, String fianlDir)
{
if( ! FIleChecker.mkDir(tmpDataSetDir) )
{
public boolean checkChunkFoler(UploadDto.UploadRes upRes, String tmpDataSetDir, String fianlDir) {
if (!FIleChecker.mkDir(tmpDataSetDir)) {
upRes.setRes("fail");
upRes.setRes("CHUNK 폴더 생성 ERROR");
return false;
}
if( ! FIleChecker.mkDir(fianlDir) )
{
if (!FIleChecker.mkDir(fianlDir)) {
upRes.setRes("fail");
upRes.setRes("업로드 완료 폴더 생성 ERROR");
return false;
@@ -160,28 +150,28 @@ public class UploadService {
return true;
}
public UploadDto.uploadDto checkUploadSession(UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) {
public UploadDto.uploadDto checkUploadSession(
UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) {
upRes.setRes("success");
upRes.setResMsg("정상처리되었습니다.");
UploadDto.uploadDto dto = uploadSessionCoreService.findByDatasetUid(upAddReqDto.getDatasetId(), upAddReqDto.getUploadDivi());
UploadDto.uploadDto dto =
uploadSessionCoreService.findByDatasetUid(
upAddReqDto.getDatasetId(), upAddReqDto.getUploadDivi());
if( upAddReqDto.getChunkIndex() == 0 ) {
if( dto != null )
{
if (upAddReqDto.getChunkIndex() == 0) {
if (dto != null) {
upRes.setRes("duplicate");
upRes.setResMsg("이미 진행중인 업로드세션입니다.");
return dto;
}
upAddReqDto.setStatus("UPLOADING");
upRes.setUuid( upAddReqDto.getUuid().toString() );
upRes.setUuid(upAddReqDto.getUuid().toString());
uploadSessionCoreService.createUploadSession(upAddReqDto);
}
else {
if( dto == null ){
} else {
if (dto == null) {
upRes.setRes("nosession");
upRes.setResMsg("업로드 세션이 존재하지 않습니다.");
return dto;
@@ -192,22 +182,22 @@ public class UploadService {
uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
}
if( dto != null )upRes.setUuid( dto.getUuid().toString() );
if (dto != null) upRes.setUuid(dto.getUuid().toString());
upRes.setChunkIndex(upAddReqDto.getChunkIndex());
upRes.setChunkTotalIndex(upAddReqDto.getChunkTotalIndex());
return dto;
}
public void mergeChunks(String tmpDir, String fianlDir, String fileName, int chunkTotalIndex) throws IOException {
public void mergeChunks(String tmpDir, String fianlDir, String fileName, int chunkTotalIndex)
throws IOException {
Path outputPath = Paths.get(fianlDir, fileName);
try (FileChannel outChannel = FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
try (FileChannel outChannel =
FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
for (int i = 0; i <= chunkTotalIndex; i++) {
Path chunkPath = Paths.get(tmpDir, i+"");
Path chunkPath = Paths.get(tmpDir, i + "");
try (FileChannel inChannel = FileChannel.open(chunkPath, StandardOpenOption.READ)) {
long transferred = 0;
@@ -221,10 +211,7 @@ public class UploadService {
}
}
//병합후 임시 폴더 삭제
// 병합후 임시 폴더 삭제
FIleChecker.deleteFolder(tmpDir);
}
}
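
The mergeChunks hunks show the output FileChannel being opened and each numbered chunk opened for read, but the byte-transfer loop itself sits outside the diff context. A self-contained sketch of how such a merge is typically completed with FileChannel.transferTo, assuming chunks named 0..chunkTotalIndex as written by multipartChunkSaveTo (the transfer loop and the createDirectories call are assumptions; only the surrounding structure appears above):

import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class MergeChunksSketch {

  public static void mergeChunks(String tmpDir, String finalDir, String fileName, int chunkTotalIndex)
      throws IOException {
    Path outputPath = Paths.get(finalDir, fileName);
    // Assumption: in the real flow FIleChecker.mkDir has already created the final directory.
    Files.createDirectories(outputPath.getParent());
    try (FileChannel outChannel =
        FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
      for (int i = 0; i <= chunkTotalIndex; i++) {
        Path chunkPath = Paths.get(tmpDir, i + "");
        try (FileChannel inChannel = FileChannel.open(chunkPath, StandardOpenOption.READ)) {
          long size = inChannel.size();
          long transferred = 0;
          // transferTo may move fewer bytes than requested, hence the loop. This loop body is
          // the assumed part; the diff only shows the "long transferred = 0;" line.
          while (transferred < size) {
            transferred += inChannel.transferTo(transferred, size - transferred, outChannel);
          }
        }
      }
    }
    // After a successful merge the temp chunk directory is removed (FIleChecker.deleteFolder above).
  }
}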