init: apply spotless #1

Merged
teddy merged 1 commit from feat/training_260202 into develop 2026-02-02 15:49:14 +09:00
153 changed files with 12870 additions and 12931 deletions
Showing only changes of commit a1ffad1c4e

View File

@@ -490,8 +490,8 @@ public class FIleChecker {
     return true;
   }
-  public static boolean multipartChunkSaveTo(MultipartFile mfile, String targetPath, int chunkIndex) {
+  public static boolean multipartChunkSaveTo(
+      MultipartFile mfile, String targetPath, int chunkIndex) {
     File dest = new File(targetPath, String.valueOf(chunkIndex));
     boolean fileUpload = true;
@@ -508,8 +508,6 @@ public class FIleChecker {
     return FileSystemUtils.deleteRecursively(new File(path));
   }
   public static boolean validationMultipart(MultipartFile mfile) {
     // 파일 유효성 검증
     if (mfile == null || mfile.isEmpty() || mfile.getSize() == 0) {
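Note for reviewers: the hunk above only reflows the signature of FIleChecker.multipartChunkSaveTo; the method body is not part of this diff. A minimal sketch of what a chunk-save helper with this signature typically does, assuming it simply writes the part to <targetPath>/<chunkIndex> via MultipartFile.transferTo (an assumption, not confirmed by the hunk):

    // Hypothetical sketch only; the real body of multipartChunkSaveTo is not shown in this diff.
    public static boolean multipartChunkSaveTo(
        MultipartFile mfile, String targetPath, int chunkIndex) {
      // Each chunk is stored as a file named after its index: <targetPath>/0, <targetPath>/1, ...
      File dest = new File(targetPath, String.valueOf(chunkIndex));
      try {
        mfile.transferTo(dest);
        return true;
      } catch (IOException | IllegalStateException e) {
        return false;
      }
    }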

View File

@@ -56,7 +56,7 @@ public class AuthController {
   @ApiResponses({
     @ApiResponse(
         responseCode = "200",
-        description = "로그인 성공",
+        description = "로그인 성공1",
         content = @Content(schema = @Schema(implementation = TokenResponse.class))),
     @ApiResponse(
         responseCode = "401",

View File

@@ -144,7 +144,8 @@ public class MembersCoreService {
     MemberEntity memberEntity =
         membersRepository
             .findByEmployeeNo(request.getUsername())
-            .orElseThrow(() -> new CustomApiException("LOGIN_ID_NOT_FOUND", HttpStatus.UNAUTHORIZED));
+            .orElseThrow(
+                () -> new CustomApiException("LOGIN_ID_NOT_FOUND", HttpStatus.UNAUTHORIZED));
     return memberEntity.getStatus();
   }

View File

@@ -1,9 +1,7 @@
 package com.kamco.cd.training.postgres.core;
-import com.kamco.cd.training.postgres.entity.UploadSessionEntity;
 import com.kamco.cd.training.postgres.repository.upload.UploadSessionRepository;
 import com.kamco.cd.training.upload.dto.UploadDto;
-import java.util.UUID;
 import lombok.RequiredArgsConstructor;
 import org.springframework.stereotype.Service;
@@ -13,8 +11,7 @@ public class UploadSessionCoreService {
   private final UploadSessionRepository uploadSessionRepository;
-  public void createUploadSession(UploadDto.UploadAddReq addReq)
-  {
+  public void createUploadSession(UploadDto.UploadAddReq addReq) {
     /*
     UUID newUuid = UUID.randomUUID();
@@ -40,7 +37,6 @@ public class UploadSessionCoreService {
     */
     uploadSessionRepository.insertUploadSession(addReq);
   }
   public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {

View File

@@ -1,6 +1,5 @@
 package com.kamco.cd.training.postgres.entity;
-import com.kamco.cd.training.upload.dto.UploadDto;
 import jakarta.persistence.*;
 import jakarta.validation.constraints.NotNull;
 import jakarta.validation.constraints.Size;
@@ -95,8 +94,4 @@ public class UploadSessionEntity {
       this.chunkIndex = 0;
     }
   }
 }

View File

@@ -1,22 +1,14 @@
 package com.kamco.cd.training.postgres.repository.upload;
 import com.kamco.cd.training.upload.dto.UploadDto;
-import jakarta.validation.Valid;
-import java.util.List;
-import java.util.Optional;
-import org.springframework.data.domain.Page;
 public interface UploadSessionRepositoryCustom {
   void insertUploadSession(UploadDto.UploadAddReq addReq);
   UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi);
   UploadDto.uploadDto findByUuid(String uuid);
   void updateUploadSessionStatus(UploadDto.UploadAddReq addReq);
 }

View File

@@ -1,31 +1,16 @@
 package com.kamco.cd.training.postgres.repository.upload;
 import static com.kamco.cd.training.postgres.entity.QUploadSessionEntity.uploadSessionEntity;
 import com.kamco.cd.training.postgres.entity.UploadSessionEntity;
 import com.kamco.cd.training.upload.dto.UploadDto;
-import com.querydsl.core.BooleanBuilder;
 import com.querydsl.core.types.Projections;
-import com.querydsl.core.types.dsl.BooleanExpression;
-import com.querydsl.core.types.dsl.CaseBuilder;
 import com.querydsl.core.types.dsl.Expressions;
-import com.querydsl.core.types.dsl.NumberExpression;
 import com.querydsl.core.types.dsl.StringExpression;
 import com.querydsl.jpa.impl.JPAQueryFactory;
 import jakarta.persistence.EntityManager;
 import jakarta.persistence.PersistenceContext;
-import jakarta.validation.Valid;
-import java.time.ZonedDateTime;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
 import java.util.UUID;
-import org.hibernate.query.Query;
-import org.springframework.data.domain.Page;
-import org.springframework.data.domain.PageImpl;
-import org.springframework.data.domain.PageRequest;
-import org.springframework.data.domain.Pageable;
 import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
 public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
@@ -58,8 +43,7 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
                 uploadSessionEntity.chunkTotalIndex,
                 uploadSessionEntity.uploadDivi,
                 uploadSessionEntity.fileHash,
-                uploadSessionEntity.uuid
-            )
+                uploadSessionEntity.uuid)
             .values(
                 addReq.getUploadId(),
                 addReq.getDatasetId(),
@@ -72,14 +56,10 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
                 addReq.getChunkTotalIndex(),
                 addReq.getUploadDivi(),
                 addReq.getFileHash(),
-                addReq.getUuid()
-            )
+                addReq.getUuid())
             .execute();
   }
   @Override
   public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
@@ -99,19 +79,19 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
                 uploadSessionEntity.chunkIndex,
                 uploadSessionEntity.chunkTotalIndex,
                 uploadSessionEntity.fileHash,
-                uploadSessionEntity.uuid
-            ))
+                uploadSessionEntity.uuid))
             .from(uploadSessionEntity)
-            .where(uploadSessionEntity.datasetId.eq(datasetId)
+            .where(
+                uploadSessionEntity
+                    .datasetId
+                    .eq(datasetId)
                     .and(uploadSessionEntity.uploadDivi.eq(uploadDivi)))
             .limit(1)
             .fetchOne();
     return foundContent;
   }
   @Override
   public UploadDto.uploadDto findByUuid(String uuid) {
@@ -131,18 +111,15 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
                 uploadSessionEntity.chunkIndex,
                 uploadSessionEntity.chunkTotalIndex,
                 uploadSessionEntity.fileHash,
-                uploadSessionEntity.uuid
-            ))
+                uploadSessionEntity.uuid))
            .from(uploadSessionEntity)
            .where(uploadSessionEntity.uuid.eq(UUID.fromString(uuid)))
            .limit(1)
            .fetchOne();
     return foundContent;
   }
   public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
     long fileCount =
         queryFactory
@@ -152,5 +129,4 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
             .where(uploadSessionEntity.uploadId.eq(addReq.getUploadId()))
             .execute();
   }
 }

View File

@@ -75,7 +75,6 @@ public class UploadApiController {
     return ApiResponseDto.ok(uploadService.uploadChunk(upAddReqDto, chunkFile));
   }
   @Operation(summary = "업로드 완료된 파일 병합", description = "업로드 완료 및 파일 병합을 요청합니다.")
   @ApiResponses(
       value = {
@@ -85,8 +84,7 @@ public class UploadApiController {
         @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
       })
   @PutMapping("/chunk-upload-complete/{uuid}")
-  public ApiResponseDto<DmlReturn> completeUpload(
-      @PathVariable String uuid) {
+  public ApiResponseDto<DmlReturn> completeUpload(@PathVariable String uuid) {
     return ApiResponseDto.ok(uploadService.completeUpload(uuid));
   }
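For reviewers unfamiliar with the flow: once every chunk has been uploaded, the client finishes by calling the merge endpoint above with a plain PUT. A hedged client-side sketch using java.net.http.HttpClient — the controller's class-level request mapping is not part of this hunk, so the "/api/upload" prefix below is a hypothetical placeholder:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    // Hypothetical usage sketch; the base path "/api/upload" is assumed, not shown in this diff.
    HttpClient client = HttpClient.newHttpClient();
    HttpRequest request =
        HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8080/api/upload/chunk-upload-complete/" + uuid))
            .PUT(HttpRequest.BodyPublishers.noBody())
            .build();
    // The response body is the ApiResponseDto<DmlReturn> JSON produced by completeUpload(uuid).
    HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());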

View File

@@ -154,7 +154,6 @@ public class UploadDto {
     @Schema(description = "파일명", example = "data.zip")
     private String fileName;
     @Schema(description = "파일 크기 (bytes)", example = "10737418240")
     private Long fileSize;
@@ -176,7 +175,6 @@ public class UploadDto {
     @Schema(description = "총 청크 Index", example = "100")
     private Integer chunkTotalIndex;
     @Schema(
         description = "파일해쉬",
         example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
@@ -184,7 +182,6 @@ public class UploadDto {
     @Schema(description = "uuid")
     private UUID uuid;
   }
   @Schema(name = "UploadRes", description = "업로드 수행 후 리턴")
@@ -205,7 +202,6 @@ public class UploadDto {
       }
       return (double) (this.chunkIndex + 1) / (this.chunkTotalIndex + 1) * 100.0;
     }
   }
   @Schema(name = "DmlReturn", description = "수행 후 리턴")

View File

@@ -4,9 +4,6 @@ import com.kamco.cd.training.common.utils.FIleChecker;
 import com.kamco.cd.training.postgres.core.UploadSessionCoreService;
 import com.kamco.cd.training.upload.dto.UploadDto;
 import com.kamco.cd.training.upload.dto.UploadDto.DmlReturn;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.channels.FileChannel;
 import java.nio.file.Files;
@@ -66,8 +63,7 @@ public class UploadService {
     Integer chunkTotalIndex = upAddReqDto.getChunkTotalIndex();
     String status = "UPLOADING";
-    if( uploadDivi.equals("dataset"))
-    {
+    if (uploadDivi.equals("dataset")) {
       tmpDataSetDir = datasetTmpDir + uuid + "/";
       fianlDir = datasetDir + uuid + "/";
     }
@@ -83,8 +79,7 @@ public class UploadService {
     UploadDto.uploadDto dto = this.checkUploadSession(upAddReqDto, upRes);
     if (!upRes.getRes().equals("success")) return upRes;
-    if( dto != null )
-    {
+    if (dto != null) {
       tmpDataSetDir = dto.getTempPath();
       fianlDir = dto.getFinalPath();
     }
@@ -93,8 +88,7 @@ public class UploadService {
     if (!checkChunkFoler(upRes, tmpDataSetDir, fianlDir)) return upRes;
     // chunk저장하기
-    if( ! FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex ) )
-    {
+    if (!FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex)) {
       upRes.setRes("fail");
       upRes.setResMsg("chunkIndex:" + chunkIndex + " 업로드 애러");
     }
@@ -120,7 +114,6 @@ public class UploadService {
       upAddReqDto.setUploadId(dto.getUploadId());
       upAddReqDto.setStatus("COMPLETE");
       uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
     }
     return upRes;
@@ -132,26 +125,23 @@ public class UploadService {
     UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(uuid);
     try {
-      this.mergeChunks(dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex());
+      this.mergeChunks(
+          dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex());
     } catch (IOException e) {
       return new DmlReturn("mergingfail", "chunk파일 merge 애러");
     }
     return new DmlReturn("success", "병합(merge) 정상처리되었습니다.");
   }
-  public boolean checkChunkFoler(UploadDto.UploadRes upRes, String tmpDataSetDir, String fianlDir)
-  {
-    if( ! FIleChecker.mkDir(tmpDataSetDir) )
-    {
+  public boolean checkChunkFoler(UploadDto.UploadRes upRes, String tmpDataSetDir, String fianlDir) {
+    if (!FIleChecker.mkDir(tmpDataSetDir)) {
       upRes.setRes("fail");
       upRes.setRes("CHUNK 폴더 생성 ERROR");
       return false;
     }
-    if( ! FIleChecker.mkDir(fianlDir) )
-    {
+    if (!FIleChecker.mkDir(fianlDir)) {
       upRes.setRes("fail");
       upRes.setRes("업로드 완료 폴더 생성 ERROR");
       return false;
@@ -160,17 +150,18 @@ public class UploadService {
     return true;
   }
-  public UploadDto.uploadDto checkUploadSession(UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) {
+  public UploadDto.uploadDto checkUploadSession(
+      UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) {
     upRes.setRes("success");
     upRes.setResMsg("정상처리되었습니다.");
-    UploadDto.uploadDto dto = uploadSessionCoreService.findByDatasetUid(upAddReqDto.getDatasetId(), upAddReqDto.getUploadDivi());
+    UploadDto.uploadDto dto =
+        uploadSessionCoreService.findByDatasetUid(
+            upAddReqDto.getDatasetId(), upAddReqDto.getUploadDivi());
     if (upAddReqDto.getChunkIndex() == 0) {
-      if( dto != null )
-      {
+      if (dto != null) {
        upRes.setRes("duplicate");
        upRes.setResMsg("이미 진행중인 업로드세션입니다.");
        return dto;
@@ -179,8 +170,7 @@ public class UploadService {
       upAddReqDto.setStatus("UPLOADING");
       upRes.setUuid(upAddReqDto.getUuid().toString());
       uploadSessionCoreService.createUploadSession(upAddReqDto);
-    }
-    else {
+    } else {
       if (dto == null) {
         upRes.setRes("nosession");
         upRes.setResMsg("업로드 세션이 존재하지 않습니다.");
@@ -197,15 +187,15 @@ public class UploadService {
     upRes.setChunkIndex(upAddReqDto.getChunkIndex());
     upRes.setChunkTotalIndex(upAddReqDto.getChunkTotalIndex());
     return dto;
   }
-  public void mergeChunks(String tmpDir, String fianlDir, String fileName, int chunkTotalIndex) throws IOException {
+  public void mergeChunks(String tmpDir, String fianlDir, String fileName, int chunkTotalIndex)
+      throws IOException {
     Path outputPath = Paths.get(fianlDir, fileName);
-    try (FileChannel outChannel = FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
+    try (FileChannel outChannel =
+        FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
       for (int i = 0; i <= chunkTotalIndex; i++) {
         Path chunkPath = Paths.get(tmpDir, i + "");
@@ -223,8 +213,5 @@ public class UploadService {
       // 병합후 임시 폴더 삭제
       FIleChecker.deleteFolder(tmpDir);
     }
   }
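The per-chunk copy inside mergeChunks falls between the last two hunks and is not visible here. For reference, a merge loop of this shape is commonly written with FileChannel.transferFrom; the sketch below is an assumption for illustration (chunk files named 0..chunkTotalIndex, matching the visible code), not the project's actual implementation:

    // Hedged sketch of a FileChannel-based chunk merge; not the code from this PR.
    public void mergeChunks(String tmpDir, String finalDir, String fileName, int chunkTotalIndex)
        throws IOException {
      Path outputPath = Paths.get(finalDir, fileName);
      try (FileChannel outChannel =
          FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
        for (int i = 0; i <= chunkTotalIndex; i++) {
          // Chunks are saved under their index, e.g. <tmpDir>/0, <tmpDir>/1, ...
          Path chunkPath = Paths.get(tmpDir, String.valueOf(i));
          try (FileChannel inChannel = FileChannel.open(chunkPath, StandardOpenOption.READ)) {
            // Append the whole chunk at the current end of the output file.
            // A production version should loop until all inChannel.size() bytes are transferred.
            outChannel.transferFrom(inChannel, outChannel.size(), inChannel.size());
          }
        }
      }
      // After a successful merge the temporary chunk directory can be removed,
      // as the visible code does with FIleChecker.deleteFolder(tmpDir).
    }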