업로드 로직 재수정
This commit is contained in:
@@ -707,12 +707,17 @@ public class FIleChecker {
|
||||
}
|
||||
|
||||
public static void unzip(String fileName, String destDirectory) throws IOException {
|
||||
File destDir = new File(destDirectory);
|
||||
if (!destDir.exists()) {
|
||||
destDir.mkdirs(); // 대상 폴더가 없으면 생성
|
||||
}
|
||||
String zipFilePath = destDirectory + File.separator + fileName;
|
||||
|
||||
String zipFilePath = destDirectory + "/" + fileName;
|
||||
// zip 이름으로 폴더 생성 (확장자 제거)
|
||||
String folderName =
|
||||
fileName.endsWith(".zip") ? fileName.substring(0, fileName.length() - 4) : fileName;
|
||||
|
||||
File destDir = new File(destDirectory, folderName);
|
||||
|
||||
if (!destDir.exists()) {
|
||||
destDir.mkdirs();
|
||||
}
|
||||
|
||||
try (ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFilePath))) {
|
||||
ZipEntry zipEntry = zis.getNextEntry();
|
||||
|
||||
@@ -221,8 +221,7 @@ public class DatasetApiController {
|
||||
public ApiResponseDto<ApiResponseDto.ResponseObj> insertDataset(
|
||||
@RequestBody @Valid DatasetDto.AddReq addReq) {
|
||||
|
||||
return ApiResponseDto.ok(
|
||||
datasetService.insertDatasetTo86(addReq)); // TODO 서버 옮긴 이후에 다시 insertDataset 로 원복하기
|
||||
return ApiResponseDto.ok(datasetService.insertDataset(addReq));
|
||||
}
|
||||
|
||||
@Operation(summary = "객체별 파일 Path 조회", description = "파일 Path 조회")
|
||||
|
||||
@@ -168,6 +168,7 @@ public class DatasetService {
|
||||
}
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
@Transactional
|
||||
public ResponseObj insertDatasetTo86(@Valid AddReq addReq) {
|
||||
|
||||
@@ -226,6 +227,17 @@ public class DatasetService {
|
||||
idx++;
|
||||
}
|
||||
|
||||
List<Map<String, Object>> valList =
|
||||
getUnzipDatasetFiles(
|
||||
addReq.getFilePath() + addReq.getFileName().replace(".zip", ""), "val");
|
||||
|
||||
int valIdx = 0;
|
||||
for (Map<String, Object> valid : valList) {
|
||||
datasetUid =
|
||||
this.insertTrainTestData(valid, addReq, valIdx, datasetUid, "val"); // val 데이터 insert
|
||||
valIdx++;
|
||||
}
|
||||
|
||||
List<Map<String, Object>> testList =
|
||||
getUnzipDatasetFiles(
|
||||
addReq.getFilePath() + addReq.getFileName().replace(".zip", ""), "test");
|
||||
@@ -332,6 +344,8 @@ public class DatasetService {
|
||||
|
||||
if (subDir.equals("train")) {
|
||||
datasetCoreService.insertDatasetObj(objRegDto);
|
||||
} else if (subDir.equals("val")) {
|
||||
datasetCoreService.insertDatasetValObj(objRegDto);
|
||||
} else {
|
||||
datasetCoreService.insertDatasetTestObj(objRegDto);
|
||||
}
|
||||
@@ -404,6 +418,7 @@ public class DatasetService {
|
||||
return datasetCoreService.getFilePathByUUIDPathType(uuid, pathType);
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
private List<Map<String, Object>> getUnzipDatasetFilesTo86(String unzipRootPath, String subDir) {
|
||||
|
||||
// String root = Paths.get(unzipRootPath)
|
||||
|
||||
@@ -242,4 +242,8 @@ public class DatasetCoreService
|
||||
|
||||
entity.setStatus(LearnDataRegister.COMPLETED.getId());
|
||||
}
|
||||
|
||||
// Persists one validation-split ("val") dataset object. Thin delegate to the
// custom repository, which performs a native insert into tb_dataset_val_obj
// (see DatasetObjRepositoryImpl.insertDatasetValObj).
public void insertDatasetValObj(DatasetObjRegDto objRegDto) {
    datasetObjRepository.insertDatasetValObj(objRegDto);
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,117 @@
|
||||
package com.kamco.cd.training.postgres.entity;

import com.kamco.cd.training.dataset.dto.DatasetObjDto.Basic;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
import org.locationtech.jts.geom.Geometry;

/**
 * JPA entity mapped to table {@code tb_dataset_val_obj}: one object belonging to the
 * validation ("val") split of a dataset.
 *
 * <p>NOTE(review): rows for this table are written through a native SQL insert in
 * {@code DatasetObjRepositoryImpl.insertDatasetValObj}, not through this entity, so the
 * entity is effectively read-side only — confirm before relying on JPA writes.
 */
@Getter
@Setter
@Entity
@Table(name = "tb_dataset_val_obj")
public class DatasetValObjEntity {

    /** Surrogate primary key, generated by the database (IDENTITY column). */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "obj_id", nullable = false)
    private Long objId;

    /** Owning dataset identifier (required). */
    @NotNull
    @Column(name = "dataset_uid", nullable = false)
    private Long datasetUid;

    // Year of the "target" imagery for this object.
    @Column(name = "target_yyyy")
    private Integer targetYyyy;

    // Class code associated with the target imagery.
    @Size(max = 255)
    @Column(name = "target_class_cd")
    private String targetClassCd;

    // Year of the "compare" imagery for this object.
    @Column(name = "compare_yyyy")
    private Integer compareYyyy;

    // Class code associated with the compare imagery.
    @Size(max = 255)
    @Column(name = "compare_class_cd")
    private String compareClassCd;

    // File-system path of the target image.
    @Size(max = 255)
    @Column(name = "target_path")
    private String targetPath;

    // File-system path of the compare image.
    @Size(max = 255)
    @Column(name = "compare_path")
    private String comparePath;

    // File-system path of the label file.
    @Size(max = 255)
    @Column(name = "label_path")
    private String labelPath;

    // File-system path of the GeoJSON file.
    @Size(max = 255)
    @Column(name = "geojson_path")
    private String geojsonPath;

    // Map sheet number (string code).
    @Size(max = 255)
    @Column(name = "map_sheet_num")
    private String mapSheetNum;

    // Creation timestamp; DB default now().
    @ColumnDefault("now()")
    @Column(name = "created_dttm")
    private ZonedDateTime createdDttm;

    // Id of the creating user.
    @Column(name = "created_uid")
    private Long createdUid;

    // Soft-delete flag; DB default false.
    @ColumnDefault("false")
    @Column(name = "deleted")
    private Boolean deleted;

    // Public UUID of the object.
    @Column(name = "uuid")
    private UUID uuid;

    // Short string id (max 32 chars); distinct from uuid.
    @Size(max = 32)
    @Column(name = "uid")
    private String uid;

    // Full GeoJSON document stored as jsonb, kept as a raw String and mapped
    // through Hibernate's JSON JDBC type.
    @JdbcTypeCode(SqlTypes.JSON)
    @Column(name = "geo_jsonb", columnDefinition = "jsonb")
    private String geoJsonb;

    // Original file name of the object.
    @Column(name = "file_name")
    private String fileName;

    // Spatial geometry column (JTS mapping over a PostGIS geometry).
    @Column(name = "geom", columnDefinition = "geometry")
    private Geometry geom;

    /**
     * Converts this entity to the shared {@link Basic} DTO.
     *
     * <p>NOTE(review): {@code uid}, {@code fileName} and {@code geom} are not carried
     * over — presumably {@code Basic} has no fields for them; confirm against the DTO.
     */
    public Basic toDto() {
        return new Basic(
                this.objId,
                this.datasetUid,
                this.targetYyyy,
                this.targetClassCd,
                this.compareYyyy,
                this.compareClassCd,
                this.targetPath,
                this.comparePath,
                this.labelPath,
                this.geojsonPath,
                this.mapSheetNum,
                this.createdDttm,
                this.createdUid,
                this.deleted,
                this.uuid,
                this.geoJsonb);
    }
}
|
||||
@@ -22,4 +22,6 @@ public interface DatasetObjRepositoryCustom {
|
||||
// Returns the stored file path for the object identified by uuid, selected by
// pathType (used by DatasetService.getFilePathByUUIDPathType).
String getFilePathByUUIDPathType(UUID uuid, String pathType);

// Native insert of one test-split dataset object.
void insertDatasetTestObj(DatasetObjRegDto objRegDto);

// Native insert of one validation-split dataset object
// (implemented in DatasetObjRepositoryImpl against tb_dataset_val_obj).
void insertDatasetValObj(DatasetObjRegDto objRegDto);
|
||||
}
|
||||
|
||||
@@ -97,6 +97,49 @@ public class DatasetObjRepositoryImpl implements DatasetObjRepositoryCustom {
|
||||
}
|
||||
}
|
||||
|
||||
@Override
public void insertDatasetValObj(DatasetObjRegDto objRegDto) {
    // Serialize the whole GeoJSON document (goes into geo_jsonb) and, separately,
    // the geometry of the FIRST feature only (used to build the geom column).
    ObjectMapper objectMapper = new ObjectMapper();
    String json;
    String geometryJson;
    try {
        json = objectMapper.writeValueAsString(objRegDto.getGeojson());
        geometryJson =
            objectMapper.writeValueAsString(
                objRegDto.getGeojson().path("features").get(0).path("geometry"));
    } catch (JsonProcessingException e) {
        // NOTE(review): rewrapped without a context message; also get(0) will NPE if
        // "features" is absent/empty — confirm callers always pass a FeatureCollection
        // with at least one feature.
        throw new RuntimeException(e);
    }

    try {
        // Native insert into the validation-object table. The first feature's geometry
        // is parsed from GeoJSON and tagged with SRID 5186; geo_jsonb receives the
        // full document via cast(? as jsonb).
        em.createNativeQuery(
                """
                insert into tb_dataset_val_obj
                (dataset_uid, target_yyyy, target_class_cd,
                compare_yyyy, compare_class_cd,
                target_path, compare_path, label_path, geo_jsonb, map_sheet_num, file_name, geom, geojson_path)
                values
                (?, ?, ?, ?, ?, ?, ?, ?, cast(? as jsonb), ?, ?, ST_SetSRID(ST_GeomFromGeoJSON(?), 5186), ?)
                """)
            .setParameter(1, objRegDto.getDatasetUid())
            .setParameter(2, objRegDto.getTargetYyyy())
            .setParameter(3, objRegDto.getTargetClassCd())
            .setParameter(4, objRegDto.getCompareYyyy())
            .setParameter(5, objRegDto.getCompareClassCd())
            .setParameter(6, objRegDto.getTargetPath())
            .setParameter(7, objRegDto.getComparePath())
            .setParameter(8, objRegDto.getLabelPath())
            .setParameter(9, json)
            .setParameter(10, objRegDto.getMapSheetNum())
            .setParameter(11, objRegDto.getFileName())
            .setParameter(12, geometryJson)
            .setParameter(13, objRegDto.getGeojsonPath())
            .executeUpdate();
    } catch (Exception e) {
        // NOTE(review): broad catch rewrapped as bare RuntimeException — consider a
        // narrower type with a context message in a follow-up (keeps callers working).
        throw new RuntimeException(e);
    }
}
|
||||
|
||||
@Override
|
||||
public Page<DatasetObjEntity> searchDatasetObjectList(SearchReq searchReq) {
|
||||
Pageable pageable = searchReq.toPageable();
|
||||
|
||||
@@ -234,8 +234,8 @@ public class UploadService {
|
||||
try {
|
||||
FIleChecker.deleteFolder(tmpDir);
|
||||
// 108 에서 86 서버로 이동
|
||||
log.info("################# server move 108 -> 86");
|
||||
FIleChecker.uploadTo86(outputPath);
|
||||
// log.info("################# server move 108 -> 86");
|
||||
// FIleChecker.uploadTo86(outputPath);
|
||||
} catch (Exception e) {
|
||||
log.warn("tmpDir delete failed (merge already succeeded): tmpDir={}", tmpDir, e);
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user