Merge remote-tracking branch 'origin/feat/infer_dev_260107' into feat/infer_dev_260107

This commit is contained in:
DanielLee
2026-01-09 10:11:52 +09:00
26 changed files with 1443 additions and 52 deletions

View File

@@ -1,5 +1,6 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
@@ -16,6 +17,7 @@ import jakarta.persistence.EntityNotFoundException;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.locationtech.jts.io.geojson.GeoJsonWriter;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -28,6 +30,9 @@ public class InferenceResultCoreService {
private final MapSheetLearnRepository mapSheetLearnRepository;
private final MapInkx5kRepository mapInkx5kRepository;
private final ObjectMapper objectMapper = new ObjectMapper();
private final GeoJsonWriter geoJsonWriter = new GeoJsonWriter();
/**
* Inference management list.
*
@@ -39,6 +44,20 @@ public class InferenceResultCoreService {
return list.map(MapSheetLearnEntity::toDto);
}
/**
 * Creates change-detection execution info from the registration request.
 *
 * @param req registration request carrying the title and the three model UIDs
 */
public void saveInferenceInfo(InferenceResultDto.RegReq req) {
    MapSheetLearnEntity mapSheetLearnEntity = new MapSheetLearnEntity();
    mapSheetLearnEntity.setTitle(req.getTitle());
    mapSheetLearnEntity.setM1ModelUid(req.getModel1Uid());
    mapSheetLearnEntity.setM2ModelUid(req.getModel2Uid());
    mapSheetLearnEntity.setM3ModelUid(req.getModel3Uid());
    // Persist the populated entity. The original left the save call commented
    // out, which made this method a silent no-op.
    mapSheetLearnRepository.save(mapSheetLearnEntity);
}
/****/
/**

View File

@@ -1,11 +1,9 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import com.kamco.cd.kamcoback.postgres.repository.model.ModelMngRepository;
import jakarta.persistence.EntityNotFoundException;
import java.time.LocalDate;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@@ -26,14 +24,24 @@ public class ModelMngCoreService {
searchReq, startDate, endDate, modelType, searchVal);
}
public ApiResponseDto.ResponseObj removeModel(String modelVer) {
public void removeModel(UUID uuid) {
/*
ModelMngEntity entity =
modelMngRepository
.findByModelUid(modelVer)
.orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다. ver: " + modelVer));
.findByModelUuid(uuid)
.orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다."));
*/
// id 코드 deleted = true 업데이트
entity.deleted();
return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "");
// entity.deleted();
modelMngRepository.deleteByModelUuid(uuid);
}
/**
 * Registers a new model from the given request.
 *
 * @param addReq model registration payload (version, type, file info, memo, uuid)
 */
public void insertModel(ModelMngDto.AddReq addReq) {
    // Delegates straight to the repository; the commented-out manual entity
    // construction from the original has been removed as dead code.
    modelMngRepository.insertModel(addReq);
}
}

View File

@@ -0,0 +1,54 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.repository.upload.UploadSessionRepository;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
/**
 * Core service for (chunked) file-upload sessions. Thin transactional facade
 * over {@link UploadSessionRepository}; all persistence logic lives in the
 * repository layer.
 */
@Service
@RequiredArgsConstructor
public class UploadSessionCoreService {

    private final UploadSessionRepository uploadSessionRepository;

    /**
     * Creates a new upload-session row from the request.
     * (The original's large commented-out entity-building block was dead code
     * and has been removed.)
     *
     * @param addReq upload session attributes (upload id, dataset, file info, chunk info, uuid)
     */
    public void createUploadSession(UploadDto.UploadAddReq addReq) {
        uploadSessionRepository.insertUploadSession(addReq);
    }

    /**
     * Finds an upload session by dataset id and upload division.
     *
     * @param datasetId  dataset identifier
     * @param uploadDivi upload division/category code
     * @return matching session DTO, or {@code null} when none exists
     */
    public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
        return uploadSessionRepository.findByDatasetUid(datasetId, uploadDivi);
    }

    /**
     * Finds an upload session by its UUID.
     *
     * @param uuid session UUID
     * @return matching session DTO, or {@code null} when none exists
     */
    public UploadDto.uploadDto findByUuid(UUID uuid) {
        return uploadSessionRepository.findByUuid(uuid);
    }

    /**
     * Updates the chunk index and status of an existing upload session.
     *
     * @param addReq carries the upload id plus the new chunk index and status
     */
    public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
        uploadSessionRepository.updateUploadSessionStatus(addReq);
    }
}

View File

@@ -8,6 +8,7 @@ import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
@@ -52,6 +53,9 @@ public class ModelMngEntity extends CommonDateEntity {
@Column(name = "memo")
private String memo;
@Column(name = "uuid")
private UUID uuid;
// Marks this model as logically deleted (soft delete); the row itself is kept.
public void deleted() {
this.deleted = true;
}

View File

@@ -0,0 +1,89 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.OffsetDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
/**
 * JPA entity mapped to table {@code tb_upload_session}: one row per (chunked)
 * file-upload session, tracking file metadata, paths, chunk progress and status.
 *
 * <p>NOTE(review): both {@code chunkIndex}/{@code chunkTotalIndex} and
 * {@code uploadedChunks}/{@code totalChunks} exist — they look redundant;
 * confirm against the DB schema which pair is authoritative.
 */
@Getter
@Setter
@Entity
@Table(name = "tb_upload_session")
public class UploadSessionEntity {
// Client-supplied upload identifier; primary key.
@Id
@Size(max = 100)
@Column(name = "upload_id", nullable = false, length = 100)
private String uploadId;
// When the upload finished; presumably null while still in progress — TODO confirm.
@Column(name = "completed_dttm")
private OffsetDateTime completedDttm;
// Row creation timestamp; DB default now().
@NotNull
@ColumnDefault("now()")
@Column(name = "created_dttm", nullable = false)
private OffsetDateTime createdDttm;
// Owning dataset (nullable FK-style reference).
@Column(name = "dataset_id")
private Long datasetId;
// Free-form error text (unbounded column).
@Column(name = "error_message", length = Integer.MAX_VALUE)
private String errorMessage;
@Size(max = 255)
@Column(name = "file_name")
private String fileName;
// File size in bytes; DB default 0.
@ColumnDefault("0")
@Column(name = "file_size")
private Long fileSize;
// Destination path after the upload completes.
@Size(max = 500)
@Column(name = "final_path", length = 500)
private String finalPath;
// Session status code (string, max 20 chars).
@Size(max = 20)
@Column(name = "status", length = 20)
private String status;
// Staging path used while chunks are being received.
@Size(max = 500)
@Column(name = "temp_path", length = 500)
private String tempPath;
@Column(name = "chunk_total_index")
private Integer chunkTotalIndex;
// Last-update timestamp; DB default now().
@NotNull
@ColumnDefault("now()")
@Column(name = "updated_dttm", nullable = false)
private OffsetDateTime updatedDttm;
@Column(name = "chunk_index")
private Integer chunkIndex;
// Upload division/category code.
@Size(max = 50)
@Column(name = "upload_divi", length = 50)
private String uploadDivi;
// Content hash of the uploaded file.
@Size(max = 300)
@Column(name = "file_hash", length = 300)
private String fileHash;
@Column(name = "total_chunks")
private Integer totalChunks;
@Column(name = "uploaded_chunks")
private Integer uploadedChunks;
// Public identifier; generated by the DB (uuid_generate_v4()) when not supplied.
@NotNull
@ColumnDefault("uuid_generate_v4()")
@Column(name = "uuid", nullable = false)
private UUID uuid;
}

View File

@@ -57,7 +57,6 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
QLabelingAssignmentEntity.labelingAssignmentEntity;
private final QMemberEntity memberEntity = QMemberEntity.memberEntity;
/**
* Change-detection year select-box lookup.
*
@@ -81,8 +80,7 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
}
/**
* 라벨링 작업관리 목록 조회
* (복잡한 집계 쿼리로 인해 DTO 직접 반환)
* 라벨링 작업관리 목록 조회 (복잡한 집계 쿼리로 인해 DTO 직접 반환)
*
* @param searchReq 검색 조건
* @return 라벨링 작업관리 목록 페이지
@@ -342,7 +340,9 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
mapSheetAnalInferenceEntity
.uuid
.eq(uuid)
.and(labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id)))
.and(
labelingAssignmentEntity.analUid.eq(
mapSheetAnalInferenceEntity.id)))
.innerJoin(memberEntity)
.on(whereSubBuilder)
.where(whereBuilder)
@@ -387,8 +387,7 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
}
/**
* 작업배정 상세조회
* (복잡한 집계 쿼리로 인해 DTO 직접 반환)
* 작업배정 상세조회 (복잡한 집계 쿼리로 인해 DTO 직접 반환)
*
* @param uuid 작업배정 UUID
* @return 작업배정 상세 정보

View File

@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import java.time.LocalDate;
import java.util.Optional;
import java.util.UUID;
import org.springframework.data.domain.Page;
public interface ModelMngRepositoryCustom {
@@ -16,4 +17,10 @@ public interface ModelMngRepositoryCustom {
String searchVal);
Optional<ModelMngEntity> findByModelUid(String modelVer);
Optional<ModelMngEntity> findByModelUuid(UUID uuid);
void insertModel(ModelMngDto.AddReq addReq);
void deleteByModelUuid(UUID uuid);
}

View File

@@ -14,6 +14,7 @@ import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberPath;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.validation.Valid;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.LocalDateTime;
@@ -22,6 +23,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
@@ -67,8 +69,6 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
.select(
Projections.constructor(
ModelMngDto.ModelList.class,
Expressions.numberTemplate(
Integer.class, "row_number() over(order by {0} desc)", sortColumn),
modelMngEntity.modelVer,
modelMngEntity.fileName,
modelMngEntity.modelType,
@@ -80,7 +80,8 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
roundNumericToPercent(modelResultMetricEntity.loss),
roundNumericToPercent(modelResultMetricEntity.iou),
modelMngEntity.memo,
modelMngEntity.deleted))
modelMngEntity.deleted,
modelMngEntity.uuid))
.from(modelMngEntity)
.innerJoin(modelResultMetricEntity)
.on(modelMngEntity.modelUid.eq(modelResultMetricEntity.modelUid))
@@ -116,6 +117,15 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
.fetchOne());
}
/**
 * Looks up a model by its public UUID.
 *
 * @param uuid public identifier of the model
 * @return the entity, or empty when no row matches
 */
@Override
public Optional<ModelMngEntity> findByModelUuid(UUID uuid) {
    // Debug System.out.println removed; use a logger if tracing is needed.
    return Optional.ofNullable(
        queryFactory.selectFrom(modelMngEntity).where(modelMngEntity.uuid.eq(uuid)).fetchOne());
}
private BooleanExpression eventEndedAtBetween(
LocalDate startDate, LocalDate endDate, String sortColumn) {
if (Objects.isNull(startDate) || Objects.isNull(endDate)) {
@@ -155,4 +165,36 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
// Converts a 0–1 ratio column to a percentage rounded to 2 decimal places,
// delegating to the database's round() function via a QueryDSL template.
private Expression<BigDecimal> roundNumericToPercent(NumberPath<Double> ratio) {
return Expressions.numberTemplate(BigDecimal.class, "function('round', {0} * 100, 2)", ratio);
}
/**
 * Inserts a model row via a QueryDSL insert clause (bypasses the persistence context).
 *
 * @param addReq validated model attributes to persist
 */
@Override
public void insertModel(@Valid ModelMngDto.AddReq addReq) {
    // Unused local `execCount` removed; the executed-row count was never checked.
    queryFactory
        .insert(modelMngEntity)
        .columns(
            modelMngEntity.modelVer,
            modelMngEntity.modelType,
            modelMngEntity.filePath,
            modelMngEntity.fileName,
            modelMngEntity.memo,
            modelMngEntity.uuid)
        .values(
            addReq.getModelVer(),
            addReq.getModelType(),
            addReq.getFilePath(),
            addReq.getFileName(),
            addReq.getMemo(),
            addReq.getUuid())
        .execute();
}
/**
 * Soft-deletes a model by UUID: sets {@code deleted = true} instead of removing the row.
 *
 * @param uuid public identifier of the model
 */
@Override
public void deleteByModelUuid(UUID uuid) {
    // Unused local `execCount` removed; the executed-row count was never checked.
    queryFactory
        .update(modelMngEntity)
        .set(modelMngEntity.deleted, true)
        .where(modelMngEntity.uuid.eq(uuid))
        .execute();
}
}

View File

@@ -0,0 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.upload;
import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity;
import org.springframework.data.jpa.repository.JpaRepository;
/**
 * Spring Data repository for upload sessions. The {@code String} id corresponds
 * to the {@code upload_id} primary key; custom QueryDSL operations come from
 * {@code UploadSessionRepositoryCustom}.
 */
public interface UploadSessionRepository
extends JpaRepository<UploadSessionEntity, String>, UploadSessionRepositoryCustom {}

View File

@@ -0,0 +1,15 @@
package com.kamco.cd.kamcoback.postgres.repository.upload;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import java.util.UUID;
/** Custom (QueryDSL-backed) persistence operations for upload sessions. */
public interface UploadSessionRepositoryCustom {
// Inserts a new upload-session row from the request.
void insertUploadSession(UploadDto.UploadAddReq addReq);
// Finds one session by dataset id + upload division; null when absent.
UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi);
// Finds one session by its UUID; null when absent.
UploadDto.uploadDto findByUuid(UUID uuid);
// Updates chunk index and status for the session matching the request's upload id.
void updateUploadSessionStatus(UploadDto.UploadAddReq addReq);
}

View File

@@ -0,0 +1,132 @@
package com.kamco.cd.kamcoback.postgres.repository.upload;
import static com.kamco.cd.kamcoback.postgres.entity.QUploadSessionEntity.uploadSessionEntity;
import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import java.util.UUID;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
implements UploadSessionRepositoryCustom {
private final JPAQueryFactory queryFactory;
private final StringExpression NULL_STRING = Expressions.stringTemplate("cast(null as text)");
@PersistenceContext private EntityManager em;
public UploadSessionRepositoryImpl(JPAQueryFactory queryFactory) {
super(UploadSessionEntity.class);
this.queryFactory = queryFactory;
}
@Override
public void insertUploadSession(UploadDto.UploadAddReq addReq) {
long execCnt =
queryFactory
.insert(uploadSessionEntity)
.columns(
uploadSessionEntity.uploadId,
uploadSessionEntity.datasetId,
uploadSessionEntity.fileName,
uploadSessionEntity.fileSize,
uploadSessionEntity.finalPath,
uploadSessionEntity.status,
uploadSessionEntity.tempPath,
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.uploadDivi,
uploadSessionEntity.fileHash,
uploadSessionEntity.uuid)
.values(
addReq.getUploadId(),
addReq.getDatasetId(),
addReq.getFileName(),
addReq.getFileSize(),
addReq.getFinalPath(),
addReq.getStatus(),
addReq.getTempPath(),
addReq.getChunkIndex(),
addReq.getChunkTotalIndex(),
addReq.getUploadDivi(),
addReq.getFileHash(),
addReq.getUuid())
.execute();
}
@Override
public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
UploadDto.uploadDto foundContent =
queryFactory
.select(
Projections.constructor(
UploadDto.uploadDto.class,
uploadSessionEntity.uploadId,
uploadSessionEntity.datasetId,
uploadSessionEntity.fileName,
uploadSessionEntity.fileSize,
uploadSessionEntity.finalPath,
uploadSessionEntity.uploadDivi,
uploadSessionEntity.status,
uploadSessionEntity.tempPath,
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.fileHash,
uploadSessionEntity.uuid))
.from(uploadSessionEntity)
.where(
uploadSessionEntity
.datasetId
.eq(datasetId)
.and(uploadSessionEntity.uploadDivi.eq(uploadDivi)))
.limit(1)
.fetchOne();
return foundContent;
}
@Override
public UploadDto.uploadDto findByUuid(UUID uuid) {
UploadDto.uploadDto foundContent =
queryFactory
.select(
Projections.constructor(
UploadDto.uploadDto.class,
uploadSessionEntity.uploadId,
uploadSessionEntity.datasetId,
uploadSessionEntity.fileName,
uploadSessionEntity.fileSize,
uploadSessionEntity.finalPath,
uploadSessionEntity.uploadDivi,
uploadSessionEntity.status,
uploadSessionEntity.tempPath,
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.fileHash,
uploadSessionEntity.uuid))
.from(uploadSessionEntity)
.where(uploadSessionEntity.uuid.eq(uuid))
.limit(1)
.fetchOne();
return foundContent;
}
public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
long fileCount =
queryFactory
.update(uploadSessionEntity)
.set(uploadSessionEntity.chunkIndex, addReq.getChunkIndex())
.set(uploadSessionEntity.status, addReq.getStatus())
.where(uploadSessionEntity.uploadId.eq(addReq.getUploadId()))
.execute();
}
}