diff --git a/src/main/java/com/kamco/cd/training/dataset/dto/DatasetDto.java b/src/main/java/com/kamco/cd/training/dataset/dto/DatasetDto.java index 379ed8f..03cc556 100644 --- a/src/main/java/com/kamco/cd/training/dataset/dto/DatasetDto.java +++ b/src/main/java/com/kamco/cd/training/dataset/dto/DatasetDto.java @@ -313,24 +313,12 @@ public class DatasetDto { @NoArgsConstructor @AllArgsConstructor public static class AddReq { - private Long stage; - private String title; private String fileName; private String filePath; private Long fileSize; private String memo; } - @Getter - @Setter - @NoArgsConstructor - @AllArgsConstructor - @Builder - public static class DatasetFileDto { - private String fileName; - private String filePath; - } - @Getter @Setter @NoArgsConstructor diff --git a/src/main/java/com/kamco/cd/training/dataset/dto/DatasetObjDto.java b/src/main/java/com/kamco/cd/training/dataset/dto/DatasetObjDto.java index f55cc25..d7c33bb 100644 --- a/src/main/java/com/kamco/cd/training/dataset/dto/DatasetObjDto.java +++ b/src/main/java/com/kamco/cd/training/dataset/dto/DatasetObjDto.java @@ -145,6 +145,7 @@ public class DatasetObjDto { private String comparePath; private String targetPath; private String labelPath; + private String geojsonPath; private String mapSheetNum; private JsonNode geojson; private String fileName; diff --git a/src/main/java/com/kamco/cd/training/dataset/service/DatasetService.java b/src/main/java/com/kamco/cd/training/dataset/service/DatasetService.java index 6cc57e4..588982d 100644 --- a/src/main/java/com/kamco/cd/training/dataset/service/DatasetService.java +++ b/src/main/java/com/kamco/cd/training/dataset/service/DatasetService.java @@ -2,6 +2,8 @@ package com.kamco.cd.training.dataset.service; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import 
com.kamco.cd.training.common.enums.LearnDataType; import com.kamco.cd.training.common.exception.CustomApiException; import com.kamco.cd.training.common.service.FormatStorage; @@ -156,9 +158,10 @@ public class DatasetService { } } + @Transactional public ResponseObj insertDataset(@Valid AddReq addReq) { - Long datasetUid = 0L; // master id 값, 등록하면서 가져올 예정 + Long datasetUid = null; // master id 값, 등록하면서 가져올 예정 try { // 압축 해제 @@ -166,74 +169,27 @@ public class DatasetService { // 해제한 폴더 읽어서 데이터 저장 List> list = - getUnzipDatasetFiles(addReq.getFilePath() + addReq.getFileName().replace(".zip", "")); + getUnzipDatasetFiles( + addReq.getFilePath() + addReq.getFileName().replace(".zip", ""), "train"); int idx = 0; for (Map map : list) { - String comparePath = (String) map.get("input1"); - String targetPath = (String) map.get("input2"); - String labelPath = (String) map.get("label"); - JsonNode json = (JsonNode) map.get("label-json"); - - String fileName = Paths.get(comparePath).getFileName().toString(); - String[] fileNameStr = fileName.split("_"); - String compareYyyy = fileNameStr[1]; - String targetYyyy = fileNameStr[2]; - String mapSheetNum = fileNameStr[3]; - - if (idx == 0) { - String title = compareYyyy + "-" + targetYyyy; - String dataType = LearnDataType.PRODUCTION.getId(); // 만들어 넣는 건 다 제작 - Long stage = - datasetCoreService.getDatasetMaxStage( - Integer.parseInt(compareYyyy), Integer.parseInt(targetYyyy)) - + 1; - String uid = Paths.get(addReq.getFilePath()).getParent().getFileName().toString(); - - DatasetMngRegDto mngRegDto = - DatasetMngRegDto.builder() - .uid(uid) - .dataType(dataType) - .compareYyyy(Integer.parseInt(compareYyyy)) - .targetYyyy(Integer.parseInt(targetYyyy)) - .title(title) - .memo(addReq.getMemo()) - .roundNo(stage) - .totalSize(addReq.getFileSize()) - .datasetPath(addReq.getFilePath()) - .build(); - - datasetUid = datasetCoreService.insertDatasetMngData(mngRegDto); // tb_dataset 에 insert - } - - // datasetUid 로 obj 도 등록하기 - // Json 갯수만큼 
for문 돌려서 insert 해야 함 - for (JsonNode feature : json.path("features")) { - JsonNode prop = feature.path("properties"); - String compareClassCd = prop.path("before").asText(null); - String targetClassCd = prop.path("after").asText(null); - - DatasetObjRegDto objRegDto = - DatasetObjRegDto.builder() - .datasetUid(datasetUid) - .compareYyyy(Integer.parseInt(compareYyyy)) - .compareClassCd(compareClassCd) - .targetYyyy(Integer.parseInt(targetYyyy)) - .targetClassCd(targetClassCd) - .comparePath(comparePath) - .targetPath(targetPath) - .labelPath(labelPath) - .mapSheetNum(mapSheetNum) - .geojson(feature) - .fileName(fileName) - .build(); - - datasetCoreService.insertDatasetObj(objRegDto); - } - + datasetUid = + this.insertTrainTestData(map, addReq, idx, datasetUid, "train"); // train 데이터 insert idx++; } + List> testList = + getUnzipDatasetFiles( + addReq.getFilePath() + addReq.getFileName().replace(".zip", ""), "test"); + + int testIdx = 0; + for (Map test : testList) { + datasetUid = + this.insertTrainTestData(test, addReq, testIdx, datasetUid, "test"); // test 데이터 insert + testIdx++; + } + } catch (IOException e) { log.error(e.getMessage()); return new ResponseObj(ApiResponseCode.INTERNAL_SERVER_ERROR, e.getMessage()); @@ -241,11 +197,105 @@ public class DatasetService { return new ResponseObj(ApiResponseCode.OK, "업로드 성공하였습니다."); } - private List> getUnzipDatasetFiles(String unzipRootPath) { + @Transactional + public Long insertTrainTestData( + Map map, AddReq addReq, int idx, Long datasetUid, String subDir) { + ObjectMapper mapper = new ObjectMapper(); + String comparePath = (String) map.get("input1"); + String targetPath = (String) map.get("input2"); + String labelPath = (String) map.get("label"); + String geojsonPath = (String) map.get("geojson_path"); + Object labelJson = map.get("label-json"); + JsonNode json; - Path root = Paths.get(unzipRootPath).resolve("train"); + if (labelJson instanceof JsonNode jn) { + json = jn; + } else { + try { + json = 
mapper.readTree(labelJson.toString()); + } catch (IOException e) { + throw new RuntimeException("label_json parse error", e); + } + } + + String fileName = Paths.get(comparePath).getFileName().toString(); + String[] fileNameStr = fileName.split("_"); + String compareYyyy = fileNameStr[1]; + String targetYyyy = fileNameStr[2]; + String mapSheetNum = fileNameStr[3]; + + if (idx == 0 && subDir.equals("train")) { + String title = compareYyyy + "-" + targetYyyy; // 제목 : 비교년도-기준년도 + String dataType = LearnDataType.PRODUCTION.getId(); // 만들어 넣는 건 다 제작 + Long stage = + datasetCoreService.getDatasetMaxStage( + Integer.parseInt(compareYyyy), Integer.parseInt(targetYyyy)) + + 1; + String uid = addReq.getFileName().replace(".zip", ""); + + DatasetMngRegDto mngRegDto = + DatasetMngRegDto.builder() + .uid(uid) + .dataType(dataType) + .compareYyyy(Integer.parseInt(compareYyyy)) + .targetYyyy(Integer.parseInt(targetYyyy)) + .title(title) + .memo(addReq.getMemo()) + .roundNo(stage) + .totalSize(addReq.getFileSize()) + .datasetPath(addReq.getFilePath()) + .build(); + + datasetUid = datasetCoreService.insertDatasetMngData(mngRegDto); // tb_dataset 에 insert + } + + // datasetUid 로 obj 도 등록하기 + // Json 갯수만큼 for문 돌려서 insert 해야 함, features에 빈값 + if (json != null && json.path("features") != null && !json.path("features").isEmpty()) { + + for (JsonNode feature : json.path("features")) { + JsonNode prop = feature.path("properties"); + String compareClassCd = prop.path("before").asText(null); + String targetClassCd = prop.path("after").asText(null); + + // 한 개씩 자른 geojson을 FeatureCollection 으로 만들어서 넣기 + ObjectNode root = mapper.createObjectNode(); + root.put("type", "FeatureCollection"); + ArrayNode features = mapper.createArrayNode(); + features.add(feature); + root.set("features", features); + + DatasetObjRegDto objRegDto = + DatasetObjRegDto.builder() + .datasetUid(datasetUid) + .compareYyyy(Integer.parseInt(compareYyyy)) + .compareClassCd(compareClassCd) + 
.targetYyyy(Integer.parseInt(targetYyyy)) + .targetClassCd(targetClassCd) + .comparePath(comparePath) + .targetPath(targetPath) + .labelPath(labelPath) + .mapSheetNum(mapSheetNum) + .geojson(root) + .geojsonPath(geojsonPath) + .fileName(fileName) + .build(); + + if (subDir.equals("train")) { + datasetCoreService.insertDatasetObj(objRegDto); + } else { + datasetCoreService.insertDatasetTestObj(objRegDto); + } + } + } + + return datasetUid; + } + + private List> getUnzipDatasetFiles(String unzipRootPath, String subDir) { + + Path root = Paths.get(unzipRootPath).resolve(subDir); Map> grouped = new HashMap<>(); - long total_file_cnt = 0; for (String dirName : LABEL_DIRS) { Path dir = root.resolve(dirName); @@ -254,15 +304,6 @@ public class DatasetService { throw new IllegalStateException("폴더가 존재하지 않습니다 : " + dir); } - // 파일 갯수는 같아서 비교년도일 때만 file_cnt 를 가지고 오기 - // if(dirName.equals("input1")){ - // try (Stream stream = Files.list(dir)) { - // total_file_cnt = stream.filter(Files::isRegularFile).count(); - // } catch (IOException e) { - // throw new RuntimeException(e); - // } - // } - try (Stream stream = Files.list(dir)) { stream .filter(Files::isRegularFile) @@ -282,8 +323,8 @@ public class DatasetService { if ("label-json".equals(dirName)) { // json 파일이면 파싱 data.put("label-json", readJson(path)); + data.put("geojson_path", path.toAbsolutePath().toString()); } else { - // 나머지는 경로 or 파일명 data.put(dirName, path.toAbsolutePath().toString()); } }); diff --git a/src/main/java/com/kamco/cd/training/postgres/core/DatasetCoreService.java b/src/main/java/com/kamco/cd/training/postgres/core/DatasetCoreService.java index 8e2e25a..964b1c3 100644 --- a/src/main/java/com/kamco/cd/training/postgres/core/DatasetCoreService.java +++ b/src/main/java/com/kamco/cd/training/postgres/core/DatasetCoreService.java @@ -229,4 +229,8 @@ public class DatasetCoreService public String getFilePathByUUIDPathType(UUID uuid, String pathType) { return 
datasetObjRepository.getFilePathByUUIDPathType(uuid, pathType); } + + public void insertDatasetTestObj(DatasetObjRegDto objRegDto) { + datasetObjRepository.insertDatasetTestObj(objRegDto); + } } diff --git a/src/main/java/com/kamco/cd/training/postgres/entity/DatasetObjEntity.java b/src/main/java/com/kamco/cd/training/postgres/entity/DatasetObjEntity.java index a2c584f..476dc6f 100644 --- a/src/main/java/com/kamco/cd/training/postgres/entity/DatasetObjEntity.java +++ b/src/main/java/com/kamco/cd/training/postgres/entity/DatasetObjEntity.java @@ -10,7 +10,6 @@ import jakarta.persistence.Id; import jakarta.persistence.Table; import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.Size; -import java.math.BigDecimal; import java.time.ZonedDateTime; import java.util.UUID; import lombok.Getter; @@ -18,6 +17,7 @@ import lombok.Setter; import org.hibernate.annotations.ColumnDefault; import org.hibernate.annotations.JdbcTypeCode; import org.hibernate.type.SqlTypes; +import org.locationtech.jts.geom.Geometry; @Getter @Setter @@ -86,19 +86,16 @@ public class DatasetObjEntity { @Column(name = "uid") private String uid; - @Column(precision = 5, scale = 2) - private BigDecimal chnDtctP; - - @Column(precision = 5, scale = 2) - private BigDecimal bfClsPro; - - @Column(precision = 5, scale = 2) - private BigDecimal afClsPro; - @JdbcTypeCode(SqlTypes.JSON) @Column(name = "geo_jsonb", columnDefinition = "jsonb") private String geoJsonb; + @Column(name = "file_name") + private String fileName; + + @Column(name = "geom", columnDefinition = "geometry") + private Geometry geom; + public Basic toDto() { return new DatasetObjDto.Basic( this.objId, diff --git a/src/main/java/com/kamco/cd/training/postgres/entity/DatasetTestObjEntity.java b/src/main/java/com/kamco/cd/training/postgres/entity/DatasetTestObjEntity.java new file mode 100644 index 0000000..90b564b --- /dev/null +++ b/src/main/java/com/kamco/cd/training/postgres/entity/DatasetTestObjEntity.java @@ 
package com.kamco.cd.training.postgres.entity;

import com.kamco.cd.training.dataset.dto.DatasetObjDto.Basic;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.ZonedDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
import org.locationtech.jts.geom.Geometry;

/**
 * JPA entity for {@code tb_dataset_test_obj} — one labelled test object of a
 * training dataset. Mirrors {@link DatasetObjEntity} (the train-object table);
 * rows are written via a native insert in the repository, this mapping serves
 * reads and {@link #toDto()} conversion.
 */
@Getter
@Setter
@Entity
@Table(name = "tb_dataset_test_obj")
public class DatasetTestObjEntity {

  /** Surrogate primary key (identity column). */
  @Id
  @GeneratedValue(strategy = GenerationType.IDENTITY)
  @Column(name = "obj_id", nullable = false)
  private Long objId;

  /** FK to the dataset master row (tb_dataset). */
  @NotNull
  @Column(name = "dataset_uid", nullable = false)
  private Long datasetUid;

  @Column(name = "target_yyyy")
  private Integer targetYyyy;

  @Size(max = 255)
  @Column(name = "target_class_cd")
  private String targetClassCd;

  @Column(name = "compare_yyyy")
  private Integer compareYyyy;

  @Size(max = 255)
  @Column(name = "compare_class_cd")
  private String compareClassCd;

  @Size(max = 255)
  @Column(name = "target_path")
  private String targetPath;

  @Size(max = 255)
  @Column(name = "compare_path")
  private String comparePath;

  @Size(max = 255)
  @Column(name = "label_path")
  private String labelPath;

  @Size(max = 255)
  @Column(name = "geojson_path")
  private String geojsonPath;

  @Size(max = 255)
  @Column(name = "map_sheet_num")
  private String mapSheetNum;

  @ColumnDefault("now()")
  @Column(name = "created_dttm")
  private ZonedDateTime createdDttm;

  @Column(name = "created_uid")
  private Long createdUid;

  /** Soft-delete flag; defaults to false at the DB level. */
  @ColumnDefault("false")
  @Column(name = "deleted")
  private Boolean deleted;

  @Column(name = "uuid")
  private UUID uuid;

  @Size(max = 32)
  @Column(name = "uid")
  private String uid;

  /** Single-feature FeatureCollection stored as jsonb. */
  @JdbcTypeCode(SqlTypes.JSON)
  @Column(name = "geo_jsonb", columnDefinition = "jsonb")
  private String geoJsonb;

  @Column(name = "file_name")
  private String fileName;

  /** PostGIS geometry of the feature (written via ST_GeomFromGeoJSON). */
  @Column(name = "geom", columnDefinition = "geometry")
  private Geometry geom;

  /** Maps this entity onto the shared {@link Basic} DTO (geom/fileName are not part of it). */
  public Basic toDto() {
    return new Basic(
        this.objId,
        this.datasetUid,
        this.targetYyyy,
        this.targetClassCd,
        this.compareYyyy,
        this.compareClassCd,
        this.targetPath,
        this.comparePath,
        this.labelPath,
        this.geojsonPath,
        this.mapSheetNum,
        this.createdDttm,
        this.createdUid,
        this.deleted,
        this.uuid,
        this.geoJsonb);
  }
}
+import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; +import org.locationtech.jts.geom.Geometry; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Repository; +@Slf4j @Repository @RequiredArgsConstructor public class DatasetObjRepositoryImpl implements DatasetObjRepositoryCustom { @@ -50,6 +53,50 @@ public class DatasetObjRepositoryImpl implements DatasetObjRepositoryCustom { .fetchOne()); } + @Override + public void insertDatasetTestObj(DatasetObjRegDto objRegDto) { + ObjectMapper objectMapper = new ObjectMapper(); + String json; + Geometry geometry; + String geometryJson; + try { + json = objectMapper.writeValueAsString(objRegDto.getGeojson()); + geometryJson = + objectMapper.writeValueAsString( + objRegDto.getGeojson().path("features").get(0).path("geometry")); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + + try { + em.createNativeQuery( + """ + insert into tb_dataset_test_obj + (dataset_uid, target_yyyy, target_class_cd, + compare_yyyy, compare_class_cd, + target_path, compare_path, label_path, geo_jsonb, map_sheet_num, file_name, geom, geojson_path) + values + (?, ?, ?, ?, ?, ?, ?, ?, cast(? as jsonb), ?, ?, ST_SetSRID(ST_GeomFromGeoJSON(?), 5186), ?) 
+ """) + .setParameter(1, objRegDto.getDatasetUid()) + .setParameter(2, objRegDto.getTargetYyyy()) + .setParameter(3, objRegDto.getTargetClassCd()) + .setParameter(4, objRegDto.getCompareYyyy()) + .setParameter(5, objRegDto.getCompareClassCd()) + .setParameter(6, objRegDto.getTargetPath()) + .setParameter(7, objRegDto.getComparePath()) + .setParameter(8, objRegDto.getLabelPath()) + .setParameter(9, json) + .setParameter(10, objRegDto.getMapSheetNum()) + .setParameter(11, objRegDto.getFileName()) + .setParameter(12, geometryJson) + .setParameter(13, objRegDto.getGeojsonPath()) + .executeUpdate(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + @Override public Page searchDatasetObjectList(SearchReq searchReq) { Pageable pageable = searchReq.toPageable(); @@ -137,55 +184,43 @@ public class DatasetObjRepositoryImpl implements DatasetObjRepositoryCustom { public void insertDatasetObj(DatasetObjRegDto objRegDto) { ObjectMapper objectMapper = new ObjectMapper(); String json; + String geometryJson; try { json = objectMapper.writeValueAsString(objRegDto.getGeojson()); + geometryJson = + objectMapper.writeValueAsString( + objRegDto.getGeojson().path("features").get(0).path("geometry")); } catch (JsonProcessingException e) { throw new RuntimeException(e); } - // queryFactory - // .insert(datasetObjEntity) - // .columns( - // datasetObjEntity.datasetUid, - // datasetObjEntity.targetYyyy, - // datasetObjEntity.targetClassCd, - // datasetObjEntity.compareYyyy, - // datasetObjEntity.compareClassCd, - // datasetObjEntity.targetPath, - // datasetObjEntity.comparePath, - // datasetObjEntity.labelPath, - // datasetObjEntity.geoJsonb) - // .values( - // objRegDto.getDatasetUid(), - // objRegDto.getTargetYyyy(), - // objRegDto.getTargetClassCd(), - // objRegDto.getCompareYyyy(), - // objRegDto.getCompareClassCd(), - // objRegDto.getTargetPath(), - // objRegDto.getComparePath(), - // objRegDto.getLabelPath(), - // Expressions.stringTemplate("cast({0} as jsonb)", 
json)) - // .execute(); - em.createNativeQuery( - """ - insert into tb_dataset_obj - (dataset_uid, target_yyyy, target_class_cd, - compare_yyyy, compare_class_cd, - target_path, compare_path, label_path, geo_jsonb, map_sheet_num) - values - (?, ?, ?, ?, ?, ?, ?, ?, cast(? as jsonb), ?) - """) - .setParameter(1, objRegDto.getDatasetUid()) - .setParameter(2, objRegDto.getTargetYyyy()) - .setParameter(3, objRegDto.getTargetClassCd()) - .setParameter(4, objRegDto.getCompareYyyy()) - .setParameter(5, objRegDto.getCompareClassCd()) - .setParameter(6, objRegDto.getTargetPath()) - .setParameter(7, objRegDto.getComparePath()) - .setParameter(8, objRegDto.getLabelPath()) - .setParameter(9, json) - .setParameter(10, objRegDto.getMapSheetNum()) - .executeUpdate(); + try { + em.createNativeQuery( + """ + insert into tb_dataset_obj + (dataset_uid, target_yyyy, target_class_cd, + compare_yyyy, compare_class_cd, + target_path, compare_path, label_path, geo_jsonb, map_sheet_num, file_name, geom, geojson_path) + values + (?, ?, ?, ?, ?, ?, ?, ?, cast(? as jsonb), ?, ?, ST_SetSRID(ST_GeomFromGeoJSON(?), 5186), ?) 
+ """) + .setParameter(1, objRegDto.getDatasetUid()) + .setParameter(2, objRegDto.getTargetYyyy()) + .setParameter(3, objRegDto.getTargetClassCd()) + .setParameter(4, objRegDto.getCompareYyyy()) + .setParameter(5, objRegDto.getCompareClassCd()) + .setParameter(6, objRegDto.getTargetPath()) + .setParameter(7, objRegDto.getComparePath()) + .setParameter(8, objRegDto.getLabelPath()) + .setParameter(9, json) + .setParameter(10, objRegDto.getMapSheetNum()) + .setParameter(11, objRegDto.getFileName()) + .setParameter(12, geometryJson) + .setParameter(13, objRegDto.getGeojsonPath()) + .executeUpdate(); + } catch (Exception e) { + throw new RuntimeException(e); + } } @Override diff --git a/src/main/java/com/kamco/cd/training/postgres/repository/dataset/DatasetRepositoryImpl.java b/src/main/java/com/kamco/cd/training/postgres/repository/dataset/DatasetRepositoryImpl.java index 56347f5..9efc078 100644 --- a/src/main/java/com/kamco/cd/training/postgres/repository/dataset/DatasetRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/training/postgres/repository/dataset/DatasetRepositoryImpl.java @@ -233,12 +233,10 @@ public class DatasetRepositoryImpl implements DatasetRepositoryCustom { mngRegDto.getDatasetPath()) .execute(); - Long datasetUid = - queryFactory - .select(dataset.id) - .from(dataset) - .where(dataset.uid.eq(mngRegDto.getUid())) - .fetchOne(); - return datasetUid; + return queryFactory + .select(dataset.id) + .from(dataset) + .where(dataset.uid.eq(mngRegDto.getUid())) + .fetchOne(); } }