Merge branch 'feat/dean/sample' into develop

2025-12-02 14:32:36 +09:00
4 changed files with 28 additions and 6 deletions

@@ -1,5 +1,8 @@
package com.kamco.cd.kamcoback.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import java.time.Duration;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.CacheManager;
@@ -38,6 +41,14 @@ public class RedisConfig {
return new LettuceConnectionFactory(redisConfig);
}
@Bean
public ObjectMapper redisObjectMapper() {
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.registerModule(new JavaTimeModule());
objectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
return objectMapper;
}
@Bean
public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory connectionFactory) {
RedisTemplate<String, Object> template = new RedisTemplate<>();
@@ -47,9 +58,11 @@ public class RedisConfig {
template.setKeySerializer(new StringRedisSerializer());
template.setHashKeySerializer(new StringRedisSerializer());
// Serialize values as JSON
template.setValueSerializer(new GenericJackson2JsonRedisSerializer());
template.setHashValueSerializer(new GenericJackson2JsonRedisSerializer());
// Serialize values as JSON (including JavaTimeModule)
GenericJackson2JsonRedisSerializer serializer =
new GenericJackson2JsonRedisSerializer(redisObjectMapper());
template.setValueSerializer(serializer);
template.setHashValueSerializer(serializer);
template.afterPropertiesSet();
return template;
@@ -58,6 +71,9 @@ public class RedisConfig {
// Default Redis cache settings
@Bean
public CacheManager cacheManager(RedisConnectionFactory connectionFactory) {
GenericJackson2JsonRedisSerializer serializer =
new GenericJackson2JsonRedisSerializer(redisObjectMapper());
RedisCacheConfiguration config =
RedisCacheConfiguration.defaultCacheConfig()
.entryTtl(Duration.ofHours(1)) // default TTL: 1 hour
@@ -65,8 +81,7 @@ public class RedisConfig {
RedisSerializationContext.SerializationPair.fromSerializer(
new StringRedisSerializer()))
.serializeValuesWith(
RedisSerializationContext.SerializationPair.fromSerializer(
new GenericJackson2JsonRedisSerializer()));
RedisSerializationContext.SerializationPair.fromSerializer(serializer));
return RedisCacheManager.builder(connectionFactory).cacheDefaults(config).build();
}
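Why the new redisObjectMapper() bean matters: a default Jackson ObjectMapper cannot serialize java.time types such as ZonedDateTime, so values carrying them fail to cache until JavaTimeModule is registered. The following standalone sketch (not part of this commit; class and method names are illustrative) shows the effect of the two configuration calls used above.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import java.time.ZonedDateTime;

public class JavaTimeMapperDemo {
    public static void main(String[] args) throws Exception {
        // Plain mapper: throws InvalidDefinitionException for java.time types
        ObjectMapper plain = new ObjectMapper();
        try {
            plain.writeValueAsString(ZonedDateTime.now());
        } catch (Exception e) {
            System.out.println("default mapper failed: " + e.getMessage());
        }

        // Mapper configured like redisObjectMapper(): java.time values are
        // written as ISO-8601 strings instead of numeric timestamps
        ObjectMapper configured = new ObjectMapper();
        configured.registerModule(new JavaTimeModule());
        configured.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
        System.out.println(configured.writeValueAsString(ZonedDateTime.now()));
    }
}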

@@ -182,6 +182,7 @@ public class InferenceResultDto {
private Clazz target;
private MapSheet mapSheet;
private Coordinate center;
@JsonFormatDttm private ZonedDateTime updatedDttm;
}
// MAP NO
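@JsonFormatDttm is a project-specific annotation whose definition is not shown in this diff. Purely as a hypothetical illustration (the pattern, targets, and retention below are assumptions), a composed Jackson annotation of this kind is commonly defined as in the sketch below, so every annotated date-time field shares one output format.

import com.fasterxml.jackson.annotation.JacksonAnnotationsInside;
import com.fasterxml.jackson.annotation.JsonFormat;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

// Hypothetical sketch only: the real @JsonFormatDttm in this repository may differ.
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER})
@JacksonAnnotationsInside
@JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss")
public @interface JsonFormatDttm {}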

@@ -7,6 +7,7 @@ import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -66,6 +67,11 @@ public class InferenceResultService {
* @param searchReq
* @return
*/
@Cacheable(
value = "inferenceResultWithGeom",
key =
"#id + '_' + #searchReq.page + '_' + #searchReq.size + '_' + (#searchReq.sort != null ? #searchReq.sort : 'none') + '_' + (#searchReq.targetClass != null ? #searchReq.targetClass : 'none') + '_' + (#searchReq.compareClass != null ? #searchReq.compareClass : 'none') + '_' + (#searchReq.mapSheetNum != null ? #searchReq.mapSheetNum.toString() : 'none')",
unless = "#result == null || #result.isEmpty()")
public Page<InferenceResultDto.DetailListEntity> listInferenceResultWithGeom(
@NotNull Long id, InferenceResultDto.SearchGeoReq searchReq) {
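The SpEL key above concatenates the id with every paging and filter field, substituting 'none' for absent values. As an illustrative rendering in plain Java (the example values are assumptions, not taken from the commit), a call like listInferenceResultWithGeom(42L, searchReq) with page 0, size 20, and no filters would be cached under "42_0_20_none_none_none_none" in the "inferenceResultWithGeom" cache.

public class CacheKeyIllustration {
    public static void main(String[] args) {
        // Assumed example values; mirrors the SpEL expression in @Cacheable above
        Long id = 42L;
        int page = 0;
        int size = 20;
        String sort = null;
        String targetClass = null;
        String compareClass = null;
        Long mapSheetNum = null;

        String key = id + "_" + page + "_" + size
                + "_" + (sort != null ? sort : "none")
                + "_" + (targetClass != null ? targetClass : "none")
                + "_" + (compareClass != null ? compareClass : "none")
                + "_" + (mapSheetNum != null ? mapSheetNum.toString() : "none");

        System.out.println(key); // 42_0_20_none_none_none_none
    }
}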

@@ -155,6 +155,6 @@ public class MapSheetAnalDataGeomEntity {
}
return new InferenceResultDto.DetailListEntity(
comparedClazz, targetClazz, mapSheet, coordinate);
comparedClazz, targetClazz, mapSheet, coordinate, createdDttm);
}
}