package test.config;

import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.jsontype.impl.LaissezFaireSubTypeValidator;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.cache.RedisCacheWriter;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.GenericToStringSerializer;
import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.RedisSerializationContext;
import org.springframework.util.CollectionUtils;

import javax.annotation.Resource;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;

/**
 * Redis cache configuration.
 *
 * <p>Registers a {@link RedisTemplate} with String keys and JSON values, and a
 * {@link CacheManager} backed by a SCAN-based cache writer that supports a default
 * TTL plus per-cache-name TTL overrides bound from {@code spring.cache.redis.custom-ttl}.
 */
@Data
@EnableCaching
@Configuration
@ConfigurationProperties(prefix = "spring.cache.redis")
public class RedisConfig {

    /** Default entry TTL in milliseconds; falls back to 12 hours when not configured. */
    @Value("${spring.cache.redis.time-to-live:43200000}")
    private Long redisKeyTtl;

    /** Per-cache-name TTL overrides in milliseconds, keyed by cache name. */
    private Map<String, Long> customTtl;

    @Resource
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Primary Redis template: String-serialized keys (plain and hash) and
     * Jackson-JSON-serialized values (plain and hash).
     *
     * @param factory the Redis connection factory
     * @return the fully initialized template
     */
    @Primary
    @Bean
    public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
        RedisTemplate<String, Object> template = new RedisTemplate<>();
        template.setConnectionFactory(factory);
        GenericToStringSerializer<String> stringSerializer = new GenericToStringSerializer<>(String.class);
        // Keys are written as plain strings so they stay readable in redis-cli.
        template.setKeySerializer(stringSerializer);
        template.setValueSerializer(jacksonSerializer());
        template.setHashKeySerializer(stringSerializer);
        template.setHashValueSerializer(jacksonSerializer());
        template.afterPropertiesSet();
        return template;
    }

    /**
     * Cache manager with String keys and JSON values.
     * Entry TTL defaults to {@link #redisKeyTtl} (12 hours unless overridden) and can be
     * customized per cache name via {@link #customTtl}.
     *
     * @return the cache manager
     */
    @Bean
    public CacheManager cacheManager() {
        return new RedisCacheManagerResolver(
                // Non-blocking writer whose clean() uses SCAN instead of KEYS.
                new MyDefaultRedisCacheWriter(redisTemplate.getConnectionFactory()),
                // Default cache policy (TTL).
                newRedisCacheConfiguration(redisKeyTtl),
                // Per-cache-name policies (custom TTLs).
                getRedisCacheConfigurationMap());
    }

    /** Builds one {@link RedisCacheConfiguration} per entry of {@link #customTtl}. */
    private Map<String, RedisCacheConfiguration> getRedisCacheConfigurationMap() {
        Map<String, RedisCacheConfiguration> configurationMap = new HashMap<>();
        if (!CollectionUtils.isEmpty(customTtl)) {
            customTtl.forEach((cacheName, ttl) -> configurationMap.put(cacheName, newRedisCacheConfiguration(ttl)));
        }
        return configurationMap;
    }

    /**
     * Creates a cache configuration with String keys, JSON values, no null caching,
     * and the given entry TTL.
     *
     * @param redisKeyTtl entry time-to-live in milliseconds
     * @return the cache configuration
     */
    private static RedisCacheConfiguration newRedisCacheConfiguration(Long redisKeyTtl) {
        return RedisCacheConfiguration
                .defaultCacheConfig()
                .serializeKeysWith(RedisSerializationContext.SerializationPair.fromSerializer(stringSerializer()))
                .serializeValuesWith(RedisSerializationContext.SerializationPair.fromSerializer(jacksonSerializer()))
                // Null values are not cached.
                .disableCachingNullValues()
                .entryTtl(Duration.ofMillis(redisKeyTtl));
    }

    /**
     * Default Jackson serializer used for cache values.
     *
     * @return a {@link Jackson2JsonRedisSerializer} for arbitrary objects
     */
    private static Jackson2JsonRedisSerializer<Object> jacksonSerializer() {
        Jackson2JsonRedisSerializer<Object> serializer = new Jackson2JsonRedisSerializer<>(Object.class);
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
        // Don't fail deserialization on properties missing from the target type.
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        // Don't fail serialization on beans with no serializable properties.
        objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
        // Don't fail deserialization on invalid subtype information.
        objectMapper.configure(DeserializationFeature.FAIL_ON_INVALID_SUBTYPE, false);
        // FIX: was WRITE_DATE_KEYS_AS_TIMESTAMPS, which only affects Map keys. To serialize
        // date/time VALUES as ISO strings (per the original intent "don't use default
        // dateTime serialization"), WRITE_DATES_AS_TIMESTAMPS must be disabled instead.
        objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
        // JSR-310 module provides serializers for the java.time types.
        objectMapper.registerModule(new JavaTimeModule());
        // Embed @class type info so values deserialize back to their concrete types.
        objectMapper.activateDefaultTyping(LaissezFaireSubTypeValidator.instance,
                ObjectMapper.DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY);
        serializer.setObjectMapper(objectMapper);
        return serializer;
    }

    /** String serializer used for cache keys. */
    private static GenericToStringSerializer<String> stringSerializer() {
        return new GenericToStringSerializer<>(String.class);
    }
}
package test.config;

import org.springframework.dao.PessimisticLockingFailureException;
import org.springframework.data.redis.cache.RedisCacheWriter;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.RedisStringCommands.SetOption;
import org.springframework.data.redis.core.Cursor;
import org.springframework.data.redis.core.ScanOptions;
import org.springframework.data.redis.core.types.Expiration;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Function;

/**
 * {@link RedisCacheWriter} variant whose {@link #clean(String, byte[])} uses the
 * incremental SCAN command instead of KEYS, so clearing a cache does not block
 * the Redis server when the keyspace is large.
 *
 * <p>Constructed with {@link Duration#ZERO} sleep time the writer is non-locking;
 * a positive sleep time enables the explicit cache-lock protocol.
 */
public class MyDefaultRedisCacheWriter implements RedisCacheWriter {

    private final RedisConnectionFactory connectionFactory;

    /** Poll interval while waiting for the cache lock; ZERO disables locking entirely. */
    private final Duration sleepTime;

    MyDefaultRedisCacheWriter(RedisConnectionFactory connectionFactory) {
        this(connectionFactory, Duration.ZERO);
    }

    MyDefaultRedisCacheWriter(RedisConnectionFactory connectionFactory, Duration sleepTime) {
        Assert.notNull(connectionFactory, "ConnectionFactory must not be null!");
        Assert.notNull(sleepTime, "SleepTime must not be null!");
        this.connectionFactory = connectionFactory;
        this.sleepTime = sleepTime;
    }

    /**
     * Stores {@code value} under {@code key}, applying {@code ttl} when it is positive.
     */
    @Override
    public void put(String name, byte[] key, byte[] value, @Nullable Duration ttl) {
        Assert.notNull(name, "Name must not be null!");
        Assert.notNull(key, "Key must not be null!");
        Assert.notNull(value, "Value must not be null!");
        this.execute(name, (connection) -> {
            if (shouldExpireWithin(ttl)) {
                connection.set(key, value, Expiration.from(ttl.toMillis(), TimeUnit.MILLISECONDS), SetOption.upsert());
            } else {
                connection.set(key, value);
            }
            return "OK";
        });
    }

    /**
     * Returns the value stored under {@code key}, or {@code null} when absent.
     */
    @Override
    public byte[] get(String name, byte[] key) {
        Assert.notNull(name, "Name must not be null!");
        Assert.notNull(key, "Key must not be null!");
        return this.execute(name, (connection) -> connection.get(key));
    }

    /**
     * Stores {@code value} only if {@code key} is absent.
     *
     * @return {@code null} when the value was stored, otherwise the pre-existing value
     */
    @Override
    public byte[] putIfAbsent(String name, byte[] key, byte[] value, @Nullable Duration ttl) {
        Assert.notNull(name, "Name must not be null!");
        Assert.notNull(key, "Key must not be null!");
        Assert.notNull(value, "Value must not be null!");
        return this.execute(name, (connection) -> {
            if (this.isLockingCacheWriter()) {
                this.doLock(name, connection);
            }
            try {
                boolean put;
                if (shouldExpireWithin(ttl)) {
                    put = connection.set(key, value, Expiration.from(ttl), SetOption.ifAbsent());
                } else {
                    put = connection.setNX(key, value);
                }
                if (put) {
                    // Contract: return null when this call stored the value.
                    return null;
                }
                // Key already existed - hand back the current value.
                return connection.get(key);
            } finally {
                if (this.isLockingCacheWriter()) {
                    this.doUnlock(name, connection);
                }
            }
        });
    }

    /**
     * Deletes the entry stored under {@code key}.
     */
    @Override
    public void remove(String name, byte[] key) {
        Assert.notNull(name, "Name must not be null!");
        Assert.notNull(key, "Key must not be null!");
        this.execute(name, (connection) -> connection.del(new byte[][]{key}));
    }

    /**
     * Removes all keys matching {@code pattern}.
     *
     * <p>Overridden because the stock implementation used KEYS, which blocks the
     * Redis server when the keyspace is large; this version iterates with SCAN.
     *
     * @param name    cache name (used for the optional cache lock)
     * @param pattern raw key pattern, e.g. {@code cacheName::prefix*}
     */
    @Override
    public void clean(String name, byte[] pattern) {
        Assert.notNull(name, "Name must not be null!");
        Assert.notNull(pattern, "Pattern must not be null!");
        this.execute(name, (connection) -> {
            boolean wasLocked = false;
            try {
                if (this.isLockingCacheWriter()) {
                    this.doLock(name, connection);
                    wasLocked = true;
                }
                // SCAN instead of KEYS: incremental, never blocks the server.
                byte[][] keys = scanKeys(pattern, connection);
                if (keys.length > 0) {
                    connection.del(keys);
                }
            } finally {
                if (wasLocked && this.isLockingCacheWriter()) {
                    this.doUnlock(name, connection);
                }
            }
            return "OK";
        });
    }

    /**
     * Collects every key matching {@code pattern} via SCAN.
     *
     * @param pattern    raw glob pattern (UTF-8 encoded)
     * @param connection active Redis connection
     * @return all matching keys
     */
    private byte[][] scanKeys(byte[] pattern, RedisConnection connection) {
        // byte[] has identity-based equals, so a Set gives no content dedup; a List is
        // the honest container. SCAN may repeat keys; DEL on a repeated key is harmless.
        List<byte[]> keys = new ArrayList<>();
        // FIX: the cursor holds a connection-side resource and must be closed;
        // also decode the pattern explicitly as UTF-8 instead of the platform charset.
        try (Cursor<byte[]> cursor = connection.scan(
                ScanOptions.scanOptions().match(new String(pattern, StandardCharsets.UTF_8)).count(1000).build())) {
            while (cursor.hasNext()) {
                keys.add(cursor.next());
            }
        }
        return keys.toArray(new byte[0][]);
    }

    /** Acquires the cache lock for {@code name} (best effort, see {@link #doLock}). */
    void lock(String name) {
        this.execute(name, (connection) -> this.doLock(name, connection));
    }

    /** Releases the cache lock for {@code name} without waiting on it first. */
    void unlock(String name) {
        this.executeLockFree((connection) -> this.doUnlock(name, connection));
    }

    // NOTE(review): the lock key has no expiry, so a crashed process leaves the lock
    // behind until manually removed - mirrors the stock writer; confirm acceptable.
    private Boolean doLock(String name, RedisConnection connection) {
        return connection.setNX(createCacheLockKey(name), new byte[0]);
    }

    private Long doUnlock(String name, RedisConnection connection) {
        return connection.del(new byte[][]{createCacheLockKey(name)});
    }

    boolean doCheckLock(String name, RedisConnection connection) {
        return connection.exists(createCacheLockKey(name));
    }

    /** Locking is enabled only for a strictly positive sleep time. */
    private boolean isLockingCacheWriter() {
        return !this.sleepTime.isZero() && !this.sleepTime.isNegative();
    }

    /**
     * Runs {@code callback} on a fresh connection, waiting for the cache lock first
     * (when locking is enabled) and always closing the connection.
     */
    private <T> T execute(String name, Function<RedisConnection, T> callback) {
        RedisConnection connection = this.connectionFactory.getConnection();
        try {
            this.checkAndPotentiallyWaitUntilUnlocked(name, connection);
            return callback.apply(connection);
        } finally {
            connection.close();
        }
    }

    /** Runs {@code callback} on a fresh connection without any lock handling. */
    private void executeLockFree(Consumer<RedisConnection> callback) {
        RedisConnection connection = this.connectionFactory.getConnection();
        try {
            callback.accept(connection);
        } finally {
            connection.close();
        }
    }

    /** Busy-waits (with {@link #sleepTime} pauses) until the cache lock is released. */
    private void checkAndPotentiallyWaitUntilUnlocked(String name, RedisConnection connection) {
        if (this.isLockingCacheWriter()) {
            try {
                while (this.doCheckLock(name, connection)) {
                    Thread.sleep(this.sleepTime.toMillis());
                }
            } catch (InterruptedException ex) {
                // Restore the interrupt flag before translating the exception.
                Thread.currentThread().interrupt();
                throw new PessimisticLockingFailureException(
                        String.format("Interrupted while waiting to unlock cache %s", name), ex);
            }
        }
    }

    /** TTL applies only when present and strictly positive. */
    private static boolean shouldExpireWithin(@Nullable Duration ttl) {
        return ttl != null && !ttl.isZero() && !ttl.isNegative();
    }

    /** Lock key is the cache name with a {@code ~lock} suffix, UTF-8 encoded. */
    private static byte[] createCacheLockKey(String name) {
        return (name + "~lock").getBytes(StandardCharsets.UTF_8);
    }
}
package test.config;

import org.springframework.data.redis.cache.RedisCache;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.cache.RedisCacheManager;
import org.springframework.data.redis.cache.RedisCacheWriter;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.lang.Nullable;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * {@link RedisCacheManager} that creates {@link RedisCacheResolver} caches, so evictions
 * gain pattern ("suffix *") support while everything else behaves like the stock manager.
 */
public class RedisCacheManagerResolver extends RedisCacheManager {

    private final RedisCacheWriter cacheWriter;
    private final RedisCacheConfiguration defaultCacheConfig;

    public RedisCacheManagerResolver(RedisCacheWriter cacheWriter,
                                     RedisCacheConfiguration defaultCacheConfiguration) {
        super(cacheWriter, defaultCacheConfiguration);
        this.cacheWriter = cacheWriter;
        this.defaultCacheConfig = defaultCacheConfiguration;
    }

    public RedisCacheManagerResolver(RedisCacheWriter cacheWriter,
                                     RedisCacheConfiguration defaultCacheConfiguration,
                                     String... initialCacheNames) {
        super(cacheWriter, defaultCacheConfiguration, initialCacheNames);
        this.cacheWriter = cacheWriter;
        this.defaultCacheConfig = defaultCacheConfiguration;
    }

    public RedisCacheManagerResolver(RedisCacheWriter cacheWriter,
                                     RedisCacheConfiguration defaultCacheConfiguration,
                                     boolean allowInFlightCacheCreation,
                                     String... initialCacheNames) {
        super(cacheWriter, defaultCacheConfiguration, allowInFlightCacheCreation, initialCacheNames);
        this.cacheWriter = cacheWriter;
        this.defaultCacheConfig = defaultCacheConfiguration;
    }

    public RedisCacheManagerResolver(RedisCacheWriter cacheWriter,
                                     RedisCacheConfiguration defaultCacheConfiguration,
                                     Map<String, RedisCacheConfiguration> initialCacheConfigurations) {
        super(cacheWriter, defaultCacheConfiguration, initialCacheConfigurations);
        this.cacheWriter = cacheWriter;
        this.defaultCacheConfig = defaultCacheConfiguration;
    }

    public RedisCacheManagerResolver(RedisCacheWriter cacheWriter,
                                     RedisCacheConfiguration defaultCacheConfiguration,
                                     Map<String, RedisCacheConfiguration> initialCacheConfigurations,
                                     boolean allowInFlightCacheCreation) {
        super(cacheWriter, defaultCacheConfiguration, initialCacheConfigurations, allowInFlightCacheCreation);
        this.cacheWriter = cacheWriter;
        this.defaultCacheConfig = defaultCacheConfiguration;
    }

    public RedisCacheManagerResolver(RedisConnectionFactory redisConnectionFactory,
                                     RedisCacheConfiguration cacheConfiguration) {
        this(RedisCacheWriter.nonLockingRedisCacheWriter(redisConnectionFactory), cacheConfiguration);
    }

    /**
     * Creates a {@link RedisCacheResolver} instead of the stock {@link RedisCache},
     * falling back to the default configuration when none is supplied for the name.
     *
     * @param name        cache name
     * @param cacheConfig per-cache configuration, may be {@code null}
     * @return the custom cache instance
     */
    @Override
    protected RedisCache createRedisCache(String name, @Nullable RedisCacheConfiguration cacheConfig) {
        RedisCacheConfiguration effectiveConfig = cacheConfig != null ? cacheConfig : defaultCacheConfig;
        return new RedisCacheResolver(name, cacheWriter, effectiveConfig);
    }

    /**
     * Returns an unmodifiable snapshot of each known cache's configuration, keyed by name.
     */
    @Override
    public Map<String, RedisCacheConfiguration> getCacheConfigurations() {
        Map<String, RedisCacheConfiguration> snapshot = new HashMap<>(getCacheNames().size());
        for (String cacheName : getCacheNames()) {
            RedisCache cache = (RedisCacheResolver) lookupCache(cacheName);
            snapshot.put(cacheName, cache != null ? cache.getCacheConfiguration() : null);
        }
        return Collections.unmodifiableMap(snapshot);
    }
}
package test.config; import org.springframework.core.convert.ConversionService; import org.springframework.data.redis.cache.RedisCache; import org.springframework.data.redis.cache.RedisCacheConfiguration; import org.springframework.data.redis.cache.RedisCacheWriter; import org.springframework.util.StringUtils; public class RedisCacheResolver extends RedisCache { private final String name; private final RedisCacheWriter cacheWriter; private final ConversionService conversionService; protected RedisCacheResolver(String name, RedisCacheWriter cacheWriter, RedisCacheConfiguration cacheConfig) { super(name, cacheWriter, cacheConfig); this.name = name; this.cacheWriter = cacheWriter; this.conversionService = cacheConfig.getConversionService(); } /** * * @Title: evict * @Description: 重寫刪除的方法,支持按key的后綴進行模糊刪除 * @param @param key * @throws * */ @Override public void evict(Object key) { if (key instanceof String) { String keyString = key.toString(); // 后綴刪除 if (StringUtils.endsWithIgnoreCase(keyString, "*")) { evictLikeSuffix(keyString); return; } } // 刪除指定的key super.evict(key); } /** * 后綴匹配匹配 * * @param key */ private void evictLikeSuffix(String key) { byte[] pattern = this.conversionService.convert(this.createCacheKey(key), byte[].class); this.cacheWriter.clean(this.name, pattern); } }
鑒于原生SpringCache存在以下問題,因此對SpringCache進行擴展 原生存在問題: 1.無法根據(jù)具體的cacheName自定義ttl 2.刪除無法使用key進行精細的模糊刪除 3.刪除采用keys,在大數(shù)據(jù)量的情況下會導致redis阻塞 改進如下: 1.實現(xiàn)SpringCache中自定義cacheName的ttl 2.實現(xiàn)SpringCache中刪除緩存時支持key的模糊刪除 3.優(yōu)化SpringCache中查找keys的方法為scan
浙公網(wǎng)安備 33010602011771號