先放幾個必要的依賴吧
<dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-web</artifactId> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-data-redis</artifactId> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-aop</artifactId> </dependency> <dependency> <groupId>redis.clients</groupId> <artifactId>jedis</artifactId> <version>2.9.0</version> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-boot-starter</artifactId> <version>3.1.2</version> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>8.0.11</version> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> <version>1.0.29</version> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>27.0.1-jre</version> </dependency> <dependency> <groupId>com.hazelcast</groupId> <artifactId>hazelcast-all</artifactId> <version>3.10.1</version> </dependency>
配置文件
spring:
  application:
    name: redis-caching
  datasource:
    driver-class-name: com.mysql.cj.jdbc.Driver
    url: jdbc:mysql://127.0.0.1:3306/redis_caching?useSSL=FALSE&serverTimezone=GMT%2B8
    username: root
    password: ****
    type: com.alibaba.druid.pool.DruidDataSource
    # Druid pool settings, bound via @ConfigurationProperties(prefix = "spring.datasource")
    filters: stat
    maxActive: 20
    initialSize: 1
    maxWait: 60000
    minIdle: 1
    timeBetweenEvictionRunsMillis: 60000
    minEvictableIdleTimeMillis: 300000
    validationQuery: select 'x'
    testWhileIdle: true
    testOnBorrow: false
    testOnReturn: false
    poolPreparedStatements: true
    maxOpenPreparedStatements: 20
  redis:
    host: 127.0.0.1
    port: 6379
    password: ****
    timeout: 10000
    lettuce:
      pool:
        min-idle: 0
        max-idle: 8
        max-active: 8
        max-wait: -1
server:
  port: 8080
# MyBatis-Plus
mybatis-plus:
  mapper-locations: classpath*:/mybatis-mappers/*
  # entity scan; separate multiple packages with commas or semicolons
  typeAliasesPackage: com.guanjian.rediscaching.model
  global-config:
    # database-related settings
    db-config:
      # primary-key type — AUTO: database auto-increment, INPUT: caller-supplied id,
      # ID_WORKER: globally unique numeric id, UUID: globally unique UUID
      id-type: INPUT
      logic-delete-value: -1
      logic-not-delete-value: 0
    banner: false
  # native MyBatis configuration
  configuration:
    map-underscore-to-camel-case: true
    cache-enabled: false
    call-setters-on-nulls: true
    jdbc-type-for-null: 'null'
配置類
@Configuration @EnableCaching public class RedisConfig extends CachingConfigurerSupport { @Value("${spring.redis.host}") private String host; @Value("${spring.redis.port}") private int port; @Value("${spring.redis.password}") private String password; @Bean public KeyGenerator wiselyKeyGenerator(){ return new KeyGenerator() { @Override public Object generate(Object target, Method method, Object... params) { StringBuilder sb = new StringBuilder(); sb.append(target.getClass().getName()); sb.append(method.getName()); for (Object obj : params) { sb.append(obj.toString()); } return sb.toString(); } }; } @Bean public JedisConnectionFactory redisConnectionFactory() { JedisConnectionFactory factory = new JedisConnectionFactory(); factory.setHostName(host); factory.setPort(port); factory.setPassword(password); return factory; } @Bean public CacheManager cacheManager(RedisConnectionFactory factory) { RedisCacheManager cacheManager =RedisCacheManager.create(factory); // Number of seconds before expiration. Defaults to unlimited (0) // cacheManager.setDefaultExpiration(10); //設置key-value超時時間 return cacheManager; } @Bean public RedisTemplate<String, String> redisTemplate(RedisConnectionFactory factory) { StringRedisTemplate template = new StringRedisTemplate(factory); setSerializer(template); //設置序列化工具,這樣ReportBean不需要實現Serializable接口 template.afterPropertiesSet(); return template; } private void setSerializer(StringRedisTemplate template) { Jackson2JsonRedisSerializer jackson2JsonRedisSerializer = new Jackson2JsonRedisSerializer(Object.class); ObjectMapper om = new ObjectMapper(); om.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); om.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL); jackson2JsonRedisSerializer.setObjectMapper(om); template.setValueSerializer(jackson2JsonRedisSerializer); } }
/**
 * Registers the Druid connection pool as the primary DataSource, with its
 * settings bound from the spring.datasource.* properties.
 */
@Configuration
public class DruidConfig {

    @Bean
    @Primary
    @ConfigurationProperties(prefix = "spring.datasource")
    public DataSource dataSource() {
        DruidDataSource druidDataSource = new DruidDataSource();
        return druidDataSource;
    }
}
實體類
/**
 * Entity mapped to the `city` table via MyBatis-Plus.
 * Serializable so instances can pass through serializing caches if needed.
 */
@Data
@TableName("city")
public class City implements Serializable {
    // Primary key; id-type is INPUT (caller supplies the id), per the config file
    @TableId
    private Integer id;
    private String name;
}
dao
// MyBatis-Plus mapper for City; BaseMapper supplies the generic CRUD methods
// (selectById/insert/deleteById/update) seen in the SQL logs below.
@Mapper
public interface CityDao extends BaseMapper<City> {
}
service
// Service contract for City; IService exposes the MyBatis-Plus CRUD API
// (getById/save/removeById/update/list) used by the controller.
public interface CityService extends IService<City> {
}
// Standard MyBatis-Plus service implementation; ServiceImpl wires CityDao to
// provide the CRUD operations declared by CityService.
@Service
public class CityServiceImpl extends ServiceImpl<CityDao,City> implements CityService {
}
controller
/**
 * REST endpoints for City with Spring Cache annotations backed by Redis.
 * Entries live in the "city_info" cache keyed by city id (Redis key
 * "city_info::{id}").
 */
@RestController
public class CityController {

    @Autowired
    private CityService cityService;

    // Cache-aside read: first hit goes to MySQL, the result is stored under
    // city_info::{id}; subsequent calls are served from Redis.
    @GetMapping("/findbyid")
    @Cacheable(cacheNames = "city_info",key = "#id")
    public City findCityById(@RequestParam("id") int id) {
        return cityService.getById(id);
    }

    // @CachePut always runs the method and writes the returned City into the
    // cache, keeping Redis in sync with the insert.
    @PostMapping("/save")
    @CachePut(cacheNames = "city_info",key = "#city.id")
    public City saveCity(@RequestBody City city) {
        cityService.save(city);
        return city;
    }

    // NOTE(review): evicts the cache entry even when removeById returns false,
    // and uses GET for a destructive operation — confirm both are intended.
    @GetMapping("/deletebyid")
    @CacheEvict(cacheNames = "city_info",key = "#id")
    public boolean deleteCityById(@RequestParam("id") int id) {
        return cityService.removeById(id);
    }

    // Updates rows matched by a wrapper built from the id only, then refreshes
    // the cache entry with the request body via @CachePut.
    @PostMapping("/update")
    @CachePut(cacheNames = "city_info",key = "#city.id")
    public City updateCity(@RequestBody City city) {
        City cityQuery = new City();
        cityQuery.setId(city.getId());
        QueryWrapper<City> wrapper = new QueryWrapper<>(cityQuery);
        cityService.update(city,wrapper);
        return city;
    }
}
測試
我們在數據庫中有一個city的表,其中有一條數據
而redis中任何數據都沒有
此時我們查詢第一個Rest接口
後端日誌爲
2020-09-30 06:06:12.919 DEBUG 1321 --- [nio-8080-exec-4] c.g.rediscaching.dao.CityDao.selectById : ==> Preparing: SELECT id,name FROM city WHERE id=?
2020-09-30 06:06:12.920 DEBUG 1321 --- [nio-8080-exec-4] c.g.rediscaching.dao.CityDao.selectById : ==> Parameters: 1(Integer)
2020-09-30 06:06:12.945 DEBUG 1321 --- [nio-8080-exec-4] c.g.rediscaching.dao.CityDao.selectById : <== Total: 1
此時我們查詢redis中如下
可見我們在沒有寫任何redis代碼的同時,就將數據存儲進了redis
此時我們再次查詢
則後端日誌沒有打印SQL語句,說明再次查詢是從redis中獲取而不是mysql中獲取的。
此時我們測試第二個Rest接口
此時數據庫中多出一條數據
我們再來看redis中的數據
查詢第二條數據可得
現在我們來刪除第二條數據
數據庫中第二條數據被刪除
同時我們在redis中可以看到第二條數據也被刪除了
現在我們來修改第一條數據
數據庫中同時更新了數據
redis中的數據依然存在
此時我們重新查詢第一條數據
後端日誌中也沒有相應的查詢SQL語句,之前的日誌如下
2020-09-30 06:32:57.729 DEBUG 1349 --- [nio-8080-exec-3] c.g.rediscaching.dao.CityDao.insert : ==> Preparing: INSERT INTO city ( id, name ) VALUES ( ?, ? )
2020-09-30 06:32:57.730 DEBUG 1349 --- [nio-8080-exec-3] c.g.rediscaching.dao.CityDao.insert : ==> Parameters: 2(Integer), 武漢(String)
2020-09-30 06:32:57.735 DEBUG 1349 --- [nio-8080-exec-3] c.g.rediscaching.dao.CityDao.insert : <== Updates: 1
2020-09-30 06:38:04.042 DEBUG 1349 --- [io-8080-exec-10] c.g.rediscaching.dao.CityDao.deleteById : ==> Preparing: DELETE FROM city WHERE id=?
2020-09-30 06:38:04.043 DEBUG 1349 --- [io-8080-exec-10] c.g.rediscaching.dao.CityDao.deleteById : ==> Parameters: 2(Integer)
2020-09-30 06:38:04.047 DEBUG 1349 --- [io-8080-exec-10] c.g.rediscaching.dao.CityDao.deleteById : <== Updates: 1
2020-09-30 06:40:09.723 DEBUG 1349 --- [nio-8080-exec-3] c.g.rediscaching.dao.CityDao.update : ==> Preparing: UPDATE city SET name=? WHERE id=?
2020-09-30 06:40:09.728 DEBUG 1349 --- [nio-8080-exec-3] c.g.rediscaching.dao.CityDao.update : ==> Parameters: 北京(String), 1(Integer)
2020-09-30 06:40:09.733 DEBUG 1349 --- [nio-8080-exec-3] c.g.rediscaching.dao.CityDao.update : <== Updates: 1
現在我們來給緩存設置過期時間
/**
 * Redis cache configuration with per-cache TTLs. TTLs are randomized within a
 * one-minute window so entries created together do not all expire at the same
 * instant (cache-avalanche mitigation).
 */
@Configuration
@EnableCaching
public class RedisConfig extends CachingConfigurerSupport {

    @Value("${spring.redis.host}")
    private String host;
    @Value("${spring.redis.port}")
    private int port;
    @Value("${spring.redis.password}")
    private String password;

    // Key generator: class name + method name + each argument, concatenated.
    // Not referenced by the controller annotations (they use explicit SpEL keys).
    @Bean
    public KeyGenerator wiselyKeyGenerator(){
        return new KeyGenerator() {
            @Override
            public Object generate(Object target, Method method, Object... params) {
                StringBuilder sb = new StringBuilder();
                sb.append(target.getClass().getName());
                sb.append(method.getName());
                for (Object obj : params) {
                    sb.append(obj.toString());
                }
                return sb.toString();
            }
        };
    }

    // Jedis connection built from the spring.redis.* properties injected above.
    @Bean
    public JedisConnectionFactory redisConnectionFactory() {
        JedisConnectionFactory factory = new JedisConnectionFactory();
        factory.setHostName(host);
        factory.setPort(port);
        factory.setPassword(password);
        return factory;
    }

    @Bean
    public CacheManager cacheManager(RedisConnectionFactory factory) {
        Random random = new Random();
        return new RedisCacheManager(
                RedisCacheWriter.nonLockingRedisCacheWriter(factory),
                // default for caches without an explicit policy: 1140-1199 s,
                // i.e. they expire within ~20 minutes
                getRedisCacheConfigurationWithTtl(1140 + random.nextInt(60)),
                // per-cache overrides (currently only "city_info")
                getRedisCacheConfigurationMap()
        );
    }

    @Bean
    public RedisTemplate<String, String> redisTemplate(RedisConnectionFactory factory) {
        StringRedisTemplate template = new StringRedisTemplate(factory);
        // JSON value serializer, so cached beans need not implement Serializable
        setSerializer(template);
        template.afterPropertiesSet();
        return template;
    }

    private void setSerializer(StringRedisTemplate template) {
        Jackson2JsonRedisSerializer jackson2JsonRedisSerializer = new Jackson2JsonRedisSerializer(Object.class);
        ObjectMapper om = new ObjectMapper();
        om.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
        // embeds type info so values deserialize back to their concrete class
        om.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL);
        jackson2JsonRedisSerializer.setObjectMapper(om);
        template.setValueSerializer(jackson2JsonRedisSerializer);
    }

    // Per-cache TTL overrides: "city_info" expires in 540-599 s (~10 minutes).
    private Map<String, RedisCacheConfiguration> getRedisCacheConfigurationMap() {
        Map<String, RedisCacheConfiguration> redisCacheConfigurationMap = new ConcurrentHashMap<>();
        Random random = new Random();
        redisCacheConfigurationMap.put("city_info", getRedisCacheConfigurationWithTtl(540 + random.nextInt(60)));
        return redisCacheConfigurationMap;
    }

    // Builds a cache configuration with the JSON value serializer and the given TTL.
    private RedisCacheConfiguration getRedisCacheConfigurationWithTtl(Integer seconds) {
        Jackson2JsonRedisSerializer<Object> jackson2JsonRedisSerializer = new Jackson2JsonRedisSerializer<>(Object.class);
        ObjectMapper om = new ObjectMapper();
        om.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
        om.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL);
        jackson2JsonRedisSerializer.setObjectMapper(om);
        RedisCacheConfiguration redisCacheConfiguration = RedisCacheConfiguration.defaultCacheConfig();
        redisCacheConfiguration = redisCacheConfiguration.serializeValuesWith(
                RedisSerializationContext
                        .SerializationPair
                        .fromSerializer(jackson2JsonRedisSerializer)
        ).entryTtl(Duration.ofSeconds(seconds));
        return redisCacheConfiguration;
    }
}
通過查看redis的鍵的過期時間,我們可以看到
它是用的指定鍵的過期時間
此時我們調整RedisConfig的內容,將指定的city_info改掉
/**
 * Per-cache TTL overrides. The single entry is now keyed "abcd", which matches
 * no real cache name, so every cache falls back to the manager's default TTL.
 */
private Map<String, RedisCacheConfiguration> getRedisCacheConfigurationMap() {
    Map<String, RedisCacheConfiguration> ttlOverrides = new ConcurrentHashMap<>();
    int ttlSeconds = 540 + new Random().nextInt(60);
    ttlOverrides.put("abcd", getRedisCacheConfigurationWithTtl(ttlSeconds));
    return ttlOverrides;
}
此時我們會使用默認的20分鐘過期時間
此時我們可以看到,它使用的就是默認所有鍵都相同的20分鐘過期時間。
現在我們來增加防止緩存高併發的功能
緩存高併發的一般性原則可以參考建立緩存,防高併發代碼demo
現在我們要通過標籤來完成這個功能,新增一個標籤
/**
 * Marks a @Cacheable method whose cache rebuild should be guarded by a Redis
 * distributed lock (see LockAop), so only one thread regenerates an expired
 * entry while the others wait for it.
 */
@Target({ ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
public @interface Lock {
}
新增一個Redis工具類,包含了分佈式鎖的實現
@Component public class RedisUtils { @Autowired private RedisTemplate redisTemplate; private static final Long RELEASE_SUCCESS = 1L; private static final String UNLOCK_LUA = "if redis.call('get', KEYS[1]) == ARGV[1] then return redis.call('del', KEYS[1]) else return 0 end"; /** * 寫入緩存 */ public boolean set(final String key, Object value) { boolean result = false; try { ValueOperations<Serializable, Object> operations = redisTemplate.opsForValue(); operations.set(key, value); result = true; } catch (Exception e) { e.printStackTrace(); } return result; } /** * 寫入緩存設置時效時間 */ public boolean set(final String key, Object value, Long expireTime , TimeUnit timeUnit) { boolean result = false; try { ValueOperations<Serializable, Object> operations = redisTemplate.opsForValue(); operations.set(key, value); redisTemplate.expire(key, expireTime, timeUnit); result = true; } catch (Exception e) { e.printStackTrace(); } return result; } /** * 寫入緩存設置時效時間,僅第一次有效 * @param key * @param value * @param expireTime * @param timeUnit * @return */ public boolean setIfAbsent(final String key, Object value, Long expireTime , TimeUnit timeUnit) { boolean result = false; try { ValueOperations<Serializable, Object> operations = redisTemplate.opsForValue(); operations.setIfAbsent(key,value,expireTime,timeUnit); result = true; } catch (Exception e) { e.printStackTrace(); } return result; } /** * 批量刪除對應的value */ public void remove(final String... 
keys) { for (String key : keys) { remove(key); } } /** * 批量刪除key */ public void removePattern(final String pattern) { Set<Serializable> keys = redisTemplate.keys(pattern); if (keys.size() > 0){ redisTemplate.delete(keys); } } /** * 刪除對應的value */ public void remove(final String key) { if (exists(key)) { redisTemplate.delete(key); } } /** * 判斷緩存中是否有對應的value */ public boolean exists(final String key) { return redisTemplate.hasKey(key); } /** * 讀取緩存 */ public Object get(final String key) { Object result = null; ValueOperations<Serializable, Object> operations = redisTemplate.opsForValue(); result = operations.get(key); return result; } /** * 哈希 添加 */ public void hmSet(String key, Object hashKey, Object value){ HashOperations<String, Object, Object> hash = redisTemplate.opsForHash(); hash.put(key,hashKey,value); } /** * 哈希獲取數據 */ public Object hmGet(String key, Object hashKey){ HashOperations<String, Object, Object> hash = redisTemplate.opsForHash(); return hash.get(key,hashKey); } /** * 列表添加 */ public void lPush(String k,Object v){ ListOperations<String, Object> list = redisTemplate.opsForList(); list.rightPush(k,v); } /** * 列表獲取 */ public List<Object> lRange(String k, long l, long l1){ ListOperations<String, Object> list = redisTemplate.opsForList(); return list.range(k,l,l1); } /** * 集合添加 */ public void add(String key,Object value){ SetOperations<String, Object> set = redisTemplate.opsForSet(); set.add(key,value); } /** * 集合獲取 */ public Set<Object> setMembers(String key){ SetOperations<String, Object> set = redisTemplate.opsForSet(); return set.members(key); } /** * 有序集合添加 */ public void zAdd(String key,Object value,double scoure){ ZSetOperations<String, Object> zset = redisTemplate.opsForZSet(); zset.add(key,value,scoure); } /** * 有序集合獲取 */ public Set<Object> rangeByScore(String key,double scoure,double scoure1){ ZSetOperations<String, Object> zset = redisTemplate.opsForZSet(); return zset.rangeByScore(key, scoure, scoure1); } /** * 嘗試獲取鎖 立即返回 * * @param key * 
@param value * @param timeout * @return */ public boolean lock(String key, String value, long timeout) { return setIfAbsent(key,value,timeout,TimeUnit.MILLISECONDS); } /** * 以阻塞方式的獲取鎖 * * @param key * @param value * @param timeout * @return */ public boolean lockBlock(String key, String value, long timeout) { long start = System.currentTimeMillis(); while (true) { //檢測是否超時 if (System.currentTimeMillis() - start > timeout) { return false; } //執行set命令 //1 Boolean absent = setIfAbsent(key,value,timeout,TimeUnit.MILLISECONDS); //其實沒必要判NULL,這裏是爲了程序的嚴謹而加的邏輯 if (absent == null) { return false; } //是否成功獲取鎖 if (absent) { return true; } } } /** * 解鎖 * @param key * @param value * @return */ public boolean unlock(String key, String value) { RedisScript<Long> redisScript = new DefaultRedisScript<>(UNLOCK_LUA,Long.class); Long result = (Long) redisTemplate.execute(redisScript,Collections.singletonList(key),value); //返回最終結果 return RELEASE_SUCCESS.equals(result); } }
實現一個AOP,用於攔截緩存過期高併發
/**
 * AOP guard against a cache stampede when an entry expires: only one thread
 * acquires the Redis lock and rebuilds the cache; the others poll Redis for
 * the rebuilt value.
 *
 * Fixes: the lock value is now unique per acquisition (previously every
 * thread used the identical "id{id}" string, so unlock()'s owner check could
 * not distinguish holders and any thread could release another's lock), the
 * method call is inside the try so the lock is released even if it throws,
 * and the waiter sleeps between polls instead of hot-spinning.
 *
 * @author 關鍵
 */
@Aspect
@Component
public class LockAop {

    @Autowired
    private RedisUtils redisUtils;

    @Around(value = "@annotation(com.guanjian.rediscaching.annotation.Lock)")
    public Object lock(ProceedingJoinPoint joinPoint) throws Throwable {
        MethodSignature methodSignature = (MethodSignature) joinPoint.getSignature();
        // @Lock is expected to be paired with @Cacheable on the same method
        Cacheable cacheableAnnotion = methodSignature.getMethod().getDeclaredAnnotation(Cacheable.class);
        String[] cacheNames = cacheableAnnotion.cacheNames();
        String idKey = cacheableAnnotion.key();
        String[] paramNames = methodSignature.getParameterNames();
        if (paramNames == null || paramNames.length == 0) {
            throw new IllegalArgumentException("參數錯誤");
        }
        Object[] args = joinPoint.getArgs();
        Map<String, Object> params = new HashMap<>();
        for (int i = 0; i < paramNames.length; i++) {
            params.put(paramNames[i], args[i]);
        }
        // "#id" -> "id"; only simple single-parameter SpEL keys are supported
        idKey = idKey.substring(1);
        final String key = cacheNames[0] + "::" + params.get(idKey);
        if (redisUtils.exists(key)) {
            // cache already populated: serve it directly
            return redisUtils.get(key);
        }
        String lockKey = key + "lock";
        // unique owner token so unlock() releases only this thread's lock
        String lockValue = Thread.currentThread().getId() + ":" + System.nanoTime();
        if (redisUtils.lock(lockKey, lockValue, 3000)) {
            try {
                return joinPoint.proceed();
            } finally {
                redisUtils.unlock(lockKey, lockValue);
            }
        }
        // another thread is rebuilding the cache: poll Redis for up to 3 s
        LocalDateTime now = LocalDateTime.now();
        Future<Object> future = CompletableFuture.supplyAsync(() -> {
            while (true) {
                if (redisUtils.exists(key)) {
                    return redisUtils.get(key);
                }
                if (LocalDateTime.now().isAfter(now.plusSeconds(3))) {
                    return null;
                }
                try {
                    Thread.sleep(20); // avoid a hot spin against Redis
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return null;
                }
            }
        });
        try {
            return future.get(3000, TimeUnit.MILLISECONDS);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }
}
最後將標籤添加到查詢方法上面
/**
 * City REST endpoints; the read path now also carries @Lock so that LockAop
 * serializes cache rebuilds when the "city_info" entry expires under load.
 */
@RestController
public class CityController {

    @Autowired
    private CityService cityService;

    // Cache-aside read guarded by the distributed lock: only one thread hits
    // MySQL when the entry is missing; the rest wait for the rebuilt value.
    @GetMapping("/findbyid")
    @Cacheable(cacheNames = "city_info",key = "#id")
    @Lock
    public City findCityById(@RequestParam("id") int id) {
        return cityService.getById(id);
    }

    // @CachePut always runs the method and writes the returned City into Redis.
    @PostMapping("/save")
    @CachePut(cacheNames = "city_info",key = "#city.id")
    public City saveCity(@RequestBody City city) {
        cityService.save(city);
        return city;
    }

    // NOTE(review): evicts even when removeById returns false; GET used for a
    // destructive operation — confirm both are intended.
    @GetMapping("/deletebyid")
    @CacheEvict(cacheNames = "city_info",key = "#id")
    public boolean deleteCityById(@RequestParam("id") int id) {
        return cityService.removeById(id);
    }

    // Updates rows matched by a wrapper built from the id, then refreshes the
    // cache entry with the request body via @CachePut.
    @PostMapping("/update")
    @CachePut(cacheNames = "city_info",key = "#city.id")
    public City updateCity(@RequestBody City city) {
        City cityQuery = new City();
        cityQuery.setId(city.getId());
        QueryWrapper<City> wrapper = new QueryWrapper<>(cityQuery);
        cityService.update(city,wrapper);
        return city;
    }
}
現在我們來增加布隆過濾器來防止惡意無效訪問
在該緩存系統中存在一個問題,那就是當用戶查詢了數據庫中不存在的id的時候,緩存系統依然會將空值添加到redis中。如果有惡意用戶通過工具不斷使用不存在的id進行訪問的時候,一方面會對數據庫造成巨大的訪問壓力,另一方面可能會把redis內存撐破。
比方說我們訪問一個不存在的id=5的時候
Redis依然會被寫入,查出來是NullValue
代碼實現(請注意,該實現依然存在漏洞,但是可以杜絕大部分的惡意訪問)
先寫一個標籤
/**
 * Marks a @Cacheable method that must pass a Bloom-filter membership check
 * (see BloomFilterAop) before touching the cache or the database, blocking
 * requests for ids that are known not to exist (anti cache-penetration).
 */
@Target({ ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
public @interface Bloom {
}
在RedisConfig中添加一個布隆過濾器的Bean
/**
 * Guava Bloom filter over cache keys ("city_info::{id}").
 * Sized for 1e8 expected insertions at a 0.03% false-positive rate —
 * NOTE(review): at this size the bit array is on the order of a couple of
 * hundred MB of heap; confirm that is acceptable for the deployment.
 */
@Bean
public BloomFilter<String> bloomFilter() {
    return BloomFilter.create(Funnels.stringFunnel(Charsets.UTF_8),100000000,0.0003);
}
建立一個任務調度器,每一分鐘獲取一次數據庫中的id值寫入布隆過濾器中
/**
 * Periodically (every minute, starting at startup) reloads every city id into
 * the Bloom filter so keys inserted by other nodes become visible here.
 */
@Component
public class BloomFilterScheduler {

    private ScheduledExecutorService scheduledExecutorService = Executors.newSingleThreadScheduledExecutor();

    @Autowired
    private BloomFilter<String> bloomFilter;

    @Autowired
    private CityService cityService;

    /** Adds "city_info::{id}" for every row of the city table to the filter. */
    private void getAllCityForBloomFilter() {
        List<City> list = cityService.list();
        list.parallelStream().forEach(city -> bloomFilter.put("city_info::" + city.getId()));
    }

    private ScheduledFuture scheduleTask(Runnable task) {
        // Bug fix: scheduleAtFixedRate silently cancels ALL subsequent runs if
        // one execution throws, so a single transient DB failure would freeze
        // the filter forever. Swallow-and-log inside the task instead.
        Runnable safeTask = () -> {
            try {
                task.run();
            } catch (Exception e) {
                e.printStackTrace();
            }
        };
        return scheduledExecutorService.scheduleAtFixedRate(safeTask, 0, 1, TimeUnit.MINUTES);
    }

    /** Starts the refresh loop as soon as the bean is constructed. */
    @PostConstruct
    public ScheduledFuture scheduleChange() {
        return scheduleTask(this::getAllCityForBloomFilter);
    }
}
再編寫一個布隆過濾器的AOP攔截,如果布隆過濾器中不存在該key,則不允許訪問數據庫,也不允許建立緩存。
/**
 * Rejects lookups for keys the Bloom filter has never seen, so requests for
 * nonexistent ids reach neither MySQL nor Redis (anti cache-penetration).
 * Runs around methods annotated with @Bloom and relies on the same method
 * also carrying @Cacheable to recover the cache name and key expression.
 */
@Aspect
@Component
public class BloomFilterAop {

    @Autowired
    private BloomFilter<String> bloomFilter;

    @Around(value = "@annotation(com.guanjian.rediscaching.annotation.Bloom)")
    public Object bloom(ProceedingJoinPoint joinPoint) throws Throwable {
        MethodSignature methodSignature = (MethodSignature) joinPoint.getSignature();
        // NOTE(review): NPEs if @Bloom is placed on a method without @Cacheable
        // — confirm that pairing is enforced elsewhere.
        Cacheable cacheableAnnotion = methodSignature.getMethod().getDeclaredAnnotation(Cacheable.class);
        String[] cacheNames = cacheableAnnotion.cacheNames();
        String idKey = cacheableAnnotion.key();
        String[] paramNames = methodSignature.getParameterNames();
        if (paramNames != null && paramNames.length > 0) {
            Object[] args = joinPoint.getArgs();
            Map<String, Object> params = new HashMap<>();
            for (int i = 0; i < paramNames.length; i++) {
                params.put(paramNames[i], args[i]);
            }
            // "#id" -> "id"; only simple single-parameter SpEL keys are supported
            idKey = idKey.substring(1);
            String key = cacheNames[0] + "::" + params.get(idKey).toString();
            // mightContain == false means the key is definitely absent
            if (!bloomFilter.mightContain(key)) {
                throw new RuntimeException("系統不存在該key");
            } else {
                return joinPoint.proceed();
            }
        }
        throw new IllegalArgumentException("參數錯誤");
    }
}
最後是Controller,打上該標籤。這裏需要注意的是,當我們查詢出來的對象爲null的時候拋出異常,這樣可以避免在Redis中建立緩存。這裏保存對象的時候會把該對象的id寫入布隆過濾器中,但由於可能存在不同的集羣節點,所以會出現集羣各節點的布隆過濾器數據不一致的問題,但每一分鐘都會去檢索數據庫,所以每分鐘之後,各個節點的布隆過濾器的數據會再次同步,當然我們會考慮更好的數據一致性處理方式。
/**
 * City REST endpoints with the full protection stack on the read path:
 * @Cacheable (Redis cache), @Lock (stampede protection) and @Bloom
 * (penetration protection). Throwing when the City is null keeps NullValue
 * entries out of Redis; saving adds the new key to the local Bloom filter.
 */
@RestController
public class CityController {

    @Autowired
    private CityService cityService;

    @Autowired
    private BloomFilter<String> bloomFilter;

    // Guarded read: Bloom filter rejects unknown ids before cache/DB access;
    // a null result raises instead of being cached as NullValue.
    @GetMapping("/findbyid")
    @Cacheable(cacheNames = "city_info",key = "#id")
    @Lock
    @Bloom
    public City findCityById(@RequestParam("id") int id) {
        City city = cityService.getById(id);
        if (city != null) {
            return city;
        }
        throw new IllegalArgumentException("id不存在");
    }

    // On a successful insert, register the new key in this node's Bloom filter;
    // other nodes converge via the scheduler's periodic DB reload.
    @PostMapping("/save")
    @CachePut(cacheNames = "city_info",key = "#city.id")
    public City saveCity(@RequestBody City city) {
        if (cityService.save(city)) {
            bloomFilter.put("city_info::" + city.getId());
            return city;
        }
        throw new IllegalArgumentException("保存失敗");
    }

    // NOTE(review): evicts even when removeById returns false; the Bloom filter
    // cannot forget deleted ids, so deleted keys still pass the filter.
    @GetMapping("/deletebyid")
    @CacheEvict(cacheNames = "city_info",key = "#id")
    public boolean deleteCityById(@RequestParam("id") int id) {
        return cityService.removeById(id);
    }

    // Updates rows matched by a wrapper built from the id, then refreshes the
    // cache entry with the request body via @CachePut.
    @PostMapping("/update")
    @CachePut(cacheNames = "city_info",key = "#city.id")
    public City updateCity(@RequestBody City city) {
        City cityQuery = new City();
        cityQuery.setId(city.getId());
        QueryWrapper<City> wrapper = new QueryWrapper<>(cityQuery);
        cityService.update(city,wrapper);
        return city;
    }
}
添加布隆過濾器的分佈式節點的同步模式
增加Hazelcast的配置,有關Hazelcast的內容,請參考JVM內存級分佈式緩存Hazelcast
@Configuration public class HazelcastConfiguration { @Bean public Config hazelCastConfig() { Config config = new Config(); config.setInstanceName("hazelcast-instance").addMapConfig( new MapConfig().setName("configuration").setMaxSizeConfig(new MaxSizeConfig(200, MaxSizeConfig.MaxSizePolicy.FREE_HEAP_SIZE)).setEvictionPolicy(EvictionPolicy.LFU) .setTimeToLiveSeconds(-1)); return config; } @Bean public HazelcastInstance instance() { return Hazelcast.newHazelcastInstance(); } @Bean public Map<Integer,BloomFilter<String>> bloomFilters() { Map<Integer,BloomFilter<String>> blooms = instance().getMap("bloom"); return blooms; } }
修改布隆過濾器AOP
/**
 * Cluster-aware variant of the Bloom-filter guard: the filter is read from
 * the Hazelcast-backed map (key 1) instead of a node-local bean, so all
 * nodes check against the same published filter snapshot.
 */
@Aspect
@Component
public class BloomFilterAop {

    // Hazelcast distributed map; entry 1 is published by BloomFilterScheduler
    @Autowired
    private Map<Integer,BloomFilter<String>> bloomFilters;

    @Around(value = "@annotation(com.guanjian.rediscaching.annotation.Bloom)")
    public Object bloom(ProceedingJoinPoint joinPoint) throws Throwable {
        MethodSignature methodSignature = (MethodSignature) joinPoint.getSignature();
        // NOTE(review): NPEs if @Bloom is placed on a method without @Cacheable
        // — confirm that pairing is enforced elsewhere.
        Cacheable cacheableAnnotion = methodSignature.getMethod().getDeclaredAnnotation(Cacheable.class);
        String[] cacheNames = cacheableAnnotion.cacheNames();
        String idKey = cacheableAnnotion.key();
        String[] paramNames = methodSignature.getParameterNames();
        if (paramNames != null && paramNames.length > 0) {
            Object[] args = joinPoint.getArgs();
            Map<String, Object> params = new HashMap<>();
            for (int i = 0; i < paramNames.length; i++) {
                params.put(paramNames[i], args[i]);
            }
            // "#id" -> "id"; only simple single-parameter SpEL keys are supported
            idKey = idKey.substring(1);
            String key = cacheNames[0] + "::" + params.get(idKey).toString();
            // NOTE(review): bloomFilters.get(1) is null until the scheduler has
            // published the filter — requests in that window would NPE; verify
            // startup ordering.
            if (!bloomFilters.get(1).mightContain(key)) {
                throw new RuntimeException("系統不存在該key");
            } else {
                return joinPoint.proceed();
            }
        }
        throw new IllegalArgumentException("參數錯誤");
    }
}
調度器改爲只運行一次
/**
 * Populates the local Guava Bloom filter from the city table once at startup,
 * then publishes it into the Hazelcast-backed map under key 1 so every
 * cluster node reads the same filter.
 */
@Component
public class BloomFilterScheduler {

    @Autowired
    private BloomFilter<String> bloomFilter;

    @Autowired
    private CityService cityService;

    // Hazelcast distributed map ("bloom"); putting here replicates the filter
    @Autowired
    private Map<Integer,BloomFilter<String>> bloomFilters;

    @PostConstruct
    public void getAllCityForBloomFilter() {
        List<City> list = cityService.list();
        // NOTE(review): parallelStream mutates the shared filter concurrently —
        // confirm the Guava version in use makes BloomFilter.put thread-safe.
        list.parallelStream().forEach(city -> bloomFilter.put("city_info::" + city.getId()));
        // publishes a snapshot; later local puts are invisible to other nodes
        // until the filter is put() into the map again
        bloomFilters.put(1,bloomFilter);
    }
}
最後是Controller
/**
 * City REST endpoints, final version: the read path keeps @Cacheable + @Lock +
 * @Bloom, and saveCity asynchronously republishes the updated Bloom filter to
 * the Hazelcast map so other cluster nodes see the new id.
 */
@RestController
public class CityController {

    @Autowired
    private CityService cityService;

    @Autowired
    private BloomFilter<String> bloomFilter;

    // Hazelcast distributed map; entry 1 is the cluster-shared filter
    @Autowired
    private Map<Integer,BloomFilter<String>> bloomFilters;

    // Guarded read: Bloom filter rejects unknown ids before cache/DB access;
    // a null result raises instead of being cached as NullValue.
    @GetMapping("/findbyid")
    @Cacheable(cacheNames = "city_info",key = "#id")
    @Lock
    @Bloom
    public City findCityById(@RequestParam("id") int id) {
        City city = cityService.getById(id);
        if (city != null) {
            return city;
        }
        throw new IllegalArgumentException("id不存在");
    }

    // After a successful insert, add the key locally and republish the filter
    // to Hazelcast off the request thread so the HTTP response is not delayed.
    @PostMapping("/save")
    @CachePut(cacheNames = "city_info",key = "#city.id")
    public City saveCity(@RequestBody City city) {
        if (cityService.save(city)) {
            CompletableFuture.runAsync(() -> {
                bloomFilter.put("city_info::" + city.getId());
                bloomFilters.put(1,bloomFilter);
            });
            return city;
        }
        throw new IllegalArgumentException("保存失敗");
    }

    // NOTE(review): evicts even when removeById returns false; the Bloom filter
    // cannot forget deleted ids, so deleted keys still pass the filter.
    @GetMapping("/deletebyid")
    @CacheEvict(cacheNames = "city_info",key = "#id")
    public boolean deleteCityById(@RequestParam("id") int id) {
        return cityService.removeById(id);
    }

    // Updates rows matched by a wrapper built from the id, then refreshes the
    // cache entry with the request body via @CachePut.
    @PostMapping("/update")
    @CachePut(cacheNames = "city_info",key = "#city.id")
    public City updateCity(@RequestBody City city) {
        City cityQuery = new City();
        cityQuery.setId(city.getId());
        QueryWrapper<City> wrapper = new QueryWrapper<>(cityQuery);
        cityService.update(city,wrapper);
        return city;
    }
}