feat(local-cache): add local cache support

1. On the first application startup: query the database, write the result to Redis, and populate the local cache.
2. On the next startup: query Redis and populate the local cache.
ovo 2024-12-23 17:04:02 +08:00
parent 556802f406
commit f51134ea29
5 changed files with 85 additions and 116 deletions
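
A minimal sketch of the read path described in the commit message. It mirrors the UserServiceImpl change further down in this diff; since that hunk is truncated, the Redis write-back and the convertToDTO mapping helper below are assumptions, and the local-cache population is left to Spring's @Cacheable.

// Sketch of the two-level read path (local Caffeine cache + Redis + database).
// @Cacheable stores the returned DTO in the local "userCache"; redisUtil,
// userMapper and USER_CACHE_KEY are the beans/constant used in UserServiceImpl
// below. convertToDTO and redisUtil.set(...) are assumed, not shown in the diff.
@Cacheable(value = "userCache", key = "#id")
public UserDTO getUserById(Long id) {
    // Second startup: Redis still holds the value, so the database is skipped
    Object cached = redisUtil.get(USER_CACHE_KEY + id);
    if (cached != null) {
        return (UserDTO) cached;                 // @Cacheable then copies it into the local cache
    }
    // First startup: load from the database ...
    User user = userMapper.selectById(id);
    if (user == null) {
        return null;
    }
    UserDTO dto = convertToDTO(user);            // hypothetical mapping helper
    redisUtil.set(USER_CACHE_KEY + id, dto);     // ... write it to Redis (assumed method name)
    return dto;                                  // ... and let @Cacheable fill the local cache
}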


@@ -1,89 +0,0 @@
package com.guwan.backend.config;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.stats.CacheStats;
import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.Map;
@Slf4j
@RestController
@RequestMapping("/api/cache")
public class CacheMonitor {
    private final Cache<String, Object> userCache;
    private final Cache<String, Object> productCache;

    public CacheMonitor(Cache<String, Object> userCache,
                        Cache<String, Object> productCache) {
        this.userCache = userCache;
        this.productCache = productCache;
    }

    @GetMapping("/stats")
    public Map<String, Object> getStats() {
        Map<String, Object> stats = new HashMap<>();

        // User cache statistics
        Map<String, Object> userStats = new HashMap<>();
        userStats.put("stats", userCache.stats());
        userStats.put("estimatedSize", userCache.estimatedSize());
        userStats.put("asMap", userCache.asMap());
        stats.put("userCache", userStats);

        // Product cache statistics
        Map<String, Object> productStats = new HashMap<>();
        productStats.put("stats", productCache.stats());
        productStats.put("estimatedSize", productCache.estimatedSize());
        productStats.put("asMap", productCache.asMap());
        stats.put("productCache", productStats);

        return stats;
    }

    @GetMapping("/details")
    public String getDetails() {
        StringBuilder details = new StringBuilder();
        details.append("=== Cache Details ===\n");

        // User cache details
        CacheStats userStats = userCache.stats();
        details.append("User Cache:\n");
        details.append(" Hit count: ").append(userStats.hitCount()).append("\n");
        details.append(" Miss count: ").append(userStats.missCount()).append("\n");
        details.append(" Load success count: ").append(userStats.loadSuccessCount()).append("\n");
        details.append(" Load failure count: ").append(userStats.loadFailureCount()).append("\n");
        details.append(" Total load time: ").append(userStats.totalLoadTime()).append("\n");
        details.append(" Eviction count: ").append(userStats.evictionCount()).append("\n");
        details.append(" Estimated size: ").append(userCache.estimatedSize()).append("\n");

        // Product cache details
        CacheStats productStats = productCache.stats();
        details.append("\nProduct Cache:\n");
        details.append(" Hit count: ").append(productStats.hitCount()).append("\n");
        details.append(" Miss count: ").append(productStats.missCount()).append("\n");
        details.append(" Load success count: ").append(productStats.loadSuccessCount()).append("\n");
        details.append(" Load failure count: ").append(productStats.loadFailureCount()).append("\n");
        details.append(" Total load time: ").append(productStats.totalLoadTime()).append("\n");
        details.append(" Eviction count: ").append(productStats.evictionCount()).append("\n");
        details.append(" Estimated size: ").append(productCache.estimatedSize()).append("\n");

        return details.toString();
    }

    // Log cache statistics every 5 minutes
    @Scheduled(fixedRate = 300000)
    public void logCacheStats() {
        log.info("=== Cache Stats ===");
        log.info("User Cache: {}", userCache.stats());
        log.info("Product Cache: {}", productCache.stats());
    }
}


@@ -1,7 +1,7 @@
package com.guwan.backend.config;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.guwan.backend.controller.monitor.CacheMonitor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
@@ -9,8 +9,8 @@ import org.springframework.cache.caffeine.CaffeineCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import java.util.concurrent.TimeUnit;
import static com.guwan.backend.constant.CacheConstants.CACHE_LIST;
@Slf4j
@EnableCaching
@@ -26,32 +26,13 @@ public class CaffeineConfig {
                .expireAfterAccess(60, TimeUnit.MINUTES)
                .initialCapacity(100)
                .maximumSize(1000));
        cacheManager.setCacheNames(CACHE_LIST);
        return cacheManager;
    }

    @Bean
    public Cache<String, Object> userCache() {
        return Caffeine.newBuilder()
                .recordStats()
                .expireAfterWrite(10, TimeUnit.MINUTES)
                .initialCapacity(100)
                .maximumSize(1000)
                .build();
    }

    @Bean
    public Cache<String, Object> productCache() {
        return Caffeine.newBuilder()
                .recordStats()
                .expireAfterWrite(30, TimeUnit.MINUTES)
                .initialCapacity(100)
                .maximumSize(1000)
                .build();
    }

    @Bean
    public CacheMonitor cacheMonitor(Cache<String, Object> userCache,
                                     Cache<String, Object> productCache) {
        return new CacheMonitor(userCache, productCache);
    public CacheMonitor cacheMonitor(CacheManager cacheManager) {
        return new CacheMonitor(cacheManager);
    }
}


@@ -0,0 +1,23 @@
package com.guwan.backend.constant;
import java.util.List;
/**
 * Cache-related constants.
 */
public class CacheConstants {

    /**
     * Names of the local (Caffeine) caches registered with the CacheManager.
     */
    public static final List<String> CACHE_LIST = List.of(
            "userCache"
    );
}


@@ -0,0 +1,51 @@
package com.guwan.backend.controller.monitor;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.stats.CacheStats;
import com.guwan.backend.common.BusinessException;
import lombok.extern.slf4j.Slf4j;
import org.springframework.cache.CacheManager;
import org.springframework.cache.caffeine.CaffeineCacheManager;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.Map;
@Slf4j
@RestController
@RequestMapping("/api/cache")
public class CacheMonitor {
    private final CacheManager cacheManager;

    public CacheMonitor(CacheManager cacheManager) {
        this.cacheManager = cacheManager;
    }

    @GetMapping("/stats")
    public Map<String, Object> getStats(String cacheName) {
        CaffeineCacheManager caffeineCacheManager = (CaffeineCacheManager) cacheManager;
        Cache<Object, Object> nativeCache = null;
        try {
            // Unwrap the native Caffeine cache behind the Spring cache abstraction
            nativeCache = (Cache<Object, Object>)
                    caffeineCacheManager.getCache(cacheName).getNativeCache();
        } catch (Exception e) {
            // throw new RuntimeException(e);
            throw new BusinessException("No such local cache");
        }
        CacheStats stats = nativeCache.stats();
        Map<String, Object> result = new HashMap<>();
        result.put("hitCount (times data was served from the cache)", stats.hitCount());
        result.put("missCount (times the cache had no entry)", stats.missCount());
        result.put("loadCount (times a value had to be loaded from the data source)", stats.loadCount());
        result.put("evictionCount (entries evicted because of the size limit)", stats.evictionCount());
        result.put("hitRate (cache hit rate)", String.format("%.2f%%", stats.hitRate() * 100));
        return result;
    }
}
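
For reference, a hypothetical client-side call against the new stats endpoint; this is not part of the commit and assumes the backend listens on localhost:8080, using Spring's RestTemplate as the HTTP client.

// Hypothetical usage sketch: query the stats of the "userCache" registered in
// CacheConstants.CACHE_LIST via the new /api/cache/stats endpoint.
import org.springframework.web.client.RestTemplate;
import java.util.Map;

public class CacheStatsClient {
    public static void main(String[] args) {
        RestTemplate restTemplate = new RestTemplate();
        Map<?, ?> stats = restTemplate.getForObject(
                "http://localhost:8080/api/cache/stats?cacheName=userCache", Map.class);
        System.out.println(stats);   // hit/miss/load/eviction counts and hit rate
    }
}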


@@ -175,13 +175,16 @@ public class UserServiceImpl extends ServiceImpl<UserMapper, User> implements Us
    @OperationLog(description = "Get user information by ID")
    @Cacheable(value = "userCache", key = "#id")
    public UserDTO getUserById(Long id) {
        log.info("Checking Redis cache");
        log.info("Querying the Redis cache");
        // Try Redis first
        Object cached = redisUtil.get(USER_CACHE_KEY + id);
        if (cached != null) {
            return (UserDTO) cached;
        } else {
            log.info("Redis cache is empty");
        }
        log.info("Getting user from database, id: {}", id);
        log.info("Querying the database, id: {}", id);
        User user = userMapper.selectById(id);
        if (user == null) {
            return null;