1. 技术背景
AI大模型的调用往往是高资源消耗的操作。在实际应用中,为了防止恶意用户借助辅助工具频繁调用这些大模型、占用大量服务器资源,进而影响其他用户的请求处理、降低系统的整体性能和服务质量,可以使用分布式锁来实现这一限制策略。
【实现思路】
请求拦截:在请求到达之前,拦截并尝试获取分布式锁。
获取锁:如果成功获取锁,继续执行AI模型调用。
失败处理:如果获取锁失败,返回HTTP 429(Too Many Requests)状态码,提示用户请求频率过高。
释放锁:调用结束后,释放锁,以便其他请求能够获取锁。
2. 分布式锁代码实现示例
此处分布式锁的实现选用 Redisson 框架。选型原因可参考:《Redis 分布式锁存在什么问题?如何解决?》(CSDN 博客)。
2.1 添加 Redisson 框架支持
<!-- Redisson: Redis client that provides the distributed lock (RLock) used below -->
<!-- https://mvnrepository.com/artifact/org.redisson/redisson-spring-boot-starter -->
<dependency><groupId>org.redisson</groupId><artifactId>redisson-spring-boot-starter</artifactId><version>3.25.2</version> <!-- check for the latest stable version before adopting -->
</dependency>
2.2 配置 RedissonClient 对象
/**
 * Redisson configuration: exposes a {@link RedissonClient} bean connected to a
 * single Redis server whose host/port come from the application properties.
 *
 * @author helong
 */
@Configuration
public class RedissonConfig {

    /** Redis host, bound from {@code spring.data.redis.host}. */
    @Value("${spring.data.redis.host}")
    private String host;

    /** Redis port, bound from {@code spring.data.redis.port}. */
    @Value("${spring.data.redis.port}")
    private Integer port;

    /**
     * Builds the shared {@link RedissonClient} in single-server mode.
     *
     * @return {@link RedissonClient} registered in the Spring container
     */
    @Bean
    public RedissonClient redissonClient() {
        String address = "redis://" + host + ":" + port;
        Config config = new Config();
        config.useSingleServer().setAddress(address);
        return Redisson.create(config);
    }
}
2.3 自定义调用 AI 大模型请求分布式锁拦截器
/*** 自定义 AI 大模型调用分布式锁的拦截器** @author helong*/
@Component
public class AIModelLockInterceptor implements HandlerInterceptor {/*** 注入 RedissonClient*/@Resourceprivate RedissonClient redissonClient;private static final String REQUEST_KEY = "MODEL_LOCK";private static final String MODEL = "MODEL";private static final String TYPE = "TYPE";/*** 调用目标方法前执行* @param request* @param response* @param handler* @return boolean* @throws Exception*/@Overridepublic boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {SecurityUserDetails userDetails = SecurityUtil.getCurrentUser();Long uid = ObjectUtil.isNotNull(userDetails) ? userDetails.getUid() : NumberUtils.LONG_ZERO;// 获取请求中调用的AI模型和类型HashMap<String, Integer> modelAndTypeMap = getModelAndTypeByRequestURI(request.getRequestURI());if (modelAndTypeMap.isEmpty()) {return true;}// 获取当前用户分布式锁的 keyString lockKey = RedisUtil.getModelLockKey(uid, modelAndTypeMap.get(MODEL), modelAndTypeMap.get(TYPE));RLock rLock = redissonClient.getLock(lockKey);boolean isLock = rLock.tryLock(5, TimeUnit.SECONDS);if (!isLock) {// 客户端请求过于频繁,获取锁失败response.setContentType("application/json;charset=UTF-8");response.setCharacterEncoding("UTF-8");String json = "{\"code\": 429, \"msg\": \"请勿频繁请求,请稍后再试!\", \"data\": null}";response.getWriter().write(json);return false;}request.setAttribute(REQUEST_KEY, rLock);return true;}/*** 调用目标方法后执行(释放锁)* @param request* @param response* @param handler* @param ex*/@Overridepublic void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) {RLock rLock = (RLock) request.getAttribute(REQUEST_KEY);if (rLock != null && rLock.isHeldByCurrentThread()) {rLock.unlock();}}/*** 用于匹配请求路径*/private final Map<String, AiModelEnum> modelMap = new HashMap<>() {@Serialprivate static final long serialVersionUID = -1000996186146839620L;{put("/openai", AiModelEnum.CHAT_GPT);put("/tongyi", AiModelEnum.TONG_YI_QIAN_WEN);put("/xunfei", AiModelEnum.XUN_FEI_XIN_HUO);put("/wenxin", 
AiModelEnum.WEN_XIN_YI_YAN);put("/doubao", AiModelEnum.DOU_BAO);}};/*** 获取请求中调用的AI模型和类型** @param requestURI* @return {@link HashMap }<{@link String }, {@link Integer }>*/private HashMap<String, Integer> getModelAndTypeByRequestURI(String requestURI) {HashMap<String, Integer> modelAndTypeMap = new HashMap<>();// 遍历 modelMap 以匹配请求路径for (Map.Entry<String, AiModelEnum> entry : modelMap.entrySet()) {String key = entry.getKey();AiModelEnum modelEnum = entry.getValue();if (requestURI.startsWith(key + "/chat")) {modelAndTypeMap.put(MODEL, modelEnum.getCode());modelAndTypeMap.put(TYPE, AiTypeEnum.CHAT.getCode());return modelAndTypeMap;} else if (requestURI.startsWith(key + "/draw")) {modelAndTypeMap.put(MODEL, modelEnum.getCode());modelAndTypeMap.put(TYPE, AiTypeEnum.DRAW.getCode());return modelAndTypeMap;}}return modelAndTypeMap;}
}
/**
 * Builds the distributed-lock key for an AI-model invocation.
 *
 * @param uid   user id owning the lock
 * @param model AI model code
 * @param type  invocation type code (e.g. chat/draw)
 * @return {@link String} key in the form {@code MODEL_LOCK_KEY_<uid>_<model>_<type>}
 */
public static String getModelLockKey(Long uid, Integer model, Integer type) {
    return String.format("MODEL_LOCK_KEY_%s_%s_%s", uid, model, type);
}
2.4 配置请求拦截规则
/*** 配置自定义拦截器拦截规则** @author helong*/
@Configuration
public class WebConfig implements WebMvcConfigurer {/*** 注入调用AI模分布式型锁拦截器*/@Resourceprivate AIModelLockInterceptor aiModelLockInterceptor;@Overridepublic void addInterceptors(InterceptorRegistry registry) {// 拦截调用 AI 大模型分布式锁的拦截规则registry.addInterceptor(aiModelLockInterceptor)// 拦截调用 AI 大模型的请求.addPathPatterns("/openai/chat", "/openai/draw","/tongyi/chat", "/tongyi/draw","/xunfei/chat", "/xunfei/draw","/wenxin/chat", "/wenxin/draw","/doubao/chat", "/doubao/draw");}
}