使用 LangChain 的 bind_tools 方法一直报错,即使改用 ChatOpenAI
作为 LLM 接口也不行。
根据这个issue 下的回答,修改了 ChatOpenAI
的 import 出处,解决了问题
https://github.com/langchain-ai/langchain/issues/21479#issuecomment-2105618237
我的完整代码为:
代码使用官方示例:https://python.langchain.com/docs/how_to/tool_calling/
区别在于:将大模型调用改为通过 litellm 代理(OpenAI 兼容接口)进行
import os

# NOTE(review): importing ChatOpenAI from langchain_community.chat_models
# breaks bind_tools; langchain_openai is the correct package.
# See https://github.com/langchain-ai/langchain/issues/21479#issuecomment-2105618237
# from langchain_community.chat_models import ChatOpenAI  # wrong source, causes the error
from langchain_openai import ChatOpenAI
from typing_extensions import Annotated, TypedDict


class add(TypedDict):
    """Add two integers."""

    # Annotations must have the type and can optionally include a default
    # value and description (in that order).
    a: Annotated[int, ..., "First integer"]
    b: Annotated[int, ..., "Second integer"]


class multiply(TypedDict):
    """Multiply two integers."""

    a: Annotated[int, ..., "First integer"]
    b: Annotated[int, ..., "Second integer"]


# Tool schemas passed to the model via bind_tools.
tools = [add, multiply]

# The litellm proxy ignores the key's value; routing is by model name.
os.environ["OPENAI_API_KEY"] = "sk-1234"
model_name = "zhipu--GLM-4-Flash"

model = ChatOpenAI(
    openai_api_base="http://0.0.0.0:4000",  # local litellm proxy endpoint
    model=model_name,
    temperature=0.1,
)
# response = model.invoke([HumanMessage(content="hi!")])

llm_with_tools = model.bind_tools(tools)

query = "What is 3 * 12?"
ret = llm_with_tools.invoke(query)
print(ret)
结果
content=''
additional_kwargs={'tool_calls': [{'id': 'call_9115998954352548377', 'function': {'arguments': '{"a": 3, "b": 12}', 'name': 'multiply'}, 'type': 'function', 'index': 0}], 'refusal': None
} response_metadata={'token_usage': {'completion_tokens': 15, 'prompt_tokens': 299, 'total_tokens': 314, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'GLM-4-Flash', 'system_fingerprint': None, 'finish_reason': 'tool_calls', 'logprobs': None
} id='run-45a19962-0acd-4e36-9dd7-fcf395bfc98e-0'
tool_calls=[{'name': 'multiply', 'args': {'a': 3, 'b': 12}, 'id': 'call_9115998954352548377', 'type': 'tool_call'}] usage_metadata={'input_tokens': 299, 'output_tokens': 15, 'total_tokens': 314, 'input_token_details': {}, 'output_token_details': {}
}
2024-10-16(三)