# Example dummy function, hard-coded to return the same weather.
# In production, this could be your backend API or an external API.
def get_current_weather(location, unit="fahrenheit"):
    """Get the current weather in a given location.

    Args:
        location: City and state, e.g. "San Francisco, CA".
        unit: Temperature unit, "celsius" or "fahrenheit" (default).

    Returns:
        A JSON string with location, temperature, unit and forecast.
    """
    weather_info = {
        "location": location,
        "temperature": "72",
        "unit": unit,
        "forecast": ["sunny", "windy"],
    }
    return json.dumps(weather_info)


# Define a function schema. This description format matters: later in the
# text it is referred to as the OpenAI "function" format. It can also be
# viewed as a function-shaped prompt.
functions = [
    {
        "name": "get_current_weather",
        "description": "Get the current weather in a given location",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state, e.g. San Francisco, CA",
                },
                "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
            },
            "required": ["location"],
        },
    }
]
# A single-turn conversation: the user asks about the weather in Boston.
messages = [
    {
        "role": "user",
        "content": "What's the weather like in Boston?",
    },
]
{ "id":"chatcmpl-8pIytDN4Zj7WvgWUQPK9SCKYhH13G", "object":"chat.completion", "created":1707238383, "model":"gpt-3.5-turbo-0613", "choices":[ { "index":0, "message":{ "role":"assistant", "content":"The weather in Boston is currently 72\u00b0F. It is sunny and windy." }, "logprobs":null, "finish_reason":"stop" } ], "usage":{ "prompt_tokens":56, "completion_tokens":16, "total_tokens":72 }, "system_fingerprint":null }
到此我们确实拿到了LLM的输出结果:The weather in Boston is currently 72°F. It is sunny and windy.
from langchain.utils.openai_functions import convert_pydantic_to_openai_function


class OpenMeteoInput(BaseModel):
    """Latitude of the location to fetch weather data for"""

    latitude: float = Field(..., description="Latitude of the location to fetch weather data for")
    longitude: float = Field(..., description="Longitude of the location to fetch weather data for")


# Convert the pydantic model to the OpenAI function format; the class
# docstring becomes the function "description" in the resulting schema
# (see the printed output below).
openMeteoInput_function = [convert_pydantic_to_openai_function(OpenMeteoInput)]
print(openMeteoInput_function)
输出如下:
[{'name': 'OpenMeteoInput', 'description': 'Latitude of the location to fetch weather data for', 'parameters': {'title': 'OpenMeteoInput', 'description': 'Latitude of the location to fetch weather data for', 'type': 'object', 'properties': {'latitude': {'title': 'Latitude', 'description': 'Latitude of the location to fetch weather data for', 'type': 'number'}, 'longitude': {'title': 'Longitude', 'description': 'Longitude of the location to fetch weather data for', 'type': 'number'}}, 'required': ['latitude', 'longitude']}}]
这里需要注意的是,能被解析为openai_function的pydantic类必须包含 """Latitude of the location to fetch weather data for""" 这样的文档字符串,这个被解析出来后是对function的description
import datetime
import json

import requests
from pydantic import BaseModel, Field
# Define the input schema for the tool.
class OpenMeteoInput(BaseModel):
    latitude: float = Field(..., description="Latitude of the location to fetch weather data for")
    longitude: float = Field(..., description="Longitude of the location to fetch weather data for")


# @tool automatically turns get_current_temperature into the LLM-callable
# "functions" format mentioned above — here it is LangChain's tool format,
# which is the same idea with one extra layer of wrapping.
# args_schema=OpenMeteoInput makes the tool's input arguments follow the
# OpenMeteoInput field descriptions.
@tool(args_schema=OpenMeteoInput)
def get_current_temperature(latitude: float, longitude: float) -> dict:
    """Fetch current temperature for given coordinates."""
    BASE_URL = "https://api.open-meteo.com/v1/forecast"

    # Parameters for the request
    params = {
        'latitude': latitude,
        'longitude': longitude,
        'hourly': 'temperature_2m',
        'forecast_days': 1,
    }

    # Make the request
    response = requests.get(BASE_URL, params=params)

    if response.status_code == 200:
        results = response.json()
    else:
        raise Exception(f"API Request failed with status code: {response.status_code}")

    # Pick the hourly sample whose timestamp is closest to "now" (UTC).
    # NOTE(review): Open-Meteo returns naive ISO timestamps, so comparing
    # against naive utcnow() works; if the API ever returned 'Z'-suffixed
    # (aware) times, this subtraction would raise — confirm if reused.
    current_utc_time = datetime.datetime.utcnow()
    time_list = [
        datetime.datetime.fromisoformat(time_str.replace('Z', '+00:00'))
        for time_str in results['hourly']['time']
    ]
    temperature_list = results['hourly']['temperature_2m']

    closest_time_index = min(
        range(len(time_list)),
        key=lambda i: abs(time_list[i] - current_utc_time),
    )
    current_temperature = temperature_list[closest_time_index]

    return f'The current temperature is {current_temperature}°C'


# This can likewise be converted to the OpenAI function format.
# format_tool_to_openai_function presumably wraps
# convert_pydantic_to_openai_function — the principle is the same.
from langchain.tools.render import format_tool_to_openai_function

# BUG FIX: .bind(functions=...) expects a *list* of function schemas, so the
# single converted schema is wrapped in a list (the original passed it bare;
# compare the list built later for the two-tool case).
get_current_temperature_functions = [format_tool_to_openai_function(get_current_temperature)]

from langchain.chat_models import ChatOpenAI

model = ChatOpenAI(temperature=0).bind(functions=get_current_temperature_functions)
model.invoke("what is the weather in sf right now")
# Second tool: Wikipedia search.
import wikipedia


@tool
def search_wikipedia(query: str) -> str:
    """Run Wikipedia search and get page summaries."""
    page_titles = wikipedia.search(query)
    summaries = []
    # Only fetch summaries for the top three hits.
    for page_title in page_titles[:3]:
        try:
            wiki_page = wikipedia.page(title=page_title, auto_suggest=False)
            summaries.append(f"Page: {page_title}\nSummary: {wiki_page.summary}")
        except (
            # BUG FIX: the original caught self.wiki_client.exceptions.*, but
            # there is no `self` in a module-level function (that reference
            # was copied from the class-based WikipediaAPIWrapper). Use the
            # wikipedia module's exception classes directly.
            wikipedia.exceptions.PageError,
            wikipedia.exceptions.DisambiguationError,
        ):
            pass
    if not summaries:
        return "No good Wikipedia Search Result was found"
    return "\n\n".join(summaries)
# Build the functions list. This still uses OpenAI's own mechanism, which only
# performs a single-step decision; the final output loop must be implemented
# by ourselves.
functions = [
    format_tool_to_openai_function(f)
    for f in [
        search_wikipedia,
        get_current_temperature,
    ]
]
model = ChatOpenAI(temperature=0).bind(functions=functions)

model.invoke("what is the weather in sf right now")
# Output: AIMessage(content='', additional_kwargs={'function_call': {'name': 'get_current_temperature',
#   'arguments': '{\n  "latitude": 37.7749,\n  "longitude": -122.4194\n}'}})

model.invoke("what is langchain")
# AIMessage(content='', additional_kwargs={'function_call': {'name': 'search_wikipedia', 'arguments': '{\n  "query": "langchain"\n}'}})
## Next, build the same thing the "chain" way.
from langchain.prompts import ChatPromptTemplate
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser

prompt = ChatPromptTemplate.from_messages([
    ("system", "You are helpful but sassy assistant"),
    ("user", "{input}"),
])

chain = prompt | model | OpenAIFunctionsAgentOutputParser()

result = chain.invoke({"input": "what is the weather in sf right now"})
type(result)        # langchain.schema.agent.AgentActionMessageLog
result.tool         # 'get_current_temperature'
result.tool_input   # {'latitude': 37.7749, 'longitude': -122.4194}

result = chain.invoke({"input": "hi!"})
type(result)        # langchain.schema.agent.AgentFinish
# FIX: in the original the pasted output dict sat directly after the
# expression on the same line, which is a syntax error; it is a comment here.
result.return_values
# {'output': 'Hello! How can I assist you today?'}
from langchain.schema.agent import AgentFinish


def route(result):
    """Route a parser result: return the final answer, or run the chosen tool.

    Essentially wraps the check `type(result) == AgentFinish`: a finished run
    yields its output string, otherwise the named tool is executed with the
    parsed arguments.
    """
    if isinstance(result, AgentFinish):
        return result.return_values['output']
    else:
        tools = {
            "search_wikipedia": search_wikipedia,
            "get_current_temperature": get_current_temperature,
        }
        # Once decorated with @tool, a function can both be converted to the
        # LangChain function format and be invoked via function_name.run(args).
        return tools[result.tool].run(result.tool_input)


# Chain composition: prompt -> model -> parser -> router.
chain = prompt | model | OpenAIFunctionsAgentOutputParser() | route

result = chain.invoke({"input": "What is the weather in san francisco right now?"})
result
# 'The current temperature is 11.6°C'

result = chain.invoke({"input": "What is langchain?"})
result
# 'Page: LangChain\nSummary: LangChain is a framework designed to simplify the creation of applications
# using large language models (LLMs). As a language model integration framework, LangChain\'s use-cases
# largely overlap with those of language models in general, including document analysis and summarization,
# chatbots, and code analysis.\n\n\n\nPage: OpenAI\nSummary: OpenAI is a U.S. based artificial intelligence
# (AI) research organization founded in December 2015, researching artificial intelligence with the goal of
# developing "safe and beneficial" artificial general intelligence, which it defines as "highly autonomous
# systems that outperform humans at most economically valuable work".\nAs one of the leading organizations
# of the AI Spring, it has developed several large language models, advanced image generation models, and
# previously, released open-source models. Its release of ChatGPT has been credited with starting the
# artificial intelligence spring.The organization consists of the non-profit OpenAI, Inc. registered in
# Delaware and its for-profit subsidiary OpenAI Global, LLC. It was founded by Ilya Sutskever, Greg Brockman,
# Trevor Blackwell, Vicki Cheung, Andrej Karpathy, Durk Kingma, Jessica Livingston, John Schulman,
# Pamela Vagata, and Wojciech Zaremba, with Sam Altman and Elon Musk serving as the initial board members.
# Microsoft provided OpenAI Global LLC with a $1 billion investment in 2019 and a $10 billion investment
# in 2023, with a significant portion of the investment in the form of compute resources on Microsoft\'s
# Azure cloud service.On November 17, 2023, the board removed Altman as CEO, while Brockman was removed as
# chairman and then resigned as president. Four days later, both returned after negotiations with the board,
# and most of the board members resigned. The new initial board included former Salesforce co-CEO Bret Taylor
# as chairman. It was also announced that Microsoft will have a non-voting board seat.\n\n\n\nPage: DataStax\n
# Summary: DataStax, Inc. is a real-time data for AI company based in Santa Clara, California. Its product
# Astra DB is a cloud database-as-a-service based on Apache Cassandra. DataStax also offers DataStax
# Enterprise (DSE), an on-premises database built on Apache Cassandra, and Astra Streaming, a messaging and
# event streaming cloud service based on Apache Pulsar. As of June 2022, the company has roughly 800
# customers distributed in over 50 countries.\n\n'

result = chain.invoke({"input": "hi!"})
result
# 'Hello! How can I assist you today?'
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.tools.render import format_tool_to_openai_function
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
from langchain.prompts import MessagesPlaceholder
from langchain.agents.format_scratchpad import format_to_openai_functions
from langchain.schema.agent import AgentFinish

# The set of tools the agent may choose between.
tools = [
    get_current_temperature,
    search_wikipedia,
]
# Convert each LangChain tool into an OpenAI function schema.
functions = [format_tool_to_openai_function(f) for f in tools]
# Bind the schemas to the model so it can emit function_call responses.
model = ChatOpenAI(temperature=0).bind(functions=functions)
# The agent_scratchpad placeholder is where intermediate tool calls/results
# (formatted via format_to_openai_functions) will be injected on each turn.
prompt = ChatPromptTemplate.from_messages([ ("system", "You are helpful but sassy assistant"), ("user", "{input}"), MessagesPlaceholder(variable_name="agent_scratchpad") ])
# prompt -> model -> parser: yields AgentAction / AgentFinish objects.
chain = prompt | model | OpenAIFunctionsAgentOutputParser()