Environment: Python 3.9 or later
# https://python.langchain.com/v0.2/docs/tutorials/agents/
# import getpass
import os
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_community.document_loaders import WebBaseLoader
from langchain_community.vectorstores import FAISS
# from langchain_openai import OpenAIEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_community.embeddings import ModelScopeEmbeddings
# from langchain_community.embeddings import HuggingFaceBgeEmbeddings
from langchain.tools.retriever import create_retriever_tool
from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage
from langgraph.prebuilt import chat_agent_executor
""" 官网创建langsmith账号并创建 API key"""
os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ["LANGCHAIN_API_KEY"] = "qwertyuiop-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"
os.environ["TAVILY_API_KEY"] = "qwertyuiop-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"
# os.environ["LANGCHAIN_API_KEY"] = getpass.getpass()
os.environ["TOGETHER_API_KEY"] = "qwertyuiop-34bfaf267dc3f1a3d5130f8949c14df7eb5770568739d05bfe398739ff83b783"
os.environ["TOKENIZERS_PARALLELISM"] = "False"
""" Define tools two tools: Tavily (to search online) and then a retriever over a local index """
""" Tavily Tool """
search = TavilySearchResults(max_results=2, verbose=True)
# search.invoke("今天是2024年5月21日,中国哪个城市最国际化?")
"""Retriever Tool """
loader = WebBaseLoader("https://www.chinacdc.cn/jkzt/crb/jl/sy/zstd_10917/201911/t20191113_206794.html")
docs = loader.load()
documents = RecursiveCharacterTextSplitter(chunk_size=100, chunk_overlap=20).split_documents(docs)
# vector = FAISS.from_documents(documents, OpenAIEmbeddings())  # requires `OPENAI_API_KEY`
vector = FAISS.from_documents(documents, ModelScopeEmbeddings(model_id="damo/nlp_corom_sentence-embedding_english-base"))
retriever = vector.as_retriever()
print(retriever.invoke("鼠疫的传播途径?")[0])
# print(retriever.invoke("what is langsmith?")[0])
""" 转换为工具 create_retriever_tool """
retriever_tool = create_retriever_tool(
    retriever,
    "faiss_search",
    "Search the indexed China CDC page about plague (鼠疫); for any question about plague, you must use this tool",
)
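# (Added sketch, not part of the original tutorial) LangChain tools can be invoked
# directly, so a quick sanity check of the new retriever tool might look like this --
# it takes a single "query" string and returns the concatenated page content of the
# matching chunks:
# print(retriever_tool.invoke("鼠疫的传播途径?"))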
# Now that we have created both, we can create a list of tools that we will use downstream.
tools = [search, retriever_tool]
""" Using Language Models """
model = ChatOpenAI(
    base_url="https://api.together.xyz/v1",
    api_key=os.environ["TOGETHER_API_KEY"],
    model="mistralai/Mixtral-8x7B-Instruct-v0.1",
)
# You can call the language model by passing in a list of messages. By default, the response is a content string.
# response = model.invoke([HumanMessage(content="hi, who are you ?")])
# print(response.content)
# We can now see what it is like to enable this model to do tool calling. In order to enable that we use .bind_tools to give the language model knowledge of these tools
model_with_tools = model.bind_tools(tools)
# response2 = model_with_tools.invoke([HumanMessage(content="hi")])
# print(f"content: {response2.content}")
# print(f"tool calls: {response2.tool_calls}")
# Now, let's try calling it with some input that would expect a tool to be called.
# response = model_with_tools.invoke([HumanMessage(content="what is the weather in Shanghai?")])
# print(f"content: {response.content}")
# print(f"tool calls: {response.tool_calls}")
""" Create the agent """
agent_executor = chat_agent_executor.create_tool_calling_executor(model, tools)
""" Run the agent """
response = agent_executor.invoke({"messages": [HumanMessage(content="hi! 中国上海这个城市怎么样?")]})
print(response["messages"])
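# (Added sketch) The tutorial also demonstrates streaming intermediate messages instead
# of waiting for the final state; assuming the executor exposes the standard LangGraph
# .stream() API, it would look roughly like this:
# for chunk in agent_executor.stream({"messages": [HumanMessage(content="中国上海这个城市怎么样?")]}):
#     print(chunk)
#     print("----")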
Console output:
(py39) wangyp@ubuntu:~$ python /home/wangyp/Big_Model/big_AI/langchain_agent.py
2024-05-23 10:17:20,787 - modelscope - INFO - PyTorch version 2.3.0 Found.
2024-05-23 10:17:20,787 - modelscope - INFO - Loading ast index from /home/wangyp/.cache/modelscope/ast_indexer
2024-05-23 10:17:20,818 - modelscope - INFO - Loading done! Current index file version is 1.14.0, with md5 55d1161df5209b45fff81b0e52922dac and a total number of 976 components indexed
2024-05-23 10:17:22,431 - modelscope - WARNING - Model revision not specified, use revision: v1.0.0
2024-05-23 10:17:22,655 - modelscope - INFO - initiate model from /home/wangyp/.cache/modelscope/hub/damo/nlp_corom_sentence-embedding_english-base
2024-05-23 10:17:22,655 - modelscope - INFO - initiate model from location /home/wangyp/.cache/modelscope/hub/damo/nlp_corom_sentence-embedding_english-base.
2024-05-23 10:17:22,656 - modelscope - INFO - initialize model from /ho