langserve使用
构建chain
import os
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
# LLM: Qwen (qwen-max) reached through Alibaba DashScope's OpenAI-compatible
# endpoint. Requires the ALI_API_KEY environment variable to be set; if it is
# missing, api_key is None and the first request will fail with an auth error.
llm = ChatOpenAI(
api_key=os.environ.get("ALI_API_KEY"),
base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
model="qwen-max"
)
# Prompt template: the system message pins the persona (a campus/park customer
# service agent); the user message is filled from the "text" input variable,
# so the chain is invoked with {"text": ...}.
prompt = ChatPromptTemplate.from_messages([
("system","你是一个园区的客服"),
("user","{text}")
])
# Output parser: extracts the plain string content from the model's message.
parser = StrOutputParser()
# LCEL pipeline: prompt -> llm -> parser. Input {"text": str}, output str.
chain = prompt| llm | parser
langserve服务端
from fastapi import FastAPI
from langserve import add_routes
# FastAPI application that will host the chain over HTTP.
app = FastAPI(title="大模型园区助手", version="1.0", description="基于langchain构建的大模型助手")
# Expose `chain` under /langchain_demo: langserve registers /invoke, /batch,
# /stream and /playground endpoints beneath that path.
add_routes(
app,
chain,
path="/langchain_demo"
)
# Start the server only when this file is executed directly (not on import).
# host="0.0.0.0" listens on all interfaces, port 8000.
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8000)
服务启动后,可以通过浏览器访问 http://localhost:8000/langchain_demo/playground/ 进行交互测试。
langserve客户端
from langserve import RemoteRunnable
# RemoteRunnable is a client-side proxy for the server-side chain: .invoke()
# POSTs the input dict to /langchain_demo/invoke and returns the parsed
# string result (the server must be running first).
client = RemoteRunnable("http://127.0.0.1:8000/langchain_demo")
s = client.invoke({"text":"你是谁"})
print(s)