import chainlit as cl
from dotenv import load_dotenv
from langchain_community.chat_models import ChatOpenAI
from langchain_core.prompts import ChatPromptTemplate
from langchain.schema import StrOutputParser
from langchain.schema.runnable import Runnable
from langchain.schema.runnable.config import RunnableConfig

# Load environment variables (e.g. OPENAI_API_KEY) from a .env file
load_dotenv()


@cl.on_chat_start
async def on_chat_start():
    # Build a streaming chat model and a prompt -> model -> parser chain
    model = ChatOpenAI(streaming=True)
    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "You're a very knowledgeable historian who provides accurate and eloquent answers to historical questions.",
            ),
            ("human", "{question}"),
        ]
    )
    runnable = prompt | model | StrOutputParser()
    # Store the chain in the user session so the message handler can reuse it
    cl.user_session.set("runnable", runnable)


@cl.on_message
async def on_message(message: cl.Message):
    runnable = cl.user_session.get("runnable")  # type: Runnable
    msg = cl.Message(content="")

    # Stream the model's answer back to the Chainlit UI token by token
    async for chunk in runnable.astream(
        {"question": message.content},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    ):
        await msg.stream_token(chunk)

    await msg.send()
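
# Usage note (assumption, not part of the original file): a Chainlit app like
# this one is normally started with the Chainlit CLI, for example:
#
#   chainlit run my_chainlit_example1.py -w
#
# The -w flag enables auto-reload during development. A .env file providing
# OPENAI_API_KEY must be present so that ChatOpenAI can authenticate.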