-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathoracle_chat_prompts.py
41 lines (35 loc) · 1.16 KB
/
oracle_chat_prompts.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
"""
Author: Luigi Saetta
Date created: 2024-04-27
Date last modified: 2024-04-27
Python Version: 3.11
"""
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
#
# Prompt for the "condense question" step: rewrites the latest user turn,
# which may lean on earlier chat context, into a self-contained question
# suitable for querying the Vector Store.
#
CONTEXT_Q_SYSTEM_PROMPT = (
    "Given a chat history and the latest user question "
    "which might reference context in the chat history, formulate a standalone question "
    "which can be understood without the chat history. Do NOT answer the question, "
    "just reformulate it if needed and otherwise return it as is."
)
# Message order: system instructions, full running history, then the new input.
CONTEXT_Q_PROMPT = ChatPromptTemplate.from_messages(
    [
        ("system", CONTEXT_Q_SYSTEM_PROMPT),
        MessagesPlaceholder("chat_history"),
        ("human", "{input}"),
    ]
)
#
# Prompt for the final answer step: the LLM answers the (condensed)
# question using the retrieved context injected via {context}.
#
QA_SYSTEM_PROMPT = (
    "You are an assistant for question-answering tasks. "
    "Use the following pieces of retrieved context to answer the question. "
    "If you don't know the answer, just say that you don't know. "
    "{context}"
)
# Same message layout as the condense prompt: system, history, new input.
QA_PROMPT = ChatPromptTemplate.from_messages(
    [
        ("system", QA_SYSTEM_PROMPT),
        MessagesPlaceholder("chat_history"),
        ("human", "{input}"),
    ]
)