Support other prompt languages in new backend

This commit is contained in:
Valerie 2023-05-21 16:01:53 +02:00
parent c96436ce4b
commit a7089ae5bc
2 changed files with 24 additions and 1 deletions

View File

@@ -0,0 +1,19 @@
from langchain.prompts.prompt import PromptTemplate

# Prompt used to rewrite (condense) the chat history plus a follow-up question
# into a standalone question, answered in the language the question was asked in.
# Template variables: {chat_history}, {question}.
_template = """Given the following conversation and a follow up question, answer the follow up question in the initial language of the question. If you don't know the answer, just say that you don't know, don't try to make up an answer.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:"""
CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

# QA prompt: answer from the retrieved context, in the language of the question.
# Template variables: {context}, {question}.
prompt_template = """Use the following pieces of context to answer the question in the language of the question. If you don't know the answer, just say that you don't know, don't try to make up an answer.
{context}
Question: {question}
Helpful Answer:"""
# Built with from_template for consistency with CONDENSE_QUESTION_PROMPT above;
# input_variables are inferred from the {placeholders} in the template, which
# matches the explicit ["context", "question"] list it replaces.
QA_PROMPT = PromptTemplate.from_template(prompt_template)

View File

@@ -11,6 +11,7 @@ from langchain.llms import OpenAI
from fastapi.openapi.utils import get_openapi
from tempfile import SpooledTemporaryFile
import shutil
import LANGUAGE_PROMPT
from parsers.common import file_already_exists
@@ -66,7 +67,6 @@ class ChatMessage(BaseModel):
file_processors = {
".txt": process_txt,
".csv": process_csv,
@@ -107,6 +107,10 @@ async def upload_file(file: UploadFile):
async def chat_endpoint(chat_message: ChatMessage):
history = chat_message.history
# Logic from your Streamlit app goes here. For example:
#this overwrites the built-in prompt of the ConversationalRetrievalChain
ConversationalRetrievalChain.prompts = LANGUAGE_PROMPT
qa = None
if chat_message.model.startswith("gpt"):
qa = ConversationalRetrievalChain.from_llm(