Merge pull request #106 from ValerieRossDEV/main

support other prompt languages in new backend
This commit is contained in:
Stan Girard 2023-05-21 16:24:43 +02:00 committed by GitHub
commit aec870715f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 24 additions and 1 deletions

View File

@ -0,0 +1,19 @@
"""Prompt templates that instruct the LLM to answer in the question's language.

Exposes two module-level constants consumed by the chat backend:

- ``CONDENSE_QUESTION_PROMPT``: rewrites a follow-up question (given the chat
  history) into a standalone question, keeping the question's original language.
- ``QA_PROMPT``: answers a question from retrieved context, again in the
  language the question was asked in.
"""
from langchain.prompts.prompt import PromptTemplate

# Template for condensing chat history + follow-up into a standalone question.
_template = """Given the following conversation and a follow up question, answer the follow up question in the initial language of the question. If you don't know the answer, just say that you don't know, don't try to make up an answer.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:"""

# Explicit constructor form: declare the input variables up front.
CONDENSE_QUESTION_PROMPT = PromptTemplate(
    template=_template, input_variables=["chat_history", "question"]
)

# Template for answering from retrieved context in the question's language.
prompt_template = """Use the following pieces of context to answer the question in the language of the question. If you don't know the answer, just say that you don't know, don't try to make up an answer.
{context}
Question: {question}
Helpful Answer:"""

# from_template infers {context} and {question} directly from the string.
QA_PROMPT = PromptTemplate.from_template(prompt_template)

View File

@ -11,6 +11,7 @@ from langchain.llms import OpenAI
from fastapi.openapi.utils import get_openapi
from tempfile import SpooledTemporaryFile
import shutil
import LANGUAGE_PROMPT
import pypandoc
from parsers.common import file_already_exists
@ -73,7 +74,6 @@ class ChatMessage(BaseModel):
file_processors = {
".txt": process_txt,
".csv": process_csv,
@ -115,6 +115,10 @@ async def upload_file(file: UploadFile):
async def chat_endpoint(chat_message: ChatMessage):
history = chat_message.history
# Logic from your Streamlit app goes here. For example:
#this overwrites the built-in prompt of the ConversationalRetrievalChain
ConversationalRetrievalChain.prompts = LANGUAGE_PROMPT
qa = None
if chat_message.model.startswith("gpt"):
qa = ConversationalRetrievalChain.from_llm(