Mirror of https://github.com/StanGirard/quivr.git, synced 2024-12-26 21:02:31 +03:00
Merge pull request #106 from ValerieRossDEV/main
support other prompt languages in new backend
This commit is contained in commit aec870715f
backend/LANGUAGE_PROMPT.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+from langchain.prompts.prompt import PromptTemplate
+
+_template = """Given the following conversation and a follow up question, answer the follow up question in the initial language of the question. If you don't know the answer, just say that you don't know, don't try to make up an answer.
+
+Chat History:
+{chat_history}
+Follow Up Input: {question}
+Standalone question:"""
+CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)
+
+prompt_template = """Use the following pieces of context to answer the question in the language of the question. If you don't know the answer, just say that you don't know, don't try to make up an answer.
+
+{context}
+
+Question: {question}
+Helpful Answer:"""
+QA_PROMPT = PromptTemplate(
+    template=prompt_template, input_variables=["context", "question"]
+)
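For context, the two templates can be exercised directly. A minimal sketch, assuming it is run from the backend directory (where LANGUAGE_PROMPT is importable) and using made-up values for the template variables:

from LANGUAGE_PROMPT import CONDENSE_QUESTION_PROMPT, QA_PROMPT

# CONDENSE_QUESTION_PROMPT fills {chat_history} and {question}
print(CONDENSE_QUESTION_PROMPT.format(
    chat_history="Human: ¿Qué es Quivr?\nAI: Es un 'segundo cerebro'.",
    question="¿Cómo lo instalo?",
))

# QA_PROMPT fills {context} and {question}
print(QA_PROMPT.format(
    context="Quivr stores documents as embeddings and retrieves them per question.",
    question="¿Cómo lo instalo?",
))

Both calls return plain strings, which is the form the chain consumes at run time; a Spanish question should yield a Spanish standalone question and answer, which is the point of this PR.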
@@ -11,6 +11,7 @@ from langchain.llms import OpenAI
 from fastapi.openapi.utils import get_openapi
 from tempfile import SpooledTemporaryFile
 import shutil
+import LANGUAGE_PROMPT
 import pypandoc
 
 from parsers.common import file_already_exists
@@ -73,7 +74,6 @@ class ChatMessage(BaseModel):
 
 
 
-
 file_processors = {
     ".txt": process_txt,
     ".csv": process_csv,
@@ -115,6 +115,10 @@ async def upload_file(file: UploadFile):
 async def chat_endpoint(chat_message: ChatMessage):
     history = chat_message.history
     # Logic from your Streamlit app goes here. For example:
+
+    #this overwrites the built-in prompt of the ConversationalRetrievalChain
+    ConversationalRetrievalChain.prompts = LANGUAGE_PROMPT
+
     qa = None
     if chat_message.model.startswith("gpt"):
        qa = ConversationalRetrievalChain.from_llm(
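A note on the assignment above: the chain does not consult a class attribute named `prompts`, so the override likely has no effect as written. In the LangChain releases of this period, the supported way to inject custom prompts is through `from_llm` itself. A minimal sketch of that wiring, with `vector_store` as a placeholder for the app's existing vector store and an illustrative model name:

import LANGUAGE_PROMPT
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI

# Pass the custom prompts explicitly instead of patching the class.
qa = ConversationalRetrievalChain.from_llm(
    ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0),
    vector_store.as_retriever(),  # placeholder: the app's vector store
    # rewrites the follow-up question in the original question's language
    condense_question_prompt=LANGUAGE_PROMPT.CONDENSE_QUESTION_PROMPT,
    # answers from retrieved context in the question's language
    combine_docs_chain_kwargs={"prompt": LANGUAGE_PROMPT.QA_PROMPT},
)

Here `condense_question_prompt` replaces the chain's built-in question-condensing prompt and `combine_docs_chain_kwargs` forwards `QA_PROMPT` to the underlying QA chain, so both stages run in the language of the user's question.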