Mirror of https://github.com/QuivrHQ/quivr.git (synced 2024-12-15 01:21:48 +03:00)
chore(docker): increased number of replicas
commit 15ba223f9c
parent 3c11e03764
@@ -313,8 +313,6 @@ class KnowledgeBrainQA(BaseModel, QAInterface):
 
         # Combine all response tokens to form the final assistant message
         assistant = "".join(response_tokens)
-        logger.info("💋💋💋💋")
-        logger.info(streamed_chat_history)
 
         try:
             if save_answer:
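For context, this hunk sits in KnowledgeBrainQA's streaming path: every token emitted by the model is collected in response_tokens and then joined into a single assistant message, which is persisted only when save_answer is set. A minimal sketch of that pattern, assuming nothing beyond the names visible in the diff (the persistence call itself is not shown there and is left as a placeholder):

def build_assistant_message(response_tokens: list[str], save_answer: bool) -> str:
    # Combine all response tokens to form the final assistant message.
    assistant = "".join(response_tokens)
    if save_answer:
        # The real code saves the answer here (the diff only shows the branch),
        # so this sketch deliberately leaves the persistence step out.
        pass
    return assistant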
@@ -246,7 +246,6 @@ class HeadlessQA(BaseModel, QAInterface):
         )
 
         async for token in callback.aiter():
-            logger.info("Token: %s", token)
             response_tokens.append(token)
             streamed_chat_history.assistant = token
             yield f"data: {json.dumps(streamed_chat_history.dict())}"
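The surrounding loop in HeadlessQA streams the answer token by token over server-sent events: each token is appended to response_tokens and immediately re-emitted as a data: frame. A self-contained sketch of that framing, with a plain dict standing in for the streamed_chat_history Pydantic model used in the real code:

import json
from typing import AsyncIterator

async def stream_answer(tokens: AsyncIterator[str]):
    # Accumulate tokens while forwarding each one to the client as an SSE frame.
    response_tokens: list[str] = []
    streamed_chat_history = {"assistant": ""}
    async for token in tokens:
        response_tokens.append(token)
        streamed_chat_history["assistant"] = token
        yield f"data: {json.dumps(streamed_chat_history)}"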
@@ -66,22 +66,14 @@ class BrainfulChat(ChatInterface):
             supabase_client, embeddings, table_name="vectors"
         )
         # Get the first question from the chat_question
-        logger.info(f"Finding brain closest to {chat_question}")
-        logger.info("🔥🔥🔥🔥🔥")
         question = chat_question.question
-        logger.info(f"Question is {question}")
         history = chat_service.get_chat_history(chat_id)
         if history:
             question = history[0].user_message
-            logger.info(f"Question is {question}")
         brain_id_to_use = vector_store.find_brain_closest_query(question)
-        logger.info(f"Found brain {brain_id_to_use}")
-        logger.info("🧠🧠🧠")
 
         brain = brain_service.get_brain_by_id(brain_id_to_use)
-        logger.info(f"Brain type: {brain.brain_type}")
-        logger.info(f"Id is {brain.brain_id}")
-        logger.info(f"Type of brain_id is {type(brain.brain_id)}")
         if (
             brain
             and brain.brain_type == BrainType.DOC
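Stripped of its debug logging, the logic that remains in this hunk chooses the question to route on (the first user message of the chat when history exists, otherwise the incoming question), asks the vector store for the closest brain, and loads it. A sketch of that flow, assuming only that the service and vector-store objects expose the methods the diff calls:

def select_brain(chat_question, chat_id, chat_service, vector_store, brain_service):
    # Default to the incoming question, but prefer the first message of an
    # existing conversation so follow-ups keep routing to the same brain.
    question = chat_question.question
    history = chat_service.get_chat_history(chat_id)
    if history:
        question = history[0].user_message
    brain_id_to_use = vector_store.find_brain_closest_query(question)
    return brain_service.get_brain_by_id(brain_id_to_use)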
@@ -43,9 +43,6 @@ class CustomSupabaseVectorStore(SupabaseVectorStore):
         ).execute()
 
         # Get the brain_id of the brain that is most similar to the query
-        logger.info(f"Found {len(res.data)} brains")
-        logger.info(res.data)
-        logger.info("🔥🔥🔥🔥🔥")
         brain_id = res.data[0].get("id", None)
         if not brain_id:
             return None
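What is left after the cleanup is a defensive lookup: take the first row of the similarity result and return None when it carries no id. A small sketch under the assumption that res is a Supabase query result whose .data attribute is a list of dicts:

def closest_brain_id(res):
    # Guard against an empty result set before reading the first row.
    if not res.data:
        return None
    # Mirror the diff: treat a missing or falsy "id" as "no brain found".
    brain_id = res.data[0].get("id", None)
    if not brain_id:
        return None
    return brain_id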
@@ -23,7 +23,7 @@ services:
       - "--port"
       - "5050"
       - "--workers"
-      - "2"
+      - "6"
     restart: always
 
     ports:
@@ -41,7 +41,7 @@ services:
       - "--port"
       - "5050"
       - "--workers"
-      - "1"
+      - "6"
     restart: always
     volumes:
       - ./backend/:/code/
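Both compose hunks make the same change, which is the substance of the commit: the uvicorn --workers value for the two backend services is raised from 2 and 1, respectively, to 6, so each container now answers requests with six worker processes. The value 6 is taken straight from the diff; a common rule of thumb for sizing it (not something this commit states) is roughly twice the CPU count plus one, sketched below:

import multiprocessing

def suggested_worker_count(cpu_cores: int | None = None) -> int:
    # Rule-of-thumb sizing only; the commit itself simply hard-codes 6 workers.
    cores = cpu_cores or multiprocessing.cpu_count()
    return 2 * cores + 1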