Mirror of https://github.com/QuivrHQ/quivr.git (synced 2024-12-15 09:32:22 +03:00)
Commit 847e161d80
# Description

- Defined quivr-core `ChatHistory`
- `ChatHistory` can be iterated over in `(HumanMessage, AIMessage)` pairs
- Brain appends to the `ChatHistory` once a response is received
- Brain holds a dict of chats and defines the default chat (TODO: define a system for selecting chats; see the sketch below)
- Wrote a test
- Updated `QuivrQARAG` to use `ChatHistory` as input
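As a rough illustration of the "dict of chats" bullet, here is a minimal sketch of how a Brain might keep several `ChatHistory` objects keyed by chat id and expose a default one. The `Brain` shape and the `new_chat`/`get_chat` helpers are assumptions for illustration only, not the quivr-core API, and the import path for `ChatHistory` is assumed.

```python
from uuid import UUID, uuid4

from quivr_core.chat import ChatHistory  # assumed module path for the class listed below


class Brain:
    """Hypothetical sketch: a Brain owning a dict of chats plus a default chat."""

    def __init__(self, brain_id: UUID | None = None) -> None:
        self.id = brain_id or uuid4()
        # Chats keyed by chat id; the first chat created is used as the default.
        self._chats: dict[UUID, ChatHistory] = {}
        self.default_chat = self.new_chat()

    def new_chat(self) -> ChatHistory:
        chat = ChatHistory(chat_id=uuid4(), brain_id=self.id)
        self._chats[chat.id] = chat
        return chat

    def get_chat(self, chat_id: UUID | None = None) -> ChatHistory:
        # The commit leaves chat selection as a TODO; fall back to the default chat.
        if chat_id is None:
            return self.default_chat
        return self._chats[chat_id]
```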
51 lines · 1.6 KiB · Python
from datetime import datetime
from typing import Any, Generator, Tuple
from uuid import UUID, uuid4

from langchain_core.messages import AIMessage, HumanMessage

from quivr_core.models import ChatMessage


class ChatHistory:
    def __init__(self, chat_id: UUID, brain_id: UUID) -> None:
        self.id = chat_id
        self.brain_id = brain_id
        # TODO(@aminediro): maybe use a deque() instead ?
        self._msgs: list[ChatMessage] = []

    def get_chat_history(self, newest_first: bool = False):
        """Returns a ChatMessage list sorted by time

        Returns:
            list[ChatMessage]: list of chat messages
        """
        history = sorted(self._msgs, key=lambda msg: msg.message_time)
        if newest_first:
            return history[::-1]
        return history

    def __len__(self):
        return len(self._msgs)

    def append(
        self, langchain_msg: AIMessage | HumanMessage, metadata: dict[str, Any] = {}
    ):
        chat_msg = ChatMessage(
            chat_id=self.id,
            message_id=uuid4(),
            brain_id=self.brain_id,
            msg=langchain_msg,
            message_time=datetime.now(),
            metadata=metadata,
        )
        self._msgs.append(chat_msg)

    def iter_pairs(self) -> Generator[Tuple[HumanMessage, AIMessage], None, None]:
        # Reverse the chat_history, newest first
        it = iter(self.get_chat_history(newest_first=True))
        for ai_message, human_message in zip(it, it):
            assert isinstance(human_message.msg, HumanMessage)
            assert isinstance(ai_message.msg, AIMessage)
            yield (human_message.msg, ai_message.msg)
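A hedged usage sketch of the class above: append a couple of human/AI exchanges, then walk the history in `(HumanMessage, AIMessage)` pairs the way a QA pipeline such as `QuivrQARAG` could when rebuilding the conversation. The UUIDs and message contents are placeholders, and the import path for `ChatHistory` is assumed.

```python
from uuid import uuid4

from langchain_core.messages import AIMessage, HumanMessage

from quivr_core.chat import ChatHistory  # assumed module path

# Illustrative only: chat_id and brain_id are throwaway UUIDs.
history = ChatHistory(chat_id=uuid4(), brain_id=uuid4())

history.append(HumanMessage(content="What is Quivr?"))
history.append(AIMessage(content="Quivr is an open-source RAG application."))
history.append(HumanMessage(content="How do I add documents?"))
history.append(AIMessage(content="Upload files to a brain and query it."))

assert len(history) == 4

# iter_pairs walks the history newest-first and yields (HumanMessage, AIMessage)
# tuples, so the consumer sees each question together with its answer.
for human, ai in history.iter_pairs():
    print(human.content, "->", ai.content)
```

Note that because `iter_pairs` zips the newest-first history two messages at a time, its assertions expect a strictly alternating history whose most recent message is an `AIMessage`; calling it while the latest human question is still unanswered will trip the `isinstance` asserts.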