refacto(backend): poetry package manager and chat route refactoring (#2684)

# Description
- Added package manager
- Added precommit checks
- Rewrote dependency injection of Services and Repositories
- Integrate async SQL alchemy engine
- Migrated Chat repository to SQLModel
- Migrated ChatHistory repository to SQLModel
- User SQLModel
- Unit test methodology with db rollback
- Unit tests ChatRepository
- Test ChatService get_history
- Brain entity SQL Model
- Prompt SQLModel
- Rewrite chat/{chat_id}/question route
- updated docker files and docker compose in dev and production

Added `quivr_core` subpackages:
- Refactored KnowledgebrainQa
- Added Rag service to interface with non-rag dependencies

---------

Co-authored-by: aminediro <aminediro@github.com>
This commit is contained in:
AmineDiro 2024-06-26 09:58:55 +02:00 committed by GitHub
parent 1751504d30
commit ca93cb9062
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
420 changed files with 13339 additions and 2496 deletions

View File

@ -2,7 +2,7 @@
# This file is used to configure the Quivr stack. It is used by the `docker-compose.yml` file to configure the stack.
# OPENAI. Update this to use your API key. To skip OpenAI integration use a fake key, for example: tk-aabbccddAABBCCDDEeFfGgHhIiJKLmnopjklMNOPqQqQqQqQ
OPENAI_API_KEY=CHANGE_ME
OPENAI_API_KEY=CHANGE_ME
# LOCAL
# OLLAMA_API_BASE_URL=http://host.docker.internal:11434 # Uncomment to activate ollama. This is the local url for the ollama api
@ -28,6 +28,7 @@ NEXT_PUBLIC_AUTH_MODES=password
SUPABASE_URL=http://host.docker.internal:54321
SUPABASE_SERVICE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImV4cCI6MTk4MzgxMjk5Nn0.EGIM96RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU
PG_DATABASE_URL=postgresql://postgres:postgres@host.docker.internal:54322/postgres
PG_DATABASE_ASYNC_URL=postgresql+asyncpg://postgres:postgres@host.docker.internal:54322/postgres
ANTHROPIC_API_KEY=null
JWT_SECRET_KEY=super-secret-jwt-token-with-at-least-32-characters-long
AUTHENTICATE=true

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,24 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: check-added-large-files
- id: check-toml
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
# Check poetry state
- repo: https://github.com/python-poetry/poetry
rev: "1.5.1"
hooks:
- id: poetry-check
args: ["-C", "./backend"]
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.4.8
hooks:
# Run the linter.
- id: ruff
args: [--fix]
# Run the formatter.
- id: ruff-format

View File

@ -31,8 +31,6 @@ RUN apt-get clean && apt-get update && apt-get install -y \
pandoc && \
rm -rf /var/lib/apt/lists/*
# Add Rust binaries to the PATH
ENV PATH="/root/.cargo/bin:${PATH}"
RUN ARCHITECTURE=$(uname -m) && \
if [ "$ARCHITECTURE" = "x86_64" ]; then \
@ -46,19 +44,31 @@ RUN ARCHITECTURE=$(uname -m) && \
fi && \
rm -rf /var/lib/apt/lists/*
RUN curl -sSL https://install.python-poetry.org | POETRY_HOME=/opt/poetry python && \
cd /usr/local/bin && \
ln -s /opt/poetry/bin/poetry && \
poetry config virtualenvs.create false
# Add Rust binaries to the PATH
ENV PATH="/root/.cargo/bin:${PATH}" \
POETRY_CACHE_DIR=/tmp/poetry_cache \
PYTHONDONTWRITEBYTECODE=1
WORKDIR /code
# Copy just the requirements first
COPY ./requirements.txt .
# Copy pyproject and poetry
COPY ./pyproject.toml ./poetry.lock* /code/
# Upgrade pip and install dependencies
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir -r requirements.txt && \
playwright install --with-deps
# Run install
RUN poetry install --no-root && \
playwright install --with-deps && \
rm -rf $POETRY_CACHE_DIR
ENV PYTHONPATH=/code
# Copy the rest of the application
COPY . .
EXPOSE 5050
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "5050", "--workers", "6"]

View File

@ -10,11 +10,8 @@ RUN apt-get clean && apt-get update && apt-get install -y \
libcurl4-openssl-dev \
libssl-dev \
binutils \
pandoc \
curl \
git \
poppler-utils \
tesseract-ocr \
autoconf \
automake \
build-essential \
@ -31,24 +28,29 @@ RUN apt-get clean && apt-get update && apt-get install -y \
pandoc && \
rm -rf /var/lib/apt/lists/* && apt-get clean
# TODO(@aminediro): multistage build. Probably don't need poetry once it's built
# Install Poetry
RUN curl -sSL https://install.python-poetry.org | POETRY_HOME=/opt/poetry python && \
cd /usr/local/bin && \
ln -s /opt/poetry/bin/poetry && \
poetry config virtualenvs.create false
# Add Rust binaries to the PATH
ENV PATH="/root/.cargo/bin:${PATH}"
# Copy just the requirements first
COPY ./requirements.txt .
# Upgrade pip
RUN pip install --upgrade pip
# Increase timeout to wait for the new installation
RUN pip install --no-cache-dir -r requirements.txt --timeout 200 && \
playwright install --with-deps
ENV PATH="/root/.cargo/bin:${PATH}" \
POETRY_CACHE_DIR=/tmp/poetry_cache \
PYTHONDONTWRITEBYTECODE=1
WORKDIR /code
# Copy the rest of the application
COPY . .
# Copy pyproject and poetry
COPY ./pyproject.toml ./poetry.lock* /code/
# Run install
RUN poetry install --no-root && \
playwright install --with-deps && \
rm -rf $POETRY_CACHE_DIR
ENV PYTHONPATH=/code
EXPOSE 5050
CMD ["uvicorn", "main:app","--reload", "--host", "0.0.0.0", "--port", "5050", "--workers", "6"]

1
backend/README.md Normal file
View File

@ -0,0 +1 @@
# Quivr backend

View File

@ -40,5 +40,4 @@ elif CELERY_BROKER_URL.startswith("redis"):
else:
raise ValueError(f"Unsupported broker URL: {CELERY_BROKER_URL}")
celery.autodiscover_tasks(["modules.sync", "modules", "middlewares", "packages"])
celery.autodiscover_tasks(["backend.modules.sync.tasks"])

View File

@ -4,24 +4,27 @@ from tempfile import NamedTemporaryFile
from uuid import UUID
from celery.schedules import crontab
from celery_config import celery
from logger import get_logger
from middlewares.auth.auth_bearer import AuthBearer
from models.files import File
from models.settings import get_supabase_client, get_supabase_db
from modules.brain.integrations.Notion.Notion_connector import NotionConnector
from modules.brain.service.brain_service import BrainService
from modules.brain.service.brain_vector_service import BrainVectorService
from modules.notification.dto.inputs import NotificationUpdatableProperties
from modules.notification.entity.notification import NotificationsStatusEnum
from modules.notification.service.notification_service import NotificationService
from modules.onboarding.service.onboarding_service import OnboardingService
from packages.files.crawl.crawler import CrawlWebsite, slugify
from packages.files.parsers.github import process_github
from packages.files.processors import filter_file
from packages.utils.telemetry import maybe_send_telemetry
from pytz import timezone
from backend.celery_config import celery
from backend.logger import get_logger
from backend.middlewares.auth.auth_bearer import AuthBearer
from backend.models.files import File
from backend.models.settings import get_supabase_client, get_supabase_db
from backend.modules.brain.integrations.Notion.Notion_connector import NotionConnector
from backend.modules.brain.service.brain_service import BrainService
from backend.modules.brain.service.brain_vector_service import BrainVectorService
from backend.modules.notification.dto.inputs import NotificationUpdatableProperties
from backend.modules.notification.entity.notification import NotificationsStatusEnum
from backend.modules.notification.service.notification_service import (
NotificationService,
)
from backend.modules.onboarding.service.onboarding_service import OnboardingService
from backend.packages.files.crawl.crawler import CrawlWebsite, slugify
from backend.packages.files.parsers.github import process_github
from backend.packages.files.processors import filter_file
from backend.packages.utils.telemetry import maybe_send_telemetry
logger = get_logger(__name__)
onboardingService = OnboardingService()
@ -64,7 +67,7 @@ def process_file_and_notify(
file_original_name, only_vectors=True
)
message = filter_file(
filter_file(
file=file_instance,
brain_id=brain_id,
original_file_name=file_original_name,
@ -102,7 +105,6 @@ def process_crawl_and_notify(
brain_id: UUID,
notification_id=None,
):
crawl_website = CrawlWebsite(url=crawl_website_url)
if not crawl_website.checkGithub():
@ -123,7 +125,7 @@ def process_crawl_and_notify(
file_size=len(extracted_content),
file_extension=".txt",
)
message = filter_file(
filter_file(
file=file_instance,
brain_id=brain_id,
original_file_name=crawl_website_url,
@ -136,7 +138,7 @@ def process_crawl_and_notify(
),
)
else:
message = process_github(
process_github(
repo=crawl_website.url,
brain_id=brain_id,
)

View File

@ -6,29 +6,30 @@ import sentry_sdk
from dotenv import load_dotenv # type: ignore
from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import HTMLResponse, JSONResponse
from logger import get_logger
from middlewares.cors import add_cors_middleware
from modules.analytics.controller.analytics_routes import analytics_router
from modules.api_key.controller import api_key_router
from modules.assistant.controller import assistant_router
from modules.brain.controller import brain_router
from modules.chat.controller import chat_router
from modules.contact_support.controller import contact_router
from modules.knowledge.controller import knowledge_router
from modules.misc.controller import misc_router
from modules.onboarding.controller import onboarding_router
from modules.prompt.controller import prompt_router
from modules.sync.controller import sync_router
from modules.upload.controller import upload_router
from modules.user.controller import user_router
from packages.utils import handle_request_validation_error
from packages.utils.telemetry import maybe_send_telemetry
from pyinstrument import Profiler
from routes.crawl_routes import crawl_router
from routes.subscription_routes import subscription_router
from sentry_sdk.integrations.fastapi import FastApiIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration
from backend.logger import get_logger
from backend.middlewares.cors import add_cors_middleware
from backend.modules.analytics.controller.analytics_routes import analytics_router
from backend.modules.api_key.controller import api_key_router
from backend.modules.assistant.controller import assistant_router
from backend.modules.brain.controller import brain_router
from backend.modules.chat.controller import chat_router
from backend.modules.contact_support.controller import contact_router
from backend.modules.knowledge.controller import knowledge_router
from backend.modules.misc.controller import misc_router
from backend.modules.onboarding.controller import onboarding_router
from backend.modules.prompt.controller import prompt_router
from backend.modules.sync.controller import sync_router
from backend.modules.upload.controller import upload_router
from backend.modules.user.controller import user_router
from backend.packages.utils import handle_request_validation_error
from backend.packages.utils.telemetry import maybe_send_telemetry
from backend.routes.crawl_routes import crawl_router
from backend.routes.subscription_routes import subscription_router
load_dotenv()
# Set the logging level for all loggers to WARNING
@ -68,7 +69,6 @@ if sentry_dsn:
)
app = FastAPI()
add_cors_middleware(app)
app.include_router(brain_router)
@ -129,4 +129,4 @@ if __name__ == "__main__":
# run main.py to debug backend
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=5050, log_level="warning", access_log=False)
uvicorn.run(app, host="0.0.0.0", port=5050, log_level="debug", access_log=False)

View File

@ -3,9 +3,10 @@ from typing import Optional
from fastapi import Depends, HTTPException, Request
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from middlewares.auth.jwt_token_handler import decode_access_token, verify_token
from modules.api_key.service.api_key_service import ApiKeyService
from modules.user.entity.user_identity import UserIdentity
from backend.middlewares.auth.jwt_token_handler import decode_access_token, verify_token
from backend.modules.api_key.service.api_key_service import ApiKeyService
from backend.modules.user.entity.user_identity import UserIdentity
api_key_service = ApiKeyService()
@ -54,7 +55,7 @@ class AuthBearer(HTTPBearer):
def get_test_user(self) -> UserIdentity:
return UserIdentity(
email="test@example.com", id="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX" # type: ignore
email="admin@quivr.app", id="39418e3b-0258-4452-af60-7acfcc1263ff" # type: ignore
) # replace with test user information

View File

@ -4,7 +4,8 @@ from typing import Optional
from jose import jwt
from jose.exceptions import JWTError
from modules.user.entity.user_identity import UserIdentity
from backend.modules.user.entity.user_identity import UserIdentity
SECRET_KEY = os.environ.get("JWT_SECRET_KEY")
ALGORITHM = "HS256"

View File

@ -1,7 +1,8 @@
from uuid import UUID
from logger import get_logger
from pydantic import ConfigDict, BaseModel
from pydantic import BaseModel, ConfigDict
from backend.logger import get_logger
logger = get_logger(__name__)

View File

@ -1,7 +1,7 @@
from pydantic import BaseModel
class LLMModels(BaseModel):
class LLMModel(BaseModel):
"""LLM models stored in the database that are allowed to be used by the users.
Args:
BaseModel (BaseModel): Pydantic BaseModel

View File

@ -2,7 +2,7 @@ from abc import ABC, abstractmethod
from datetime import datetime
from uuid import UUID
from .llm_models import LLMModels
from .llm_models import LLMModel
class Repository(ABC):
@ -15,7 +15,7 @@ class Repository(ABC):
pass
@abstractmethod
def get_model_settings(self) -> LLMModels | None:
def get_models(self) -> LLMModel | None:
pass
@abstractmethod

View File

@ -0,0 +1,6 @@
from backend.models.databases.supabase.brains_subscription_invitations import (
BrainSubscription,
)
from backend.models.databases.supabase.files import File
from backend.models.databases.supabase.user_usage import UserUsage
from backend.models.databases.supabase.vectors import Vector

View File

@ -1,6 +1,5 @@
from models.databases.repository import Repository
from logger import get_logger
from backend.logger import get_logger
from backend.models.databases.repository import Repository
logger = get_logger(__name__)

View File

@ -1,4 +1,4 @@
from models.databases.repository import Repository
from backend.models.databases.repository import Repository
class File(Repository):

View File

@ -1,6 +1,7 @@
from models.databases.supabase import BrainSubscription, File, UserUsage, Vector
from backend.models.databases.supabase import BrainSubscription, File, UserUsage, Vector
# TODO: REMOVE THIS CLASS !
class SupabaseDB(
UserUsage,
File,

View File

@ -1,8 +1,8 @@
from datetime import datetime, timedelta
from uuid import UUID
from logger import get_logger
from models.databases.repository import Repository
from backend.logger import get_logger
from backend.models.databases.repository import Repository
logger = get_logger(__name__)
@ -55,18 +55,15 @@ class UserUsage(Repository):
return user_settings
def get_model_settings(self):
"""
Fetch the user settings from the database
"""
def get_models(self):
model_settings_response = (self.db.from_("models").select("*").execute()).data
if len(model_settings_response) == 0:
raise ValueError("An issue occured while fetching the model settings")
return model_settings_response
def get_user_monthly(self, user_id):
pass
def get_user_usage(self, user_id):
"""
Fetch the user request stats from the database

View File

@ -1,4 +1,4 @@
from models.databases.repository import Repository
from backend.models.databases.repository import Repository
class Vector(Repository):

View File

@ -3,13 +3,14 @@ from typing import List, Optional
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_core.documents import Document
from logger import get_logger
from models.databases.supabase.supabase import SupabaseDB
from models.settings import get_supabase_db
from modules.brain.service.brain_vector_service import BrainVectorService
from packages.files.file import compute_sha1_from_content
from pydantic import BaseModel
from backend.logger import get_logger
from backend.models.databases.supabase.supabase import SupabaseDB
from backend.models.settings import get_supabase_db
from backend.modules.brain.service.brain_vector_service import BrainVectorService
from backend.packages.files.file import compute_sha1_from_content
logger = get_logger(__name__)

View File

@ -1,15 +1,18 @@
from typing import Optional
from uuid import UUID
from langchain.embeddings.base import Embeddings
from langchain_community.embeddings.ollama import OllamaEmbeddings
from langchain_community.vectorstores.supabase import SupabaseVectorStore
from langchain_openai import OpenAIEmbeddings
from logger import get_logger
from models.databases.supabase.supabase import SupabaseDB
from posthog import Posthog
from pydantic_settings import BaseSettings, SettingsConfigDict
from sqlalchemy import Engine, create_engine
from backend.logger import get_logger
from backend.models.databases.supabase.supabase import SupabaseDB
from supabase.client import Client, create_client
from langchain_community.vectorstores.supabase import SupabaseVectorStore
logger = get_logger(__name__)
@ -111,10 +114,11 @@ class BrainSettings(BaseSettings):
supabase_service_key: str = ""
resend_api_key: str = "null"
resend_email_address: str = "brain@mail.quivr.app"
ollama_api_base_url: str = None
langfuse_public_key: str = None
langfuse_secret_key: str = None
pg_database_url: str = None
ollama_api_base_url: str | None = None
langfuse_public_key: str | None = None
langfuse_secret_key: str | None = None
pg_database_url: str
pg_database_async_url: str
class ResendSettings(BaseSettings):
@ -126,22 +130,31 @@ class ResendSettings(BaseSettings):
_supabase_client: Optional[Client] = None
_supabase_db: Optional[SupabaseDB] = None
_db_engine: Optional[Engine] = None
_embedding_service = None
settings = BrainSettings()
def get_pg_database_engine():
    """Return the process-wide synchronous SQLAlchemy engine, creating it lazily."""
    global _db_engine
    if _db_engine is not None:
        return _db_engine
    logger.info("Creating Postgres DB engine")
    # Re-reads the environment on first use, mirroring the original behavior.
    local_settings = BrainSettings()  # pyright: ignore reportPrivateUsage=none
    _db_engine = create_engine(local_settings.pg_database_url, pool_pre_ping=True)
    return _db_engine
def get_pg_database_async_engine():
    """Return a lazily-created, process-wide *async* SQLAlchemy engine.

    Fixes two defects in the original implementation:
    - it cached the engine in the same ``_db_engine`` global as the sync
      engine, so whichever getter ran first "won" and the other silently
      returned an engine of the wrong kind;
    - it built the engine with the synchronous ``create_engine``, which
      cannot drive a ``postgresql+asyncpg://`` URL; async engines must be
      created with ``create_async_engine``.
    """
    # Local import keeps this fix self-contained in the function.
    from sqlalchemy.ext.asyncio import create_async_engine

    engine = getattr(get_pg_database_async_engine, "_engine", None)
    if engine is None:
        logger.info("Creating async Postgres DB engine")
        engine = create_async_engine(
            settings.pg_database_async_url, pool_pre_ping=True
        )
        # Cache on the function itself so it never collides with _db_engine.
        get_pg_database_async_engine._engine = engine
    return engine
def get_supabase_client() -> Client:
global _supabase_client
if _supabase_client is None:
logger.info("Creating Supabase client")
settings = BrainSettings() # pyright: ignore reportPrivateUsage=none
_supabase_client = create_client(
settings.supabase_url, settings.supabase_service_key
)
@ -156,8 +169,8 @@ def get_supabase_db() -> SupabaseDB:
return _supabase_db
def get_embeddings():
settings = BrainSettings() # pyright: ignore reportPrivateUsage=none
def get_embedding_client() -> Embeddings:
global _embedding_service
if settings.ollama_api_base_url:
embeddings = OllamaEmbeddings(
base_url=settings.ollama_api_base_url,
@ -168,8 +181,7 @@ def get_embeddings():
def get_documents_vector_store() -> SupabaseVectorStore:
settings = BrainSettings() # pyright: ignore reportPrivateUsage=none
embeddings = get_embeddings()
embeddings = get_embedding_client()
supabase_client: Client = get_supabase_client()
documents_vector_store = SupabaseVectorStore(
supabase_client, embeddings, table_name="vectors"

View File

@ -1,22 +1,25 @@
from uuid import UUID
from fastapi import APIRouter, Depends, Query
from middlewares.auth.auth_bearer import AuthBearer, get_current_user
from modules.analytics.entity.analytics import Range
from modules.analytics.service.analytics_service import AnalyticsService
from backend.middlewares.auth.auth_bearer import AuthBearer, get_current_user
from backend.modules.analytics.entity.analytics import Range
from backend.modules.analytics.service.analytics_service import AnalyticsService
analytics_service = AnalyticsService()
analytics_router = APIRouter()
@analytics_router.get(
"/analytics/brains-usages", dependencies=[Depends(AuthBearer())], tags=["Analytics"]
)
async def get_brains_usages(
user: UUID = Depends(get_current_user),
brain_id: UUID = Query(None),
graph_range: Range = Query(Range.WEEK, alias="graph_range")
graph_range: Range = Query(Range.WEEK, alias="graph_range"),
):
"""
Get all user brains usages
"""
return analytics_service.get_brains_usages(user.id, graph_range, brain_id)
return analytics_service.get_brains_usages(user.id, graph_range, brain_id)

View File

@ -3,9 +3,9 @@ from datetime import datetime, timedelta
from typing import Optional
from uuid import UUID
from models.settings import get_supabase_client
from modules.analytics.entity.analytics import BrainsUsages, Range, Usage
from modules.brain.service.brain_user_service import BrainUserService
from backend.models.settings import get_supabase_client
from backend.modules.analytics.entity.analytics import BrainsUsages, Range, Usage
from backend.modules.brain.service.brain_user_service import BrainUserService
brain_user_service = BrainUserService()

View File

@ -1,15 +1,22 @@
from abc import ABC, abstractmethod
from typing import Optional
from uuid import UUID
from modules.analytics.entity.analytics import BrainsUsages, Range
from backend.modules.analytics.entity.analytics import BrainsUsages, Range
class AnalyticsInterface(ABC):
    """Abstract repository contract for brain-usage analytics."""

    @abstractmethod
    def get_brains_usages(
        self,
        user_id: UUID,
        graph_range: Range = Range.WEEK,
        brain_id: Optional[UUID] = None,
    ) -> BrainsUsages:
        """Get user brains usage.

        Args:
            user_id (UUID): The id of the user
            graph_range (Range): Aggregation window; defaults to one week
            brain_id (Optional[UUID]): The id of the brain, optional
        """
        ...

View File

@ -0,0 +1,13 @@
from backend.modules.analytics.repository.analytics import Analytics
from backend.modules.analytics.repository.analytics_interface import AnalyticsInterface
class AnalyticsService:
    """Thin service layer that delegates analytics queries to a repository."""

    repository: AnalyticsInterface

    def __init__(self):
        # Concrete Supabase-backed repository; swap this out for testing.
        self.repository = Analytics()

    def get_brains_usages(self, user_id, graph_range, brain_id=None):
        """Forward the brains-usage query to the underlying repository."""
        return self.repository.get_brains_usages(user_id, graph_range, brain_id)

View File

@ -3,12 +3,13 @@ from typing import List
from uuid import uuid4
from fastapi import APIRouter, Depends
from logger import get_logger
from middlewares.auth import AuthBearer, get_current_user
from modules.api_key.dto.outputs import ApiKeyInfo
from modules.api_key.entity.api_key import ApiKey
from modules.api_key.repository.api_keys import ApiKeys
from modules.user.entity.user_identity import UserIdentity
from backend.logger import get_logger
from backend.middlewares.auth import AuthBearer, get_current_user
from backend.modules.api_key.dto.outputs import ApiKeyInfo
from backend.modules.api_key.entity.api_key import ApiKey
from backend.modules.api_key.repository.api_keys import ApiKeys
from backend.modules.user.entity.user_identity import UserIdentity
logger = get_logger(__name__)

View File

@ -2,7 +2,7 @@ from abc import ABC, abstractmethod
from typing import List
from uuid import UUID
from modules.api_key.entity.api_key import ApiKey
from backend.modules.api_key.entity.api_key import ApiKey
class ApiKeysInterface(ABC):

View File

@ -2,9 +2,9 @@ from datetime import datetime
from typing import Optional
from uuid import UUID
from models.settings import get_supabase_client
from modules.api_key.entity.api_key import ApiKey
from modules.api_key.repository.api_key_interface import ApiKeysInterface
from backend.models.settings import get_supabase_client
from backend.modules.api_key.entity.api_key import ApiKey
from backend.modules.api_key.repository.api_key_interface import ApiKeysInterface
class ApiKeys(ApiKeysInterface):

View File

@ -1,11 +1,12 @@
from datetime import datetime
from fastapi import HTTPException
from logger import get_logger
from modules.api_key.repository.api_key_interface import ApiKeysInterface
from modules.api_key.repository.api_keys import ApiKeys
from modules.user.entity.user_identity import UserIdentity
from modules.user.service.user_service import UserService
from backend.logger import get_logger
from backend.modules.api_key.repository.api_key_interface import ApiKeysInterface
from backend.modules.api_key.repository.api_keys import ApiKeys
from backend.modules.user.entity.user_identity import UserIdentity
from backend.modules.user.service.user_service import UserService
logger = get_logger(__name__)

View File

@ -1,14 +1,15 @@
from typing import List
from fastapi import APIRouter, Depends, HTTPException, UploadFile
from logger import get_logger
from middlewares.auth import AuthBearer, get_current_user
from modules.assistant.dto.inputs import InputAssistant
from modules.assistant.dto.outputs import AssistantOutput
from modules.assistant.ito.difference import DifferenceAssistant
from modules.assistant.ito.summary import SummaryAssistant, summary_inputs
from modules.assistant.service.assistant import Assistant
from modules.user.entity.user_identity import UserIdentity
from backend.logger import get_logger
from backend.middlewares.auth import AuthBearer, get_current_user
from backend.modules.assistant.dto.inputs import InputAssistant
from backend.modules.assistant.dto.outputs import AssistantOutput
from backend.modules.assistant.ito.difference import DifferenceAssistant
from backend.modules.assistant.ito.summary import SummaryAssistant, summary_inputs
from backend.modules.assistant.service.assistant import Assistant
from backend.modules.user.entity.user_identity import UserIdentity
assistant_router = APIRouter()
logger = get_logger(__name__)

View File

@ -1,8 +1,10 @@
import os
from tempfile import NamedTemporaryFile
from logger import get_logger
from modules.assistant.dto.outputs import (
from openai import OpenAI
from backend.logger import get_logger
from backend.modules.assistant.dto.outputs import (
AssistantOutput,
InputFile,
Inputs,
@ -10,8 +12,7 @@ from modules.assistant.dto.outputs import (
OutputEmail,
Outputs,
)
from modules.assistant.ito.ito import ITO
from openai import OpenAI
from backend.modules.assistant.ito.ito import ITO
logger = get_logger(__name__)

View File

@ -1,7 +1,8 @@
from bs4 import BeautifulSoup as Soup
from langchain_community.document_loaders.recursive_url_loader import RecursiveUrlLoader
from logger import get_logger
from modules.assistant.dto.outputs import (
from backend.logger import get_logger
from backend.modules.assistant.dto.outputs import (
AssistantOutput,
Inputs,
InputUrl,
@ -9,7 +10,7 @@ from modules.assistant.dto.outputs import (
OutputEmail,
Outputs,
)
from modules.assistant.ito.ito import ITO
from backend.modules.assistant.ito.ito import ITO
logger = get_logger(__name__)

View File

@ -11,9 +11,10 @@ from langchain_community.chat_models import ChatLiteLLM
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, PromptTemplate
from llama_parse import LlamaParse
from logger import get_logger
from modules.assistant.dto.inputs import InputAssistant
from modules.assistant.dto.outputs import (
from backend.logger import get_logger
from backend.modules.assistant.dto.inputs import InputAssistant
from backend.modules.assistant.dto.outputs import (
AssistantOutput,
InputFile,
Inputs,
@ -21,8 +22,8 @@ from modules.assistant.dto.outputs import (
OutputEmail,
Outputs,
)
from modules.assistant.ito.ito import ITO
from modules.user.entity.user_identity import UserIdentity
from backend.modules.assistant.ito.ito import ITO
from backend.modules.user.entity.user_identity import UserIdentity
if not isinstance(asyncio.get_event_loop(), uvloop.Loop):
nest_asyncio.apply()

View File

@ -8,18 +8,19 @@ from tempfile import NamedTemporaryFile
from typing import List, Optional
from fastapi import UploadFile
from logger import get_logger
from modules.assistant.dto.inputs import InputAssistant
from modules.assistant.ito.utils.pdf_generator import PDFGenerator, PDFModel
from modules.chat.controller.chat.utils import update_user_usage
from modules.contact_support.controller.settings import ContactsSettings
from modules.upload.controller.upload_routes import upload_file
from modules.user.entity.user_identity import UserIdentity
from modules.user.service.user_usage import UserUsage
from packages.emails.send_email import send_email
from pydantic import BaseModel
from unidecode import unidecode
from backend.logger import get_logger
from backend.modules.assistant.dto.inputs import InputAssistant
from backend.modules.assistant.ito.utils.pdf_generator import PDFGenerator, PDFModel
from backend.modules.chat.controller.chat.utils import update_user_usage
from backend.modules.contact_support.controller.settings import ContactsSettings
from backend.modules.upload.controller.upload_routes import upload_file
from backend.modules.user.entity.user_identity import UserIdentity
from backend.modules.user.service.user_usage import UserUsage
from backend.packages.emails.send_email import send_email
logger = get_logger(__name__)

View File

@ -12,9 +12,10 @@ from langchain_community.chat_models import ChatLiteLLM
from langchain_community.document_loaders import UnstructuredPDFLoader
from langchain_core.prompts import PromptTemplate
from langchain_text_splitters import CharacterTextSplitter
from logger import get_logger
from modules.assistant.dto.inputs import InputAssistant
from modules.assistant.dto.outputs import (
from backend.logger import get_logger
from backend.modules.assistant.dto.inputs import InputAssistant
from backend.modules.assistant.dto.outputs import (
AssistantOutput,
InputFile,
Inputs,
@ -22,8 +23,8 @@ from modules.assistant.dto.outputs import (
OutputEmail,
Outputs,
)
from modules.assistant.ito.ito import ITO
from modules.user.entity.user_identity import UserIdentity
from backend.modules.assistant.ito.ito import ITO
from backend.modules.user.entity.user_identity import UserIdentity
logger = get_logger(__name__)

View File

Before

Width:  |  Height:  |  Size: 23 KiB

After

Width:  |  Height:  |  Size: 23 KiB

View File

@ -1,7 +1,7 @@
from abc import ABC, abstractmethod
from typing import List
from modules.assistant.entity.assistant import AssistantEntity
from backend.modules.assistant.entity.assistant import AssistantEntity
class AssistantInterface(ABC):

View File

@ -1,6 +1,6 @@
from models.settings import get_supabase_client
from modules.assistant.entity.assistant import AssistantEntity
from modules.assistant.repository.assistant_interface import AssistantInterface
from backend.models.settings import get_supabase_client
from backend.modules.assistant.entity.assistant import AssistantEntity
from backend.modules.assistant.repository.assistant_interface import AssistantInterface
class Assistant(AssistantInterface):

View File

@ -0,0 +1,113 @@
from typing import Any, Generic, Sequence, TypeVar
from uuid import UUID
from fastapi import HTTPException
from pydantic import BaseModel
from sqlalchemy import exc
from sqlmodel import SQLModel, col, select
from sqlmodel.ext.asyncio.session import AsyncSession
from backend.modules.base_uuid_entity import BaseUUIDModel
ModelType = TypeVar("ModelType", bound=BaseUUIDModel)
CreateSchema = TypeVar("CreateSchema", bound=BaseModel)
UpdateSchema = TypeVar("UpdateSchema", bound=BaseModel)
T = TypeVar("T", bound=SQLModel)
class BaseCRUDRepository(Generic[ModelType, CreateSchema, UpdateSchema]):
    """Generic async CRUD repository for SQLModel entities.

    Provides default create/read/update/delete operations for the SQLModel
    table class ``model``. Every method accepts an optional ``db_session``
    override and otherwise falls back to the session supplied at
    construction time.
    """

    def __init__(self, model: type[ModelType], session: AsyncSession):
        """
        Base repository for default CRUD operations.

        Args:
            model: the SQLModel table class this repository manages.
            session: default async session used when a method is not given
                an explicit ``db_session``.
        """
        self.model = model
        self.session = session

    def get_db(self) -> AsyncSession:
        """Return the default session of this repository."""
        return self.session

    async def get_by_id(
        self, *, id: UUID, db_session: AsyncSession | None = None
    ) -> ModelType | None:
        """Fetch one row by primary key.

        Note: ``.one()`` raises ``sqlalchemy.exc.NoResultFound`` when the id
        is absent, so ``None`` is never actually returned; callers relying on
        the annotation should catch that exception instead.
        """
        # Consistency fix: like every other method, fall back to the
        # constructor-supplied session instead of requiring one.
        db_session = db_session or self.session
        query = select(self.model).where(self.model.id == id)
        response = await db_session.exec(query)
        return response.one()

    async def get_by_ids(
        self,
        *,
        list_ids: list[UUID],
        db_session: AsyncSession | None = None,
    ) -> Sequence[ModelType]:
        """Fetch all rows whose primary key is in ``list_ids``.

        Missing ids are silently skipped; ``.all()`` never returns None, so
        the return type is a plain Sequence (annotation fix vs. original).
        """
        db_session = db_session or self.session
        response = await db_session.exec(
            select(self.model).where(col(self.model.id).in_(list_ids))
        )
        return response.all()

    async def get_multi(
        self,
        *,
        skip: int = 0,
        limit: int = 100,
        db_session: AsyncSession | None = None,
    ) -> Sequence[ModelType]:
        """Return a page of rows using offset/limit pagination."""
        db_session = db_session or self.session
        query = select(self.model).offset(skip).limit(limit)
        response = await db_session.exec(query)
        return response.all()

    async def create(
        self,
        *,
        entity: CreateSchema | ModelType,
        db_session: AsyncSession | None = None,
    ) -> ModelType:
        """Insert ``entity`` and return the persisted row.

        Raises:
            HTTPException: 409 when a uniqueness constraint is violated.
        """
        db_session = db_session or self.session
        db_obj = self.model.model_validate(entity)  # type: ignore
        try:
            db_session.add(db_obj)
            await db_session.commit()
        except exc.IntegrityError:
            await db_session.rollback()
            # TODO(@aminediro) : for now, build an exception system
            raise HTTPException(
                status_code=409,
                detail="Resource already exists",
            )
        await db_session.refresh(db_obj)
        return db_obj

    async def update(
        self,
        *,
        obj_current: ModelType,
        obj_new: UpdateSchema | dict[str, Any] | ModelType,
        db_session: AsyncSession | None = None,
    ) -> ModelType:
        """Apply the explicitly-set fields of ``obj_new`` onto ``obj_current``
        and persist the result.
        """
        db_session = db_session or self.session
        if isinstance(obj_new, dict):
            update_data = obj_new
        else:
            # exclude_unset: only copy fields the caller explicitly provided.
            # model_dump replaces the pydantic-v1 `.dict()` this file's
            # `create` (model_validate) shows we are on pydantic v2.
            update_data = obj_new.model_dump(exclude_unset=True)
        for field, value in update_data.items():
            setattr(obj_current, field, value)
        db_session.add(obj_current)
        await db_session.commit()
        await db_session.refresh(obj_current)
        return obj_current

    async def remove(
        self, *, id: UUID | str, db_session: AsyncSession | None = None
    ) -> ModelType:
        """Delete and return the row with primary key ``id``.

        Raises ``sqlalchemy.exc.NoResultFound`` when the id does not exist.
        """
        db_session = db_session or self.session
        response = await db_session.exec(select(self.model).where(self.model.id == id))
        obj = response.one()
        await db_session.delete(obj)
        await db_session.commit()
        return obj

View File

@ -0,0 +1,11 @@
from uuid import UUID
from sqlmodel import Field, SQLModel
class BaseUUIDModel(SQLModel, table=True):
    # Common base for entities keyed by a UUID primary key; subclasses of
    # BaseCRUDRepository's ModelType bound derive from this model.
    # NOTE(review): `table=True` on a shared base registers a real
    # "baseuuidmodel" table in SQLModel metadata — confirm this is intended
    # rather than an abstract base.
    # Primary-key UUID; no default/default_factory is declared, so callers
    # (or a server-side default on the concrete table) must supply it.
    id: UUID = Field(
        primary_key=True,
        index=True,
        nullable=False,
    )

View File

@ -6,21 +6,23 @@ import jq
import requests
from fastapi import HTTPException
from litellm import completion
from modules.brain.service.call_brain_api import call_brain_api
from modules.brain.service.get_api_brain_definition_as_json_schema import (
from backend.logger import get_logger
from backend.modules.brain.knowledge_brain_qa import KnowledgeBrainQA
from backend.modules.brain.qa_interface import QAInterface
from backend.modules.brain.service.brain_service import BrainService
from backend.modules.brain.service.call_brain_api import call_brain_api
from backend.modules.brain.service.get_api_brain_definition_as_json_schema import (
get_api_brain_definition_as_json_schema,
)
from logger import get_logger
from modules.brain.knowledge_brain_qa import KnowledgeBrainQA
from modules.brain.qa_interface import QAInterface
from modules.brain.service.brain_service import BrainService
from modules.chat.dto.chats import ChatQuestion
from modules.chat.dto.inputs import CreateChatHistory
from modules.chat.dto.outputs import GetChatHistoryOutput
from modules.chat.service.chat_service import ChatService
from backend.modules.chat.dto.chats import ChatQuestion
from backend.modules.chat.dto.inputs import CreateChatHistory
from backend.modules.chat.dto.outputs import GetChatHistoryOutput
from backend.modules.chat.service.chat_service import ChatService
from backend.modules.dependencies import get_service
brain_service = BrainService()
chat_service = ChatService()
chat_service = get_service(ChatService)()
logger = get_logger(__name__)
@ -275,9 +277,9 @@ class APIBrainQA(KnowledgeBrainQA, QAInterface):
"message_time": streamed_chat_history.message_time,
"user_message": question.question,
"assistant": "",
"prompt_title": self.prompt_to_use.title
if self.prompt_to_use
else None,
"prompt_title": (
self.prompt_to_use.title if self.prompt_to_use else None
),
"brain_name": brain.name if brain else None,
"brain_id": str(self.brain_id),
"metadata": self.metadata,
@ -291,9 +293,9 @@ class APIBrainQA(KnowledgeBrainQA, QAInterface):
"message_time": None,
"user_message": question.question,
"assistant": "",
"prompt_title": self.prompt_to_use.title
if self.prompt_to_use
else None,
"prompt_title": (
self.prompt_to_use.title if self.prompt_to_use else None
),
"brain_name": brain.name if brain else None,
"brain_id": str(self.brain_id),
"metadata": self.metadata,
@ -474,9 +476,9 @@ class APIBrainQA(KnowledgeBrainQA, QAInterface):
"user_message": question.question,
"assistant": answer,
"message_time": new_chat.message_time,
"prompt_title": self.prompt_to_use.title
if self.prompt_to_use
else None,
"prompt_title": (
self.prompt_to_use.title if self.prompt_to_use else None
),
"brain_name": brain.name if brain else None,
"message_id": new_chat.message_id,
"metadata": self.metadata,

View File

@ -4,24 +4,26 @@ from uuid import UUID
from fastapi import HTTPException
from litellm import completion
from logger import get_logger
from modules.brain.api_brain_qa import APIBrainQA
from modules.brain.entity.brain_entity import BrainEntity, BrainType
from modules.brain.knowledge_brain_qa import KnowledgeBrainQA
from modules.brain.qa_headless import HeadlessQA
from modules.brain.service.brain_service import BrainService
from modules.chat.dto.chats import ChatQuestion
from modules.chat.dto.inputs import CreateChatHistory
from modules.chat.dto.outputs import (
from backend.logger import get_logger
from backend.modules.brain.api_brain_qa import APIBrainQA
from backend.modules.brain.entity.brain_entity import BrainEntity, BrainType
from backend.modules.brain.knowledge_brain_qa import KnowledgeBrainQA
from backend.modules.brain.qa_headless import HeadlessQA
from backend.modules.brain.service.brain_service import BrainService
from backend.modules.chat.dto.chats import ChatQuestion
from backend.modules.chat.dto.inputs import CreateChatHistory
from backend.modules.chat.dto.outputs import (
BrainCompletionOutput,
CompletionMessage,
CompletionResponse,
GetChatHistoryOutput,
)
from modules.chat.service.chat_service import ChatService
from backend.modules.chat.service.chat_service import ChatService
from backend.modules.dependencies import get_service
brain_service = BrainService()
chat_service = ChatService()
chat_service = get_service(ChatService)()
logger = get_logger(__name__)
@ -75,9 +77,9 @@ class CompositeBrainQA(
self.user_id = user_id
def get_answer_generator_from_brain_type(self, brain: BrainEntity):
if brain.brain_type == BrainType.COMPOSITE:
if brain.brain_type == BrainType.composite:
return self.generate_answer
elif brain.brain_type == BrainType.API:
elif brain.brain_type == BrainType.api:
return APIBrainQA(
brain_id=str(brain.id),
chat_id=self.chat_id,
@ -90,7 +92,7 @@ class CompositeBrainQA(
raw=brain.raw,
jq_instructions=brain.jq_instructions,
).generate_answer
elif brain.brain_type == BrainType.DOC:
elif brain.brain_type == BrainType.doc:
return KnowledgeBrainQA(
brain_id=str(brain.id),
chat_id=self.chat_id,

View File

@ -2,28 +2,31 @@ from typing import Dict
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request
from logger import get_logger
from middlewares.auth.auth_bearer import AuthBearer, get_current_user
from modules.brain.dto.inputs import (
from backend.logger import get_logger
from backend.middlewares.auth.auth_bearer import AuthBearer, get_current_user
from backend.modules.brain.dto.inputs import (
BrainQuestionRequest,
BrainUpdatableProperties,
CreateBrainProperties,
)
from modules.brain.entity.brain_entity import PublicBrain, RoleEnum
from modules.brain.entity.integration_brain import IntegrationDescriptionEntity
from modules.brain.service.brain_authorization_service import has_brain_authorization
from modules.brain.service.brain_service import BrainService
from modules.brain.service.brain_user_service import BrainUserService
from modules.brain.service.get_question_context_from_brain import (
from backend.modules.brain.entity.brain_entity import PublicBrain, RoleEnum
from backend.modules.brain.entity.integration_brain import IntegrationDescriptionEntity
from backend.modules.brain.service.brain_authorization_service import (
has_brain_authorization,
)
from backend.modules.brain.service.brain_service import BrainService
from backend.modules.brain.service.brain_user_service import BrainUserService
from backend.modules.brain.service.get_question_context_from_brain import (
get_question_context_from_brain,
)
from modules.brain.service.integration_brain_service import (
from backend.modules.brain.service.integration_brain_service import (
IntegrationBrainDescriptionService,
)
from modules.prompt.service.prompt_service import PromptService
from modules.user.entity.user_identity import UserIdentity
from modules.user.service.user_usage import UserUsage
from packages.utils.telemetry import maybe_send_telemetry
from backend.modules.prompt.service.prompt_service import PromptService
from backend.modules.user.entity.user_identity import UserIdentity
from backend.modules.user.service.user_usage import UserUsage
from backend.packages.utils.telemetry import maybe_send_telemetry
logger = get_logger(__name__)
brain_router = APIRouter()

View File

@ -1,16 +1,17 @@
from typing import Optional
from uuid import UUID
from logger import get_logger
from modules.brain.entity.api_brain_definition_entity import (
from pydantic import BaseModel
from backend.logger import get_logger
from backend.modules.brain.entity.api_brain_definition_entity import (
ApiBrainAllowedMethods,
ApiBrainDefinitionEntity,
ApiBrainDefinitionSchema,
ApiBrainDefinitionSecret,
)
from modules.brain.entity.brain_entity import BrainType
from modules.brain.entity.integration_brain import IntegrationType
from pydantic import BaseModel, Extra
from backend.modules.brain.entity.brain_entity import BrainType
from backend.modules.brain.entity.integration_brain import IntegrationType
logger = get_logger(__name__)
@ -51,7 +52,7 @@ class CreateBrainProperties(BaseModel, extra="ignore"):
temperature: Optional[float] = 0.0
max_tokens: Optional[int] = 2000
prompt_id: Optional[UUID] = None
brain_type: Optional[BrainType] = BrainType.DOC
brain_type: Optional[BrainType] = BrainType.doc
brain_definition: Optional[CreateApiBrainDefinition] = None
brain_secrets_values: Optional[dict] = {}
connected_brains_ids: Optional[list[UUID]] = []
@ -65,7 +66,6 @@ class CreateBrainProperties(BaseModel, extra="ignore"):
class BrainUpdatableProperties(BaseModel, extra="ignore"):
name: Optional[str] = None
description: Optional[str] = None
temperature: Optional[float] = None

View File

@ -0,0 +1,135 @@
from datetime import datetime
from enum import Enum
from typing import List, Optional
from uuid import UUID
from pydantic import BaseModel
# from sqlmodel import Enum as PGEnum
from sqlalchemy.dialects.postgresql import ENUM as PGEnum
from sqlalchemy.ext.asyncio import AsyncAttrs
from sqlmodel import TIMESTAMP, Column, Field, Relationship, SQLModel, text
from sqlmodel import UUID as PGUUID
from backend.modules.brain.entity.api_brain_definition_entity import (
ApiBrainDefinitionEntity,
)
from backend.modules.brain.entity.integration_brain import (
IntegrationDescriptionEntity,
IntegrationEntity,
)
from backend.modules.prompt.entity.prompt import Prompt
# Closed set of brain kinds. Built with the functional Enum API; mixing in
# `str` (type=str) makes each member a plain string, so members serialize
# transparently in JSON bodies and database enum columns.
BrainType = Enum(
    "BrainType",
    {
        "doc": "doc",
        "api": "api",
        "composite": "composite",
        "integration": "integration",
    },
    type=str,
)
class Brain(AsyncAttrs, SQLModel, table=True):
    # SQLModel table mapping for the "brains" table. AsyncAttrs enables
    # `await instance.awaitable_attrs.<relationship>` access under the
    # async SQLAlchemy engine.
    __tablename__ = "brains"  # type: ignore

    # Primary key generated server-side by uuid_generate_v4(); remains None
    # on a new instance until the row is flushed.
    brain_id: UUID | None = Field(
        default=None,
        sa_column=Column(
            PGUUID,
            server_default=text("uuid_generate_v4()"),
            primary_key=True,
        ),
    )
    name: str
    description: str
    status: str | None = None
    model: str | None = None
    max_tokens: int | None = None
    temperature: float | None = None
    # Defaulted by the database to CURRENT_TIMESTAMP; stored without tzinfo.
    last_update: datetime | None = Field(
        default=None,
        sa_column=Column(
            TIMESTAMP(timezone=False),
            server_default=text("CURRENT_TIMESTAMP"),
        ),
    )
    # Maps onto the existing Postgres enum; create_type=False prevents
    # SQLAlchemy from emitting CREATE TYPE for "brain_type_enum".
    brain_type: BrainType | None = Field(
        sa_column=Column(
            PGEnum(BrainType, name="brain_type_enum", create_type=False),
            default=BrainType.integration,
        ),
    )
    # One-to-many: chat-history rows referencing this brain. "ChatHistory"
    # is a string forward reference resolved at mapper-configuration time.
    brain_chat_history: List["ChatHistory"] = Relationship(  # noqa: F821
        back_populates="brain", sa_relationship_kwargs={"lazy": "select"}
    )
    prompt_id: UUID | None = Field(default=None, foreign_key="prompts.id")
    # Eagerly joined so the prompt is loaded with the brain in one query.
    prompt: Prompt | None = Relationship(  # noqa: f821
        back_populates="brain", sa_relationship_kwargs={"lazy": "joined"}
    )

    # TODO : add
    # "meaning" "public"."vector",
    # "tags" "public"."tags"[]
class BrainEntity(BaseModel):
    """Domain-level view of a brain, aggregating its API definition, prompt
    and integration metadata.

    Serialization injects a computed ``id`` key aliasing ``brain_id`` so API
    consumers always see a uniform ``id`` field.
    """

    brain_id: UUID
    name: str
    description: Optional[str] = None
    temperature: Optional[float] = None
    model: Optional[str] = None
    max_tokens: Optional[int] = None
    status: Optional[str] = None
    prompt_id: Optional[UUID] = None
    last_update: datetime
    brain_type: BrainType
    brain_definition: Optional[ApiBrainDefinitionEntity] = None
    connected_brains_ids: Optional[List[UUID]] = None
    raw: Optional[bool] = None
    jq_instructions: Optional[str] = None
    integration: Optional[IntegrationEntity] = None
    integration_description: Optional[IntegrationDescriptionEntity] = None

    @property
    def id(self) -> UUID:
        """Alias for ``brain_id``."""
        return self.brain_id

    def dict(self, **kwargs):
        """Deprecated pydantic-v1 spelling, kept for existing callers."""
        data = super().dict(
            **kwargs,
        )
        data["id"] = self.id
        return data

    def model_dump(self, **kwargs):
        """pydantic-v2 serializer; mirrors ``dict`` so the computed ``id``
        is injected whichever spelling callers use."""
        data = super().model_dump(**kwargs)
        data["id"] = self.id
        return data
class PublicBrain(BaseModel):
    # Read-only projection of a brain exposed in public listings.
    id: UUID
    name: str
    description: Optional[str] = None
    number_of_subscribers: int = 0
    # NOTE(review): typed `str` here while BrainEntity.last_update is a
    # `datetime` — confirm the intended serialized form.
    last_update: str
    brain_type: BrainType
    brain_definition: Optional[ApiBrainDefinitionEntity] = None
# Access levels a user can hold on a brain, from most restricted (Viewer)
# to full control (Owner). Functional Enum API with a `str` mixin, so each
# member doubles as its own string value.
RoleEnum = Enum(
    "RoleEnum",
    {"Viewer": "Viewer", "Editor": "Editor", "Owner": "Owner"},
    type=str,
)
class BrainUser(BaseModel):
    # Membership record linking a user to a brain with a role.
    id: UUID  # presumably the brain's id — verify against repository usage
    user_id: UUID
    rights: RoleEnum
    default_brain: bool = False  # True when this brain is the user's default
class MinimalUserBrainEntity(BaseModel):
    # Lightweight brain summary returned in per-user brain listings.
    id: UUID
    name: str
    rights: RoleEnum  # the requesting user's role on this brain
    status: str
    brain_type: BrainType
    description: str
    integration_logo_url: str
    max_files: int

View File

@ -11,9 +11,10 @@ from langchain_core.prompts.chat import (
SystemMessagePromptTemplate,
)
from langchain_core.prompts.prompt import PromptTemplate
from logger import get_logger
from modules.brain.knowledge_brain_qa import KnowledgeBrainQA
from modules.chat.dto.chats import ChatQuestion
from backend.logger import get_logger
from backend.modules.brain.knowledge_brain_qa import KnowledgeBrainQA
from backend.modules.chat.dto.chats import ChatQuestion
logger = get_logger(__name__)

View File

@ -4,8 +4,9 @@ from uuid import UUID
from langchain_community.chat_models import ChatLiteLLM
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from modules.brain.knowledge_brain_qa import KnowledgeBrainQA
from modules.chat.dto.chats import ChatQuestion
from backend.modules.brain.knowledge_brain_qa import KnowledgeBrainQA
from backend.modules.chat.dto.chats import ChatQuestion
class ClaudeBrain(KnowledgeBrainQA):
@ -16,6 +17,7 @@ class ClaudeBrain(KnowledgeBrainQA):
Attributes:
**kwargs: Arbitrary keyword arguments for KnowledgeBrainQA initialization.
"""
def __init__(
self,
**kwargs,

View File

@ -10,12 +10,14 @@ from langchain_core.tools import BaseTool
from langchain_openai import ChatOpenAI
from langgraph.graph import END, StateGraph
from langgraph.prebuilt import ToolExecutor, ToolInvocation
from logger import get_logger
from modules.brain.knowledge_brain_qa import KnowledgeBrainQA
from modules.chat.dto.chats import ChatQuestion
from modules.chat.dto.outputs import GetChatHistoryOutput
from modules.chat.service.chat_service import ChatService
from modules.tools import (
from backend.logger import get_logger
from backend.modules.brain.knowledge_brain_qa import KnowledgeBrainQA
from backend.modules.chat.dto.chats import ChatQuestion
from backend.modules.chat.dto.outputs import GetChatHistoryOutput
from backend.modules.chat.service.chat_service import ChatService
from backend.modules.dependencies import get_service
from backend.modules.tools import (
EmailSenderTool,
ImageGeneratorTool,
URLReaderTool,
@ -29,7 +31,7 @@ class AgentState(TypedDict):
logger = get_logger(__name__)
chat_service = ChatService()
chat_service = get_service(ChatService)()
class GPT4Brain(KnowledgeBrainQA):

Some files were not shown because too many files have changed in this diff Show More