refactor: reorg files in backend (#2449)

# Description

Please include a summary of the changes and the related issue. Please
also include relevant motivation and context.

## Checklist before requesting a review

Please delete options that are not relevant.

- [x] My code follows the style guidelines of this project
- [x] I have performed a self-review of my code
- [ ] I have commented hard-to-understand areas
- [ ] Ideally, I have added tests that prove my fix is effective or that
my feature works
- [ ] New and existing unit tests pass locally with my changes
- [ ] Any dependent changes have been merged

## Screenshots (if appropriate):


<!--
ELLIPSIS_HIDDEN
-->
----

| 🚀 This description was created by
[Ellipsis](https://www.ellipsis.dev) for commit
997576d577 |
|--------|

### Summary:
This PR is a significant refactoring of the codebase: files and classes
are relocated and renamed, and import paths are updated to match the new
locations. No new functionality is introduced.

**Key points**:
- Significant refactoring of the codebase for improved organization and
clarity.
- File and class relocations and renames.
- Updated import paths to reflect new locations and names.
- No new functionality introduced.


----
Generated with ❤️ by [ellipsis.dev](https://www.ellipsis.dev)

<!--
ELLIPSIS_HIDDEN
-->
This commit is contained in:
Thoonsen Maxime 2024-04-19 22:25:03 -10:00 committed by GitHub
parent 1d7b31949d
commit dc6bd53519
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
44 changed files with 38 additions and 53 deletions

View File

@ -1 +0,0 @@

View File

@ -1,9 +0,0 @@
from langchain.prompts.prompt import PromptTemplate
_template = """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language. include the follow up instructions in the standalone question.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:"""
CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

View File

@ -3,4 +3,3 @@ from .files import File
from .settings import (BrainRateLimiting, BrainSettings, ResendSettings,
get_documents_vector_store, get_embeddings,
get_supabase_client, get_supabase_db)
from .user_usage import UserUsage

View File

@ -2,7 +2,7 @@ from abc import ABC, abstractmethod
from datetime import datetime
from uuid import UUID
from .entity import LLMModels
from .llm_models import LLMModels
class Repository(ABC):

View File

@ -7,6 +7,7 @@ from models.databases.repository import Repository
logger = get_logger(__name__)
#TODO: change the name of this class because another one already exists
class UserUsage(Repository):
def __init__(self, supabase_client):
self.db = supabase_client

View File

@ -9,7 +9,7 @@ from typing import List, Optional
from fastapi import UploadFile
from logger import get_logger
from models.user_usage import UserUsage
from modules.user.service.user_usage import UserUsage
from modules.assistant.dto.inputs import InputAssistant
from modules.assistant.ito.utils.pdf_generator import PDFGenerator, PDFModel
from modules.chat.controller.chat.utils import update_user_usage

View File

@ -6,8 +6,8 @@ import jq
import requests
from fastapi import HTTPException
from litellm import completion
from llm.utils.call_brain_api import call_brain_api
from llm.utils.get_api_brain_definition_as_json_schema import (
from utils.call_brain_api import call_brain_api
from utils.get_api_brain_definition_as_json_schema import (
get_api_brain_definition_as_json_schema,
)
from logger import get_logger

View File

@ -4,7 +4,6 @@ from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException
from logger import get_logger
from middlewares.auth.auth_bearer import AuthBearer, get_current_user
from models import UserUsage
from modules.brain.dto.inputs import (
BrainQuestionRequest,
BrainUpdatableProperties,
@ -15,13 +14,14 @@ from modules.brain.entity.integration_brain import IntegrationDescriptionEntity
from modules.brain.service.brain_authorization_service import has_brain_authorization
from modules.brain.service.brain_service import BrainService
from modules.brain.service.brain_user_service import BrainUserService
from modules.brain.service.get_question_context_from_brain import get_question_context_from_brain
from modules.brain.service.integration_brain_service import (
IntegrationBrainDescriptionService,
)
from modules.prompt.service.prompt_service import PromptService
from modules.user.entity.user_identity import UserIdentity
from modules.user.service.user_usage import UserUsage
from packages.utils.telemetry import maybe_send_telemetry
from repository.brain import get_question_context_from_brain
logger = get_logger(__name__)
brain_router = APIRouter()

View File

@ -14,7 +14,7 @@ from modules.knowledge.dto.inputs import CreateKnowledgeProperties
from modules.knowledge.repository.knowledge_interface import KnowledgeInterface
from modules.knowledge.service.knowledge_service import KnowledgeService
from pydantic import BaseModel
from repository.files.upload_file import upload_file_storage
from modules.upload.service.upload_file import upload_file_storage
logger = get_logger(__name__)

View File

@ -3,12 +3,12 @@ from typing import AsyncIterable, List, Optional
from uuid import UUID
from langchain.callbacks.streaming_aiter import AsyncIteratorCallbackHandler
from llm.utils.format_chat_history import format_chat_history
from llm.utils.get_prompt_to_use import get_prompt_to_use
from llm.utils.get_prompt_to_use_id import get_prompt_to_use_id
from utils.format_chat_history import format_chat_history
from utils.get_prompt_to_use import get_prompt_to_use
from utils.get_prompt_to_use_id import get_prompt_to_use_id
from logger import get_logger
from models import BrainSettings
from models.user_usage import UserUsage
from modules.user.service.user_usage import UserUsage
from modules.brain.entity.brain_entity import BrainEntity
from modules.brain.qa_interface import QAInterface
from modules.brain.rags.quivr_rag import QuivrRAG
@ -24,7 +24,7 @@ from modules.chat.dto.outputs import GetChatHistoryOutput
from modules.chat.service.chat_service import ChatService
from pydantic import BaseModel, ConfigDict
from pydantic_settings import BaseSettings
from repository.files.generate_file_signed_url import generate_file_signed_url
from modules.upload.service.generate_file_signed_url import generate_file_signed_url
logger = get_logger(__name__)
QUIVR_DEFAULT_PROMPT = "Your name is Quivr. You're a helpful assistant. If you don't know the answer, just say that you don't know, don't try to make up an answer."

View File

@ -8,12 +8,12 @@ from langchain.chains import LLMChain
from langchain.chat_models.base import BaseChatModel
from langchain.prompts.chat import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatLiteLLM
from llm.utils.format_chat_history import (
from utils.format_chat_history import (
format_chat_history,
format_history_to_openai_mesages,
)
from llm.utils.get_prompt_to_use import get_prompt_to_use
from llm.utils.get_prompt_to_use_id import get_prompt_to_use_id
from utils.get_prompt_to_use import get_prompt_to_use
from utils.get_prompt_to_use_id import get_prompt_to_use_id
from logger import get_logger
from models import BrainSettings # Importing settings related to the 'brain'
from modules.brain.qa_interface import QAInterface

View File

@ -12,7 +12,7 @@ from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, PromptTemplate
from langchain_core.runnables import RunnableParallel, RunnablePassthrough
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
from llm.utils.get_prompt_to_use import get_prompt_to_use
from utils.get_prompt_to_use import get_prompt_to_use
from logger import get_logger
from models import BrainSettings # Importing settings related to the 'brain'
from modules.brain.service.brain_service import BrainService

View File

@ -14,7 +14,7 @@ from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, PromptTemplate
from langchain_core.runnables import RunnableLambda, RunnablePassthrough
from langchain_openai import OpenAIEmbeddings
from llm.utils.get_prompt_to_use import get_prompt_to_use
from utils.get_prompt_to_use import get_prompt_to_use
from logger import get_logger
from models import BrainSettings # Importing settings related to the 'brain'
from modules.brain.service.brain_service import BrainService

View File

@ -1,3 +1,2 @@
from .get_brain_url import get_brain_url
from .resend_invitation_email import resend_invitation_email
from .subscription_invitation_service import SubscriptionInvitationService

View File

@ -4,13 +4,18 @@ from logger import get_logger
from models import BrainSettings, BrainSubscription
from modules.brain.service.brain_service import BrainService
from packages.emails.send_email import send_email
from repository.brain_subscription import get_brain_url
logger = get_logger(__name__)
brain_service = BrainService()
def get_brain_url(origin: str, brain_id: UUID) -> str:
"""Generates the brain URL based on the brain_id."""
return f"{origin}/invitation/{brain_id}"
def resend_invitation_email(
brain_subscription: BrainSubscription,
inviter_email: str,

View File

@ -3,7 +3,7 @@ from uuid import UUID
from attr import dataclass
from logger import get_logger
from models.settings import get_embeddings, get_supabase_client
from repository.files.generate_file_signed_url import generate_file_signed_url
from modules.upload.service.generate_file_signed_url import generate_file_signed_url
from vectorstore.supabase import CustomSupabaseVectorStore
logger = get_logger(__name__)

View File

@ -3,10 +3,10 @@ from uuid import UUID
from fastapi import HTTPException
from logger import get_logger
from models import UserUsage
from models.databases.entity import LLMModels
from models.databases.llm_models import LLMModels
from modules.brain.service.brain_service import BrainService
from modules.chat.service.chat_service import ChatService
from modules.user.service.user_usage import UserUsage
logger = get_logger(__name__)
brain_service = BrainService()

View File

@ -8,7 +8,7 @@ from langchain_openai import OpenAIEmbeddings
from logger import get_logger
from middlewares.auth import AuthBearer, get_current_user
from models.settings import BrainSettings, get_supabase_client
from models.user_usage import UserUsage
from modules.user.service.user_usage import UserUsage
from modules.brain.service.brain_service import BrainService
from modules.chat.controller.chat.brainful_chat import BrainfulChat
from modules.chat.dto.chats import ChatItem, ChatQuestion

View File

@ -11,7 +11,7 @@ from modules.brain.service.brain_authorization_service import (
from modules.brain.service.brain_vector_service import BrainVectorService
from modules.knowledge.service.knowledge_service import KnowledgeService
from modules.user.entity.user_identity import UserIdentity
from repository.files.generate_file_signed_url import generate_file_signed_url
from modules.upload.service.generate_file_signed_url import generate_file_signed_url
knowledge_router = APIRouter()
logger = get_logger(__name__)

View File

@ -22,7 +22,7 @@ from modules.notification.service.notification_service import NotificationServic
from modules.user.entity.user_identity import UserIdentity
from packages.files.file import convert_bytes, get_file_size
from packages.utils.telemetry import maybe_send_telemetry
from repository.files.upload_file import upload_file_storage
from modules.upload.service.upload_file import upload_file_storage
logger = get_logger(__name__)
upload_router = APIRouter()

View File

View File

@ -5,7 +5,7 @@ from logger import get_logger
from models import File
from modules.brain.service.brain_vector_service import BrainVectorService
from packages.embeddings.vectors import Neurons
from repository.files.upload_file import DocumentSerializable
from modules.upload.service.upload_file import DocumentSerializable
logger = get_logger(__name__)

View File

@ -1 +0,0 @@
from .get_question_context_from_brain import get_question_context_from_brain

View File

@ -1,8 +0,0 @@
import os
from uuid import UUID
def get_brain_url(origin: str, brain_id: UUID) -> str:
"""Generates the brain URL based on the brain_id."""
return f"{origin}/invitation/{brain_id}"

View File

@ -17,7 +17,7 @@ from modules.prompt.service.prompt_service import PromptService
from modules.user.entity.user_identity import UserIdentity
from modules.user.service.user_service import UserService
from pydantic import BaseModel
from repository.brain_subscription import (
from modules.brain.service.brain_subscription import (
SubscriptionInvitationService,
resend_invitation_email,
)

View File

@ -24,7 +24,7 @@ from modules.knowledge.dto.inputs import CreateKnowledgeProperties
from modules.knowledge.service.knowledge_service import KnowledgeService
from ragas import evaluate
from ragas.embeddings.base import LangchainEmbeddingsWrapper
from repository.files.upload_file import upload_file_storage
from modules.upload.service.upload_file import upload_file_storage
def main(

View File

@ -1,10 +1,10 @@
from uuid import UUID
from fastapi import HTTPException
from llm.utils.extract_api_brain_definition_values_from_llm_output import (
from utils.extract_api_brain_definition_values_from_llm_output import (
extract_api_brain_definition_values_from_llm_output,
)
from llm.utils.make_api_request import get_api_call_response_as_text
from utils.make_api_request import get_api_call_response_as_text
from modules.brain.service.api_brain_definition_service import ApiBrainDefinitionService
from modules.brain.service.brain_service import BrainService

View File

@ -1,6 +1,6 @@
from fastapi import HTTPException
from llm.utils.extract_api_definition import format_api_brain_property
from llm.utils.sanitize_function_name import sanitize_function_name
from utils.extract_api_definition import format_api_brain_property
from utils.sanitize_function_name import sanitize_function_name
from modules.brain.entity.brain_entity import BrainEntity
from modules.brain.service.api_brain_definition_service import ApiBrainDefinitionService

View File

@ -1,7 +1,7 @@
from typing import Optional
from uuid import UUID
from llm.utils.get_prompt_to_use_id import get_prompt_to_use_id
from utils.get_prompt_to_use_id import get_prompt_to_use_id
from modules.prompt.service import PromptService
promptService = PromptService()