feat: 🎸 openai (#1658)

Cleaning old code to introduce a better pattern

# Description

Please include a summary of the changes and the related issue. Please
also include relevant motivation and context.

## Checklist before requesting a review

Please delete options that are not relevant.

- [ ] My code follows the style guidelines of this project
- [ ] I have performed a self-review of my code
- [ ] I have commented hard-to-understand areas
- [ ] I have ideally added tests that prove my fix is effective or that
my feature works
- [ ] New and existing unit tests pass locally with my changes
- [ ] Any dependent changes have been merged

## Screenshots (if appropriate):
This commit is contained in:
Stan Girard 2023-11-20 01:22:03 +01:00 committed by GitHub
parent d955e31f50
commit 6a041b6f6d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 33 additions and 56 deletions

View File

@ -10,9 +10,7 @@ GOOGLE_CLOUD_PROJECT=<change-me>
CELERY_BROKER_URL=redis://redis:6379/0 CELERY_BROKER_URL=redis://redis:6379/0
CELEBRY_BROKER_QUEUE_NAME=quivr-preview.fifo CELEBRY_BROKER_QUEUE_NAME=quivr-preview.fifo
#Private LLM Variables
PRIVATE=False
MODEL_PATH=./local_models/ggml-gpt4all-j-v1.3-groovy.bin
#RESEND #RESEND
RESEND_API_KEY=<change-me> RESEND_API_KEY=<change-me>

View File

@ -65,7 +65,6 @@ class QABaseBrainPicking(BaseModel):
max_tokens: int = 256 max_tokens: int = 256
streaming: bool = False streaming: bool = False
openai_api_key: str = None # pyright: ignore reportPrivateUsage=none
callbacks: List[ callbacks: List[
AsyncIteratorCallbackHandler AsyncIteratorCallbackHandler
] = None # pyright: ignore reportPrivateUsage=none ] = None # pyright: ignore reportPrivateUsage=none
@ -85,9 +84,7 @@ class QABaseBrainPicking(BaseModel):
@property @property
def embeddings(self) -> OpenAIEmbeddings: def embeddings(self) -> OpenAIEmbeddings:
return OpenAIEmbeddings( return OpenAIEmbeddings() # pyright: ignore reportPrivateUsage=none
openai_api_key=self.openai_api_key
) # pyright: ignore reportPrivateUsage=none
supabase_client: Optional[Client] = None supabase_client: Optional[Client] = None
vector_store: Optional[CustomSupabaseVectorStore] = None vector_store: Optional[CustomSupabaseVectorStore] = None
@ -152,7 +149,6 @@ class QABaseBrainPicking(BaseModel):
streaming=streaming, streaming=streaming,
verbose=False, verbose=False,
callbacks=callbacks, callbacks=callbacks,
openai_api_key=self.openai_api_key,
) # pyright: ignore reportPrivateUsage=none ) # pyright: ignore reportPrivateUsage=none
def _create_prompt_template(self): def _create_prompt_template(self):

View File

@ -32,7 +32,6 @@ class HeadlessQA(BaseModel):
model: str model: str
temperature: float = 0.0 temperature: float = 0.0
max_tokens: int = 2000 max_tokens: int = 2000
openai_api_key: Optional[str] = None
streaming: bool = False streaming: bool = False
chat_id: str chat_id: str
callbacks: Optional[List[AsyncIteratorCallbackHandler]] = None callbacks: Optional[List[AsyncIteratorCallbackHandler]] = None
@ -80,7 +79,6 @@ class HeadlessQA(BaseModel):
streaming=streaming, streaming=streaming,
verbose=True, verbose=True,
callbacks=callbacks, callbacks=callbacks,
openai_api_key=self.openai_api_key,
) )
def _create_prompt_template(self): def _create_prompt_template(self):

View File

@ -6,7 +6,7 @@ from .chats import ChatMessage, ChatQuestion
from .files import File from .files import File
from .prompt import Prompt, PromptStatusEnum from .prompt import Prompt, PromptStatusEnum
from .settings import (BrainRateLimiting, BrainSettings, ContactsSettings, from .settings import (BrainRateLimiting, BrainSettings, ContactsSettings,
LLMSettings, ResendSettings, get_embeddings, ResendSettings, get_embeddings,
get_documents_vector_store, get_embeddings, get_documents_vector_store, get_embeddings,
get_supabase_client, get_supabase_db) get_supabase_client, get_supabase_db)
from .user_usage import UserUsage from .user_usage import UserUsage

View File

@ -13,7 +13,6 @@ class BrainSettings(BaseSettings):
openai_api_key: str openai_api_key: str
supabase_url: str supabase_url: str
supabase_service_key: str supabase_service_key: str
pg_database_url: str = "not implemented"
resend_api_key: str = "null" resend_api_key: str = "null"
resend_email_address: str = "brain@mail.quivr.app" resend_email_address: str = "brain@mail.quivr.app"
@ -23,11 +22,6 @@ class ContactsSettings(BaseSettings):
resend_contact_sales_to: str = "null" resend_contact_sales_to: str = "null"
class LLMSettings(BaseSettings):
private: bool = False
model_path: str = "./local_models/ggml-gpt4all-j-v1.3-groovy.bin"
class ResendSettings(BaseSettings): class ResendSettings(BaseSettings):
resend_api_key: str = "null" resend_api_key: str = "null"

View File

@ -1,11 +1,10 @@
from typing import Optional
from uuid import UUID from uuid import UUID
from models import get_supabase_client from models import get_supabase_client
from modules.user.entity.user_identity import UserIdentity from modules.user.entity.user_identity import UserIdentity
def create_user_identity(id: UUID, openai_api_key: Optional[str]) -> UserIdentity: def create_user_identity(id: UUID) -> UserIdentity:
supabase_client = get_supabase_client() supabase_client = get_supabase_client()
response = ( response = (
@ -13,12 +12,11 @@ def create_user_identity(id: UUID, openai_api_key: Optional[str]) -> UserIdentit
.insert( .insert(
{ {
"user_id": str(id), "user_id": str(id),
"openai_api_key": openai_api_key,
} }
) )
.execute() .execute()
) )
user_identity = response.data[0] user_identity = response.data[0]
return UserIdentity( return UserIdentity(
id=user_identity.get("user_id"), openai_api_key=user_identity.get("openai_api_key") # type: ignore id=user_identity.get("user_id")
) )

View File

@ -19,9 +19,8 @@ def get_user_identity(user_id: UUID) -> UserIdentity:
) )
if len(response.data) == 0: if len(response.data) == 0:
return create_user_identity(user_id, openai_api_key=None) return create_user_identity(user_id)
user_identity = response.data[0] user_identity = response.data[0]
openai_api_key = user_identity["openai_api_key"]
return UserIdentity(id=user_id, openai_api_key=openai_api_key) return UserIdentity(id=user_id)

View File

@ -1,4 +1,3 @@
from typing import Optional
from uuid import UUID from uuid import UUID
from models.settings import get_supabase_client from models.settings import get_supabase_client
@ -8,7 +7,8 @@ from pydantic import BaseModel
class UserUpdatableProperties(BaseModel): class UserUpdatableProperties(BaseModel):
openai_api_key: Optional[str] # Nothing for now
empty: bool = True
def update_user_properties( def update_user_properties(
@ -25,10 +25,10 @@ def update_user_properties(
if len(response.data) == 0: if len(response.data) == 0:
return create_user_identity( return create_user_identity(
user_id, openai_api_key=user_identity_updatable_properties.openai_api_key user_id
) )
user_identity = response.data[0] user_identity = response.data[0]
openai_api_key = user_identity["openai_api_key"]
return UserIdentity(id=user_id, openai_api_key=openai_api_key)
return UserIdentity(id=user_id)

View File

@ -2,7 +2,6 @@ from fastapi import HTTPException
from llm.api_brain_qa import APIBrainQA from llm.api_brain_qa import APIBrainQA
from llm.qa_base import QABaseBrainPicking from llm.qa_base import QABaseBrainPicking
from models.brain_entity import BrainType from models.brain_entity import BrainType
from repository.brain import get_brain_details
from repository.brain.get_brain_by_id import get_brain_by_id from repository.brain.get_brain_by_id import get_brain_by_id
from routes.authorizations.brain_authorization import validate_brain_authorization from routes.authorizations.brain_authorization import validate_brain_authorization
from routes.authorizations.types import RoleEnum from routes.authorizations.types import RoleEnum
@ -27,11 +26,6 @@ class BrainfulChat(ChatInterface):
required_roles=[RoleEnum.Viewer, RoleEnum.Editor, RoleEnum.Owner], required_roles=[RoleEnum.Viewer, RoleEnum.Editor, RoleEnum.Owner],
) )
def get_openai_api_key(self, brain_id, user_id):
brain_details = get_brain_details(brain_id)
if brain_details:
return brain_details.openai_api_key
def get_answer_generator( def get_answer_generator(
self, self,
brain_id, brain_id,

View File

@ -1,5 +1,4 @@
from llm.qa_headless import HeadlessQA from llm.qa_headless import HeadlessQA
from modules.user.repository import get_user_identity
from routes.chat.interface import ChatInterface from routes.chat.interface import ChatInterface
@ -7,12 +6,6 @@ class BrainlessChat(ChatInterface):
def validate_authorization(self, user_id, brain_id): def validate_authorization(self, user_id, brain_id):
pass pass
def get_openai_api_key(self, brain_id, user_id):
user_identity = get_user_identity(user_id)
if user_identity is not None:
return user_identity.openai_api_key
def get_answer_generator( def get_answer_generator(
self, self,
brain_id, brain_id,

View File

@ -6,10 +6,6 @@ class ChatInterface(ABC):
def validate_authorization(self, user_id, required_roles): def validate_authorization(self, user_id, required_roles):
pass pass
@abstractmethod
def get_openai_api_key(self, brain_id, user_id):
pass
@abstractmethod @abstractmethod
def get_answer_generator( def get_answer_generator(
self, self,

View File

@ -199,7 +199,6 @@ async def create_stream_question_handler(
chat_instance = get_chat_strategy(brain_id) chat_instance = get_chat_strategy(brain_id)
chat_instance.validate_authorization(user_id=current_user.id, brain_id=brain_id) chat_instance.validate_authorization(user_id=current_user.id, brain_id=brain_id)
# Retrieve user's OpenAI API key
brain = Brain(id=brain_id) brain = Brain(id=brain_id)
brain_details: BrainEntity | None = None brain_details: BrainEntity | None = None
userDailyUsage = UserUsage( userDailyUsage = UserUsage(

View File

@ -45,10 +45,6 @@ async def crawl_endpoint(
) )
userSettings = userDailyUsage.get_user_settings() userSettings = userDailyUsage.get_user_settings()
# [TODO] rate limiting of user for crawl
if request.headers.get("Openai-Api-Key"):
brain.max_brain_size = userSettings.get("max_brain_size", 1000000000)
file_size = 1000000 file_size = 1000000
remaining_free_space = userSettings.get("max_brain_size", 1000000000) remaining_free_space = userSettings.get("max_brain_size", 1000000000)

View File

@ -47,9 +47,6 @@ async def upload_file(
) )
userSettings = userDailyUsage.get_user_settings() userSettings = userDailyUsage.get_user_settings()
if request.headers.get("Openai-Api-Key"):
brain.max_brain_size = userSettings.get("max_brain_size", 1000000000)
remaining_free_space = userSettings.get("max_brain_size", 1000000000) remaining_free_space = userSettings.get("max_brain_size", 1000000000)
file_size = get_file_size(uploadFile) file_size = get_file_size(uploadFile)

View File

@ -4,7 +4,7 @@ import { UUID } from "crypto";
import { UserStats } from "@/lib/types/User"; import { UserStats } from "@/lib/types/User";
export type UserIdentityUpdatableProperties = { export type UserIdentityUpdatableProperties = {
openai_api_key?: string | null; empty?: string | null;
}; };
export type UserIdentity = { export type UserIdentity = {

View File

@ -10,7 +10,7 @@
}, },
"intro": { "intro": {
"title": "Get a Second Brain with", "title": "Get a Second Brain with",
"subtitle": "Upload all your files and start talking with them.", "subtitle": "Your productivity assistant connected to your files & favorite applications",
"try_demo": "Try free demo", "try_demo": "Try free demo",
"contact_sales": "Contact sales team" "contact_sales": "Contact sales team"
}, },

19
renovate.json Normal file
View File

@ -0,0 +1,19 @@
{
"extends": [
"config:base"
],
"semanticCommits": true,
"stabilityDays": 3,
"prCreation": "not-pending",
"labels": [
"type: dependencies"
],
"packageRules": [
{
"packageNames": [
"node"
],
"enabled": false
}
]
}