Feat/backend core (#656)

Matt, 2023-07-17 07:57:27 +01:00 (committed by GitHub)
parent 6a436d822c
commit e61f437ce8
106 changed files with 249 additions and 62 deletions


@ -4,7 +4,7 @@ on:
push:
branches: [ "main" ]
paths:
- 'backend/**'
- 'backend/core/**'
env:
AWS_REGION: eu-west-3 # set this to your preferred AWS region, e.g. us-west-1
@ -25,7 +25,7 @@ jobs:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./backend
working-directory: ./backend/core
environment: production
steps:


@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./backend
working-directory: ./backend/core
environment: production
steps:


@ -4,17 +4,17 @@ on:
push:
branches: [ main ]
paths:
- 'backend/**'
- 'backend/core/**'
pull_request:
branches: [ main ]
paths:
- 'backend/**'
- 'backend/core/**'
jobs:
build:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./backend
working-directory: ./backend/core
environment: preview
strategy:
matrix:


@ -6,6 +6,7 @@ on:
permissions:
pull-requests: write
contents: read
jobs:
build:
@ -25,7 +26,7 @@ jobs:
run: npm install code-review-gpt
- name: Run code review script
run: npx code-review-gpt --ci
run: npx code-review-gpt review --ci
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BASE_SHA: ${{ github.event.pull_request.base.sha }}

.gitignore (vendored): 6 changes

@ -48,12 +48,12 @@ quivr/*
streamlit-demo/.streamlit/secrets.toml
.backend_env
.frontend_env
backend/pandoc-*
backend/core/pandoc-*
**/.pandoc-*
backend/application_default_credentials.json
backend/core/application_default_credentials.json
#local models
backend/local_models/*
backend/core/local_models/*
## scripts


@ -40,9 +40,9 @@
"**/.next/": true,
"**/build/": true,
"**/.docusaurus/": true,
"**/node_modules/": true,
"**/node_modules/": true
},
"python.linting.pycodestyleCategorySeverity.W": "Error",
"python.defaultInterpreterPath": "python3",
"python.linting.flake8CategorySeverity.W": "Error",
}
}


@ -87,19 +87,19 @@ Additionally, you'll need a [Supabase](https://supabase.com/) account for:
- **Step 2 - Bis**: Copy the `.XXXXX_env` files
```bash
cp .backend_env.example backend/.env
cp .backend_env.example backend/core/.env
cp .frontend_env.example frontend/.env
```
- **Step 3**: Update the `backend/.env` and `frontend/.env` file
- **Step 3**: Update the `backend/core/.env` and `frontend/.env` file
> _Your `supabase_service_key` can be found in your Supabase dashboard under Project Settings -> API. Use the `anon` `public` key found in the `Project API keys` section._
> _Your `JWT_SECRET_KEY` can be found in your Supabase settings under Project Settings -> API -> JWT Settings -> JWT Secret_
> _To activate vertexAI with PaLM from GCP follow the instructions [here](https://python.langchain.com/en/latest/modules/models/llms/integrations/google_vertex_ai_palm.html) and update `backend/.env`- It is an advanced feature, please be expert in GCP before trying to use it_
> _To activate vertexAI with PaLM from GCP follow the instructions [here](https://python.langchain.com/en/latest/modules/models/llms/integrations/google_vertex_ai_palm.html) and update `backend/core/.env`- It is an advanced feature, please be expert in GCP before trying to use it_
- [ ] Change variables in `backend/.env`
- [ ] Change variables in `backend/core/.env`
- [ ] Change variables in `frontend/.env`
- **Step 4**: Run the following migration scripts on the Supabase database via the web interface (SQL Editor -> `New query`)


@ -5,14 +5,14 @@ RUN apt-get update && apt-get install -y libgeos-dev pandoc
WORKDIR /code
COPY ./requirements.txt /code/requirements.txt
COPY ./core/requirements.txt /code/requirements.txt
RUN pip install --no-cache-dir -r /code/requirements.txt --timeout 100
#You may need to run `chmod +x ./backend/scripts/start.sh` on your host machine if you get a permission error
COPY ./scripts/start.sh /code/scripts/start.sh
#You may need to run `chmod +x ./backend/core/scripts/start.sh` on your host machine if you get a permission error
COPY ./core/scripts/start.sh /code/scripts/start.sh
RUN chmod +x /code/scripts/start.sh
COPY . /code/
COPY ./core /code/
ENTRYPOINT ["bash", "/code/scripts/start.sh"]


@ -1,13 +1,12 @@
import os
from typing import Optional
from auth.api_key_handler import get_user_from_api_key, verify_api_key
from auth.jwt_token_handler import decode_access_token, verify_token
from fastapi import Depends, HTTPException, Request
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from models.users import User
from auth.api_key_handler import get_user_from_api_key, verify_api_key
from auth.jwt_token_handler import decode_access_token, verify_token
class AuthBearer(HTTPBearer):
def __init__(self, auto_error: bool = True):


@ -1,4 +1,5 @@
from typing import Optional
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.llms.base import BaseLLM
from langchain.llms.gpt4all import GPT4All


@ -8,7 +8,7 @@ origins = [
"https://www.quivr.app",
"http://quivr.app",
"http://www.quivr.app",
"*"
"*",
]
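This `origins` list is presumably consumed by FastAPI's CORS middleware elsewhere in the same file; the hunk itself only adds a trailing comma after `"*"`. A minimal sketch of the typical wiring, with every option other than `allow_origins` assumed rather than taken from this diff:

```python
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI()

# Hypothetical wiring: the middleware options below are illustrative.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,  # the list shown above; "*" permits any origin
    allow_methods=["*"],
    allow_headers=["*"],
)
```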


@ -28,13 +28,19 @@ class BrainSubscription(BaseModel):
response = (
self.commons["supabase"]
.table("brain_subscription_invitations")
.insert({"brain_id": str(self.brain_id), "email": self.email, "rights": self.rights})
.insert(
{
"brain_id": str(self.brain_id),
"email": self.email,
"rights": self.rights,
}
)
.execute()
)
return response.data
def update_subscription_invitation(self):
logger.info('Updating subscription invitation')
logger.info("Updating subscription invitation")
response = (
self.commons["supabase"]
.table("brain_subscription_invitations")
@ -46,12 +52,19 @@ class BrainSubscription(BaseModel):
return response.data
def create_or_update_subscription_invitation(self):
response = self.commons["supabase"].table("brain_subscription_invitations").select("*").eq("brain_id", str(self.brain_id)).eq("email", self.email).execute()
response = (
self.commons["supabase"]
.table("brain_subscription_invitations")
.select("*")
.eq("brain_id", str(self.brain_id))
.eq("email", self.email)
.execute()
)
if response.data:
response = self.update_subscription_invitation()
else:
response = self.create_subscription_invitation()
response = self.create_subscription_invitation()
return response
@ -71,13 +84,15 @@ class BrainSubscription(BaseModel):
"""
try:
r = resend.Emails.send({
"from": "onboarding@resend.dev",
"to": self.email,
"subject": "Quivr - Brain Shared With You",
"html": html_body
})
print('Resend response', r)
r = resend.Emails.send(
{
"from": "onboarding@resend.dev",
"to": self.email,
"subject": "Quivr - Brain Shared With You",
"html": html_body,
}
)
print("Resend response", r)
except Exception as e:
logger.error(f"Error sending email: {e}")
return
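Beyond the Black-style reformatting, the flow shown above is: look up an existing invitation for the `(brain_id, email)` pair, update it if one exists, otherwise insert a new row, then notify the invitee via Resend. A minimal usage sketch, assuming `BrainSubscription` can be constructed with just these fields and that `"Viewer"` is an accepted `rights` value (neither is confirmed by this diff):

```python
# Hypothetical caller; field names mirror the insert dict above,
# while the brain_id format and rights value are illustrative.
subscription = BrainSubscription(
    brain_id="4d6f1c2e-0000-0000-0000-000000000000",
    email="invitee@example.com",
    rights="Viewer",
)

# Upsert-style call: updates the matching invitation row if it exists,
# otherwise creates one, and returns the Supabase response data.
invitation = subscription.create_or_update_subscription_invitation()
```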


@ -5,7 +5,6 @@ from fastapi import APIRouter, Depends, Query
from models.brains import Brain
from models.settings import common_dependencies
from models.users import User
from routes.authorizations.brain_authorization import (
has_brain_authorization,
validate_brain_authorization,


@ -1,5 +1,5 @@
/* Instructions:
1. in .backend/supabase folder, create .env file with BEEHIIV_PUBLICATION_ID and BEEHIIV_API_KEY variables
1. in .backend/core/supabase folder, create .env file with BEEHIIV_PUBLICATION_ID and BEEHIIV_API_KEY variables
2. cd into .backend
--- for the rest of these steps you will need your supabase project id which can be found in your console url: https://supabase.com/dashboard/project/<projectId> ---
3. run `supabase secrets set --env-file ./supabase/.env` to set the environment variables


@ -0,0 +1,20 @@
# Use the same base image as the 'backend-core' container
FROM python:3.11-bullseye
# Install necessary packages
RUN apt-get update && apt-get install -y liblzma-dev cmake git
# Set the working directory
WORKDIR /app
# Copy the requirements file
COPY ./private/requirements.txt /app/requirements.txt
# Install Python dependencies
RUN pip install --no-cache-dir -r /app/requirements.txt --timeout 100
# Copy your application's code to the Docker container
COPY ./private /app
# Start the Uvicorn server on port 5051
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "5051"]

backend/private/logger.py (new file, 17 additions)

@ -0,0 +1,17 @@
import logging
def get_logger(logger_name, log_level=logging.INFO):
logger = logging.getLogger(logger_name)
logger.setLevel(log_level)
logger.propagate = False # Prevent log propagation to avoid double logging
formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s: %(message)s")
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
if not logger.handlers:
logger.addHandler(console_handler)
return logger
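`get_logger` only attaches a handler when none is present, so importing it from several modules will not produce duplicate log lines. A minimal usage sketch (the module name and message are illustrative):

```python
from logger import get_logger

logger = get_logger(__name__)

# Prints something like:
# 2023-07-17 07:57:27,000 [INFO] __main__: private API starting
logger.info("private API starting")
```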

backend/private/main.py (new file, 30 additions)

@ -0,0 +1,30 @@
import os
import sentry_sdk
from fastapi import FastAPI, HTTPException
from fastapi.responses import JSONResponse
from logger import get_logger
from routes.completions_routes import completions_router
logger = get_logger(__name__)
if os.getenv("SENTRY_DSN"):
sentry_sdk.init(
dsn=os.getenv("SENTRY_DSN"),
# Set traces_sample_rate to 1.0 to capture 100%
# of transactions for performance monitoring.
# We recommend adjusting this value in production,
traces_sample_rate=1.0,
)
app = FastAPI()
app.include_router(completions_router)
@app.exception_handler(HTTPException)
async def http_exception_handler(_, exc):
return JSONResponse(
status_code=exc.status_code,
content={"detail": exc.detail},
)
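The new entry point enables Sentry only when `SENTRY_DSN` is set, mounts the completions router, and normalises every `HTTPException` into a `{"detail": ...}` JSON body. A small sketch of how that handler behaves, using a hypothetical route that is not part of this diff:

```python
from fastapi import HTTPException
from fastapi.testclient import TestClient

from main import app  # the app defined above


# Hypothetical route, added only to exercise the exception handler.
@app.get("/missing")
async def missing():
    raise HTTPException(status_code=404, detail="Not found")


client = TestClient(app)
response = client.get("/missing")
assert response.status_code == 404
assert response.json() == {"detail": "Not found"}
```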


@ -0,0 +1,18 @@
from typing import List
from pydantic import BaseModel, Field
class Message(BaseModel):
role: str = Field(
...,
description="The role of the messages author. One of system, user, assistant, or function.",
)
content: str = Field(
...,
description="The contents of the message. content is required for all messages, and may be null for assistant messages with function calls.",
)
class Messages(BaseModel):
messages: List[Message]
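`Messages` simply wraps a list of chat `Message` objects in the OpenAI chat-completions shape, so a request body validates in one step. A minimal construction sketch, assuming the classes above are importable (their module path is not visible in this diff):

```python
# Assumes Message and Messages from the file above are in scope.
payload = {
    "messages": [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Summarise my latest upload."},
    ]
}

# Pydantic validates the nested structure and exposes typed attributes.
messages = Messages(**payload)
print(messages.messages[0].role)  # "system"
```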


@ -0,0 +1,11 @@
langchain==0.0.228
tiktoken==0.4.0
fastapi==0.95.2
uvicorn==0.22.0
transformers==4.30.1
asyncpg==0.27.0
flake8==6.0.0
flake8-black==0.3.6
sentence_transformers>=2.0.0
sentry-sdk==1.26.0
pyright==1.1.316

Some files were not shown because too many files have changed in this diff.