diff --git a/.gitignore b/.gitignore
index 66787ddfe..b64e13933 100644
--- a/.gitignore
+++ b/.gitignore
@@ -72,3 +72,4 @@ backend/share/*
 backend/slim.report.json
 volumes/db/data/
 volumes/storage/stub/stub/quivr/*
+supabase/migrations/20240103191539_private.sql
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
index bd3d2aeff..8e0a5b3cc 100644
--- a/.vscode/extensions.json
+++ b/.vscode/extensions.json
@@ -4,6 +4,7 @@
     "dbaeumer.vscode-eslint",
     "ms-python.vscode-pylance",
     "ms-pyright.pyright",
-    "inlang.vs-code-extension"
+    "inlang.vs-code-extension",
+    "denoland.vscode-deno"
   ]
 }
\ No newline at end of file
diff --git a/backend/models/databases/supabase/user_usage.py b/backend/models/databases/supabase/user_usage.py
index add60426b..bd97cb9ef 100644
--- a/backend/models/databases/supabase/user_usage.py
+++ b/backend/models/databases/supabase/user_usage.py
@@ -32,6 +32,17 @@ class UserUsage(Repository):
         """
         matching_customers = None
         try:
+            is_premium_user = (
+                self.db.from_("user_settings")
+                .select("is_premium")
+                .filter("user_id", "eq", str(user_id))
+                .execute()
+                .data
+            )
+
+            if len(is_premium_user) > 0 and is_premium_user[0]["is_premium"]:
+                return True
+
             user_email_customer = (
                 self.db.from_("users")
                 .select("*")
@@ -42,20 +53,41 @@ class UserUsage(Repository):
             if len(user_email_customer) == 0:
                 return False
 
-            matching_customers = (
-                self.db.table("customers")
-                .select("email")
-                .filter("email", "eq", user_email_customer[0]["email"])
+            subscription_still_valid = (
+                self.db.from_("subscriptions")
+                .select("*")
+                .filter("customer", "eq", user_email_customer[0]["id"])
+                .filter("current_period_end", "gte", datetime.now())
                 .execute()
             ).data
+
+            if len(subscription_still_valid) > 0:
+                matching_customers = (
+                    self.db.table("customers")
+                    .select("email")
+                    .filter("email", "eq", user_email_customer[0]["email"])
+                    .execute()
+                ).data
+
+                if len(matching_customers) > 0:
+                    self.db.table("user_settings").update({"is_premium": True}).match(
+                        {"user_id": str(user_id)}
+                    ).execute()
+                    return True
+            else:
+                self.db.table("user_settings").update({"is_premium": False}).match(
+                    {"user_id": str(user_id)}
+                ).execute()
+                return False
+
        except Exception as e:
             logger.info(matching_customers)
-            logger.error("Error while checking if user is a premium user")
+            logger.error(
+                "Error while checking if user is a premium user. Stripe needs to be configured."
+ ) logger.error(e) return False - return len(matching_customers) > 0 - def get_user_settings(self, user_id): """ Fetch the user settings from the database @@ -80,11 +112,10 @@ class UserUsage(Repository): raise ValueError("User settings could not be created") user_settings = user_settings_response[0] - user_settings["is_premium"] = False - is_premium_user = self.check_if_is_premium_user(user_id) - if is_premium_user: - user_settings["is_premium"] = True + check_is_premium = self.check_if_is_premium_user(user_id) + + if check_is_premium: user_settings["max_brains"] = int( os.environ.get("PREMIUM_MAX_BRAIN_NUMBER", 12) ) diff --git a/backend/modules/chat/controller/chat/utils.py b/backend/modules/chat/controller/chat/utils.py index 20d41e172..b7f139eec 100644 --- a/backend/modules/chat/controller/chat/utils.py +++ b/backend/modules/chat/controller/chat/utils.py @@ -35,7 +35,7 @@ def check_user_requests_limit( if int(userDailyUsage.daily_requests_count) >= int(daily_chat_credit): raise HTTPException( status_code=429, # pyright: ignore reportPrivateUsage=none - detail="You have reached the maximum number of requests for today.", # pyright: ignore reportPrivateUsage=none + detail=f"You have reached your daily chat limit of {daily_chat_credit} requests per day. Please upgrade your plan to increase your daily chat limit.", ) else: pass diff --git a/docker-compose-dev-only-back-saas-supabase.yml b/docker-compose-dev-only-back-saas-supabase.yml deleted file mode 100644 index 0dac5cc8f..000000000 --- a/docker-compose-dev-only-back-saas-supabase.yml +++ /dev/null @@ -1,83 +0,0 @@ -version: "3.8" - -services: - backend-core: - image: backend-base - pull_policy: never - env_file: - - .env - build: - context: backend - dockerfile: Dockerfile.dev - container_name: backend-core - healthcheck: - test: [ "CMD", "curl", "http://localhost:5050/healthz" ] - command: - - "uvicorn" - - "main:app" - - "--reload" - - "--host" - - "0.0.0.0" - - "--port" - - "5050" - - "--workers" - - "1" - restart: always - volumes: - - ./backend/:/code/ - ports: - - 5050:5050 - - redis: - image: redis:latest - container_name: redis - restart: always - ports: - - 6379:6379 - - - worker: - pull_policy: never - image: backend-base - env_file: - - .env - build: - context: backend - dockerfile: Dockerfile.dev - container_name: worker - command: celery -A celery_worker worker -l info - restart: always - depends_on: - - redis - - beat: - image: backend-base - pull_policy: never - env_file: - - .env - build: - context: backend - dockerfile: Dockerfile.dev - container_name: beat - command: celery -A celery_worker beat -l info - restart: always - depends_on: - - redis - - flower: - image: backend-base - pull_policy: never - env_file: - - .env - build: - context: backend - dockerfile: Dockerfile.dev - container_name: flower - command: celery -A celery_worker flower -l info --port=5555 - restart: always - depends_on: - - redis - - worker - - beat - ports: - - 5555:5555 \ No newline at end of file diff --git a/docker-compose-dev-saas-supabase.yml b/docker-compose-dev-saas-supabase.yml deleted file mode 100644 index 3d61c7319..000000000 --- a/docker-compose-dev-saas-supabase.yml +++ /dev/null @@ -1,106 +0,0 @@ -version: "3.8" - -services: - frontend: - pull_policy: never - env_file: - - .env - build: - context: frontend - dockerfile: Dockerfile.dev - args: - - NEXT_PUBLIC_ENV=local - - NEXT_PUBLIC_BACKEND_URL=${NEXT_PUBLIC_BACKEND_URL} - - NEXT_PUBLIC_SUPABASE_URL=${NEXT_PUBLIC_SUPABASE_URL} - - 
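For reference, the premium check that `user_usage.py` gains above boils down to a cache-then-verify flow: trust `user_settings.is_premium` when it is already set, otherwise verify an active Stripe subscription and write the result back so later calls take the fast path. The sketch below restates that flow using the same supabase-py query-builder calls that appear in the hunk; `db` and `user_id` are placeholders, the exact filter column on `users` is assumed, and the hunk's extra customers-by-email lookup is folded into a single subscription check. It is an illustration of the flow, not the method as merged.

```python
from datetime import datetime


def check_if_is_premium_user(db, user_id: str) -> bool:
    """Condensed sketch of the caching flow introduced above (illustrative only)."""
    # 1. Fast path: trust the cached flag on user_settings.
    settings = (
        db.from_("user_settings")
        .select("is_premium")
        .filter("user_id", "eq", str(user_id))
        .execute()
        .data
    )
    if settings and settings[0]["is_premium"]:
        return True

    # 2. Slow path: look the user up, then check the Stripe-backed
    #    subscriptions table for a period that has not ended yet.
    users = (
        db.from_("users")
        .select("*")
        .filter("id", "eq", str(user_id))  # assumed filter column
        .execute()
        .data
    )
    if not users:
        return False

    active_subscriptions = (
        db.from_("subscriptions")
        .select("*")
        .filter("customer", "eq", users[0]["id"])
        .filter("current_period_end", "gte", datetime.now())
        .execute()
        .data
    )

    # 3. Write the result back so the next call short-circuits on the cache.
    is_premium = bool(active_subscriptions)
    db.table("user_settings").update({"is_premium": is_premium}).match(
        {"user_id": str(user_id)}
    ).execute()
    return is_premium
```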
NEXT_PUBLIC_SUPABASE_ANON_KEY=${NEXT_PUBLIC_SUPABASE_ANON_KEY} - - NEXT_PUBLIC_CMS_URL=${NEXT_PUBLIC_CMS_URL} - - NEXT_PUBLIC_FRONTEND_URL=${NEXT_PUBLIC_FRONTEND_URL} - container_name: web - depends_on: - - backend-core - restart: always - volumes: - - ./frontend/:/app/ - ports: - - 3000:3000 - - - backend-core: - image: backend-base - pull_policy: never - env_file: - - .env - build: - context: backend - dockerfile: Dockerfile.dev - container_name: backend-core - healthcheck: - test: [ "CMD", "curl", "http://localhost:5050/healthz" ] - command: - - "uvicorn" - - "main:app" - - "--reload" - - "--host" - - "0.0.0.0" - - "--port" - - "5050" - - "--workers" - - "1" - restart: always - volumes: - - ./backend/:/code/ - ports: - - 5050:5050 - - redis: - image: redis:latest - container_name: redis - restart: always - ports: - - 6379:6379 - - worker: - pull_policy: never - image: backend-base - env_file: - - .env - build: - context: backend - dockerfile: Dockerfile.dev - container_name: worker - command: celery -A celery_worker worker -l info - restart: always - depends_on: - - redis - - beat: - image: backend-base - pull_policy: never - env_file: - - .env - build: - context: backend - dockerfile: Dockerfile.dev - container_name: beat - command: celery -A celery_worker beat -l info - restart: always - depends_on: - - redis - - flower: - image: backend-base - pull_policy: never - env_file: - - .env - build: - context: backend - dockerfile: Dockerfile.dev - container_name: flower - command: celery -A celery_worker flower -l info --port=5555 - restart: always - depends_on: - - redis - - worker - - beat - ports: - - 5555:5555 \ No newline at end of file diff --git a/docker-compose-no-frontend.dev.yml b/docker-compose-no-frontend.dev.yml deleted file mode 100644 index 5401121ab..000000000 --- a/docker-compose-no-frontend.dev.yml +++ /dev/null @@ -1,495 +0,0 @@ -version: "3.8" - -services: - backend-core: - image: backend-base - pull_policy: never - env_file: - - .env - build: - context: backend - dockerfile: Dockerfile.dev - args: - - DEV_MODE=true - container_name: backend-core - volumes: - - ./backend/:/code/ - command: - - "uvicorn" - - "main:app" - - "--reload" - - "--host" - - "0.0.0.0" - - "--port" - - "5050" - - "--workers" - - "2" - restart: always - depends_on: - db: - condition: service_healthy - kong: - condition: service_healthy - ports: - - 5050:5050 - - 5678:5678 # debug port - - redis: - image: redis:latest - container_name: redis - restart: always - ports: - - 6379:6379 - depends_on: - db: - condition: service_healthy - - worker: - pull_policy: never - image: backend-base - env_file: - - .env - build: - context: backend - dockerfile: Dockerfile - container_name: worker - command: celery -A celery_worker worker -l info - restart: always - depends_on: - - redis - - db - - beat: - image: backend-base - pull_policy: never - env_file: - - .env - build: - context: backend - dockerfile: Dockerfile - container_name: beat - command: celery -A celery_worker beat -l info - restart: always - depends_on: - - redis - - flower: - image: backend-base - pull_policy: never - env_file: - - .env - build: - context: backend - dockerfile: Dockerfile - container_name: flower - command: celery -A celery_worker flower -l info --port=5555 - restart: always - depends_on: - - redis - - worker - - beat - ports: - - 5555:5555 - - studio: - container_name: supabase-studio - image: supabase/studio:20231123-64a766a - restart: unless-stopped - healthcheck: - test: - [ - "CMD", - "node", - "-e", - 
"require('http').get('http://localhost:3000/api/profile', (r) => {if (r.statusCode !== 200) throw new Error(r.statusCode)})" - ] - timeout: 5s - interval: 5s - retries: 3 - depends_on: - analytics: - condition: service_healthy - environment: - STUDIO_PG_META_URL: http://meta:8080 - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - - DEFAULT_ORGANIZATION_NAME: ${STUDIO_DEFAULT_ORGANIZATION} - DEFAULT_PROJECT_NAME: ${STUDIO_DEFAULT_PROJECT} - - SUPABASE_URL: http://kong:8000 - SUPABASE_PUBLIC_URL: ${SUPABASE_PUBLIC_URL} - SUPABASE_ANON_KEY: ${ANON_KEY} - SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY} - - LOGFLARE_API_KEY: ${LOGFLARE_API_KEY} - LOGFLARE_URL: http://analytics:4000 - NEXT_PUBLIC_ENABLE_LOGS: true - # Comment to use Big Query backend for analytics - NEXT_ANALYTICS_BACKEND_PROVIDER: postgres - # Uncomment to use Big Query backend for analytics - # NEXT_ANALYTICS_BACKEND_PROVIDER: bigquery - - kong: - container_name: supabase-kong - image: kong:2.8.1 - restart: unless-stopped - # https://unix.stackexchange.com/a/294837 - entrypoint: bash -c 'eval "echo \"$$(cat ~/temp.yml)\"" > ~/kong.yml && /docker-entrypoint.sh kong docker-start' - ports: - - ${KONG_HTTP_PORT}:8000/tcp - - ${KONG_HTTPS_PORT}:8443/tcp - depends_on: - analytics: - condition: service_healthy - environment: - KONG_DATABASE: "off" - KONG_DECLARATIVE_CONFIG: /home/kong/kong.yml - # https://github.com/supabase/cli/issues/14 - KONG_DNS_ORDER: LAST,A,CNAME - KONG_PLUGINS: request-transformer,cors,key-auth,acl,basic-auth - KONG_NGINX_PROXY_PROXY_BUFFER_SIZE: 160k - KONG_NGINX_PROXY_PROXY_BUFFERS: 64 160k - SUPABASE_ANON_KEY: ${ANON_KEY} - SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY} - DASHBOARD_USERNAME: ${DASHBOARD_USERNAME} - DASHBOARD_PASSWORD: ${DASHBOARD_PASSWORD} - volumes: - # https://github.com/supabase/supabase/issues/12661 - - ./volumes/api/kong.yml:/home/kong/temp.yml:ro - - auth: - container_name: supabase-auth - image: supabase/gotrue:v2.99.0 - depends_on: - db: - # Disable this if you are using an external Postgres database - condition: service_healthy - analytics: - condition: service_healthy - healthcheck: - test: - [ - "CMD", - "wget", - "--no-verbose", - "--tries=1", - "--spider", - "http://localhost:9999/health" - ] - timeout: 5s - interval: 5s - retries: 3 - restart: unless-stopped - environment: - GOTRUE_API_HOST: 0.0.0.0 - GOTRUE_API_PORT: 9999 - API_EXTERNAL_URL: ${API_EXTERNAL_URL} - - GOTRUE_DB_DRIVER: postgres - GOTRUE_DB_DATABASE_URL: postgres://supabase_auth_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} - - GOTRUE_SITE_URL: ${SITE_URL} - GOTRUE_URI_ALLOW_LIST: ${ADDITIONAL_REDIRECT_URLS} - GOTRUE_DISABLE_SIGNUP: ${DISABLE_SIGNUP} - - GOTRUE_JWT_ADMIN_ROLES: service_role - GOTRUE_JWT_AUD: authenticated - GOTRUE_JWT_DEFAULT_GROUP_NAME: authenticated - GOTRUE_JWT_EXP: ${JWT_EXPIRY} - GOTRUE_JWT_SECRET: ${JWT_SECRET} - - GOTRUE_EXTERNAL_EMAIL_ENABLED: ${ENABLE_EMAIL_SIGNUP} - GOTRUE_MAILER_AUTOCONFIRM: ${ENABLE_EMAIL_AUTOCONFIRM} - # GOTRUE_MAILER_SECURE_EMAIL_CHANGE_ENABLED: true - # GOTRUE_SMTP_MAX_FREQUENCY: 1s - GOTRUE_SMTP_ADMIN_EMAIL: ${SMTP_ADMIN_EMAIL} - GOTRUE_SMTP_HOST: ${SMTP_HOST} - GOTRUE_SMTP_PORT: ${SMTP_PORT} - GOTRUE_SMTP_USER: ${SMTP_USER} - GOTRUE_SMTP_PASS: ${SMTP_PASS} - GOTRUE_SMTP_SENDER_NAME: ${SMTP_SENDER_NAME} - GOTRUE_MAILER_URLPATHS_INVITE: ${MAILER_URLPATHS_INVITE} - GOTRUE_MAILER_URLPATHS_CONFIRMATION: ${MAILER_URLPATHS_CONFIRMATION} - GOTRUE_MAILER_URLPATHS_RECOVERY: ${MAILER_URLPATHS_RECOVERY} - GOTRUE_MAILER_URLPATHS_EMAIL_CHANGE: 
${MAILER_URLPATHS_EMAIL_CHANGE} - - GOTRUE_EXTERNAL_PHONE_ENABLED: ${ENABLE_PHONE_SIGNUP} - GOTRUE_SMS_AUTOCONFIRM: ${ENABLE_PHONE_AUTOCONFIRM} - - rest: - container_name: supabase-rest - image: postgrest/postgrest:v11.2.2 - depends_on: - db: - # Disable this if you are using an external Postgres database - condition: service_healthy - analytics: - condition: service_healthy - restart: unless-stopped - environment: - PGRST_DB_URI: postgres://authenticator:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} - PGRST_DB_SCHEMAS: ${PGRST_DB_SCHEMAS} - PGRST_DB_ANON_ROLE: anon - PGRST_JWT_SECRET: ${JWT_SECRET} - PGRST_DB_USE_LEGACY_GUCS: "false" - PGRST_APP_SETTINGS_JWT_SECRET: ${JWT_SECRET} - PGRST_APP_SETTINGS_JWT_EXP: ${JWT_EXPIRY} - command: "postgrest" - - realtime: - # This container name looks inconsistent but is correct because realtime constructs tenant id by parsing the subdomain - container_name: realtime-dev.supabase-realtime - image: supabase/realtime:v2.25.35 - depends_on: - db: - # Disable this if you are using an external Postgres database - condition: service_healthy - analytics: - condition: service_healthy - healthcheck: - test: - [ - "CMD", - "bash", - "-c", - "printf \\0 > /dev/tcp/localhost/4000" - ] - timeout: 5s - interval: 5s - retries: 3 - restart: unless-stopped - environment: - PORT: 4000 - DB_HOST: ${POSTGRES_HOST} - DB_PORT: ${POSTGRES_PORT} - DB_USER: supabase_admin - DB_PASSWORD: ${POSTGRES_PASSWORD} - DB_NAME: ${POSTGRES_DB} - DB_AFTER_CONNECT_QUERY: 'SET search_path TO _realtime' - DB_ENC_KEY: supabaserealtime - API_JWT_SECRET: ${JWT_SECRET} - FLY_ALLOC_ID: fly123 - FLY_APP_NAME: realtime - SECRET_KEY_BASE: UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq - ERL_AFLAGS: -proto_dist inet_tcp - ENABLE_TAILSCALE: "false" - DNS_NODES: "''" - command: > - sh -c "/app/bin/migrate && /app/bin/realtime eval 'Realtime.Release.seeds(Realtime.Repo)' && /app/bin/server" - - storage: - container_name: supabase-storage - image: supabase/storage-api:v0.43.11 - depends_on: - db: - # Disable this if you are using an external Postgres database - condition: service_healthy - rest: - condition: service_started - imgproxy: - condition: service_started - healthcheck: - test: - [ - "CMD", - "wget", - "--no-verbose", - "--tries=1", - "--spider", - "http://localhost:5000/status" - ] - timeout: 5s - interval: 5s - retries: 3 - restart: unless-stopped - environment: - ANON_KEY: ${ANON_KEY} - SERVICE_KEY: ${SERVICE_ROLE_KEY} - POSTGREST_URL: http://rest:3000 - PGRST_JWT_SECRET: ${JWT_SECRET} - DATABASE_URL: postgres://supabase_storage_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} - FILE_SIZE_LIMIT: 52428800 - STORAGE_BACKEND: file - FILE_STORAGE_BACKEND_PATH: /var/lib/storage - TENANT_ID: stub - # TODO: https://github.com/supabase/storage-api/issues/55 - REGION: stub - GLOBAL_S3_BUCKET: stub - ENABLE_IMAGE_TRANSFORMATION: "true" - IMGPROXY_URL: http://imgproxy:5001 - volumes: - - ./volumes/storage:/var/lib/storage:z - - imgproxy: - container_name: supabase-imgproxy - image: darthsim/imgproxy:v3.8.0 - healthcheck: - test: [ "CMD", "imgproxy", "health" ] - timeout: 5s - interval: 5s - retries: 3 - environment: - IMGPROXY_BIND: ":5001" - IMGPROXY_LOCAL_FILESYSTEM_ROOT: / - IMGPROXY_USE_ETAG: "true" - IMGPROXY_ENABLE_WEBP_DETECTION: ${IMGPROXY_ENABLE_WEBP_DETECTION} - volumes: - - ./volumes/storage:/var/lib/storage:z - - meta: - container_name: supabase-meta - image: supabase/postgres-meta:v0.68.0 - depends_on: - db: - # Disable 
this if you are using an external Postgres database - condition: service_healthy - analytics: - condition: service_healthy - restart: unless-stopped - environment: - PG_META_PORT: 8080 - PG_META_DB_HOST: ${POSTGRES_HOST} - PG_META_DB_PORT: ${POSTGRES_PORT} - PG_META_DB_NAME: ${POSTGRES_DB} - PG_META_DB_USER: supabase_admin - PG_META_DB_PASSWORD: ${POSTGRES_PASSWORD} - - functions: - container_name: supabase-edge-functions - image: supabase/edge-runtime:v1.22.4 - restart: unless-stopped - depends_on: - analytics: - condition: service_healthy - environment: - JWT_SECRET: ${JWT_SECRET} - SUPABASE_URL: http://kong:8000 - SUPABASE_ANON_KEY: ${ANON_KEY} - SUPABASE_SERVICE_ROLE_KEY: ${SERVICE_ROLE_KEY} - SUPABASE_DB_URL: postgresql://postgres:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} - # TODO: Allow configuring VERIFY_JWT per function. This PR might help: https://github.com/supabase/cli/pull/786 - VERIFY_JWT: "${FUNCTIONS_VERIFY_JWT}" - volumes: - - ./volumes/functions:/home/deno/functions:Z - command: - - start - - --main-service - - /home/deno/functions/main - - analytics: - container_name: supabase-analytics - image: supabase/logflare:1.4.0 - healthcheck: - test: [ "CMD", "curl", "http://localhost:4000/health" ] - timeout: 5s - interval: 5s - retries: 10 - restart: unless-stopped - depends_on: - db: - # Disable this if you are using an external Postgres database - condition: service_healthy - # Uncomment to use Big Query backend for analytics - # volumes: - # - type: bind - # source: ${PWD}/gcloud.json - # target: /opt/app/rel/logflare/bin/gcloud.json - # read_only: true - environment: - LOGFLARE_NODE_HOST: 127.0.0.1 - DB_USERNAME: supabase_admin - DB_DATABASE: ${POSTGRES_DB} - DB_HOSTNAME: ${POSTGRES_HOST} - DB_PORT: ${POSTGRES_PORT} - DB_PASSWORD: ${POSTGRES_PASSWORD} - DB_SCHEMA: _analytics - LOGFLARE_API_KEY: ${LOGFLARE_API_KEY} - LOGFLARE_SINGLE_TENANT: true - LOGFLARE_SUPABASE_MODE: true - LOGFLARE_MIN_CLUSTER_SIZE: 1 - RELEASE_COOKIE: cookie - - # Comment variables to use Big Query backend for analytics - POSTGRES_BACKEND_URL: postgresql://supabase_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} - POSTGRES_BACKEND_SCHEMA: _analytics - LOGFLARE_FEATURE_FLAG_OVERRIDE: multibackend=true - - # Uncomment to use Big Query backend for analytics - # GOOGLE_PROJECT_ID: ${GOOGLE_PROJECT_ID} - # GOOGLE_PROJECT_NUMBER: ${GOOGLE_PROJECT_NUMBER} - ports: - - 4000:4000 - entrypoint: | - sh -c `cat <<'EOF' > run.sh && sh run.sh - ./logflare eval Logflare.Release.migrate - ./logflare start --sname logflare - EOF - ` - - # Comment out everything below this point if you are using an external Postgres database - db: - container_name: supabase-db - image: supabase/postgres:15.1.0.117 - healthcheck: - test: pg_isready -U postgres -h localhost - interval: 5s - timeout: 5s - retries: 10 - depends_on: - vector: - condition: service_healthy - command: - - postgres - - -c - - config_file=/etc/postgresql/postgresql.conf - - -c - - log_min_messages=fatal # prevents Realtime polling queries from appearing in logs - restart: unless-stopped - ports: - # Pass down internal port because it's set dynamically by other services - - ${POSTGRES_PORT}:${POSTGRES_PORT} - environment: - POSTGRES_HOST: /var/run/postgresql - PGPORT: ${POSTGRES_PORT} - POSTGRES_PORT: ${POSTGRES_PORT} - PGPASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - PGDATABASE: ${POSTGRES_DB} - POSTGRES_DB: ${POSTGRES_DB} - JWT_SECRET: ${JWT_SECRET} - JWT_EXP: ${JWT_EXPIRY} - 
volumes: - - ./volumes/db/realtime.sql:/docker-entrypoint-initdb.d/migrations/99-realtime.sql:Z - # Must be superuser to create event trigger - - ./volumes/db/webhooks.sql:/docker-entrypoint-initdb.d/init-scripts/98-webhooks.sql:Z - # Must be superuser to alter reserved role - - ./volumes/db/roles.sql:/docker-entrypoint-initdb.d/init-scripts/99-roles.sql:Z - # Initialize the database settings with JWT_SECRET and JWT_EXP - - ./volumes/db/jwt.sql:/docker-entrypoint-initdb.d/init-scripts/99-jwt.sql:Z - # PGDATA directory is persisted between restarts - - ./volumes/db/data:/var/lib/postgresql/data:Z - # Changes required for Analytics support - - ./volumes/db/logs.sql:/docker-entrypoint-initdb.d/migrations/99-logs.sql:Z - - ./scripts/tables.sql:/docker-entrypoint-initdb.d/seed.sql - - vector: - container_name: supabase-vector - image: timberio/vector:0.28.1-alpine - healthcheck: - test: - [ - "CMD", - "wget", - "--no-verbose", - "--tries=1", - "--spider", - "http://vector:9001/health" - ] - timeout: 5s - interval: 5s - retries: 3 - volumes: - - ./volumes/logs/vector.yml:/etc/vector/vector.yml:ro - - ${DOCKER_SOCKET_LOCATION}:/var/run/docker.sock:ro - - command: [ "--config", "etc/vector/vector.yml" ] \ No newline at end of file diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 096de332d..65b9f70d6 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -1,28 +1,6 @@ version: "3.8" services: - frontend: - pull_policy: never - env_file: - - .env - build: - context: frontend - dockerfile: Dockerfile.dev - args: - - NEXT_PUBLIC_ENV=local - - NEXT_PUBLIC_BACKEND_URL=${NEXT_PUBLIC_BACKEND_URL} - - NEXT_PUBLIC_SUPABASE_URL=${NEXT_PUBLIC_SUPABASE_URL} - - NEXT_PUBLIC_SUPABASE_ANON_KEY=${NEXT_PUBLIC_SUPABASE_ANON_KEY} - - NEXT_PUBLIC_CMS_URL=${NEXT_PUBLIC_CMS_URL} - - NEXT_PUBLIC_FRONTEND_URL=${NEXT_PUBLIC_FRONTEND_URL} - container_name: web - depends_on: - - backend-core - restart: always - ports: - - 3000:3000 - - backend-core: image: backend-base pull_policy: never @@ -36,7 +14,7 @@ services: container_name: backend-core volumes: - ./backend/:/code/ - command: + command: - "uvicorn" - "main:app" - "--reload" @@ -46,12 +24,8 @@ services: - "5050" - "--workers" - "2" - restart: always - depends_on: - db: - condition: service_healthy - kong: - condition: service_healthy + restart: always + ports: - 5050:5050 - 5678:5678 # debug port @@ -62,9 +36,6 @@ services: restart: always ports: - 6379:6379 - depends_on: - db: - condition: service_healthy worker: pull_policy: never @@ -79,7 +50,6 @@ services: restart: always depends_on: - redis - - db beat: image: backend-base @@ -112,406 +82,3 @@ services: - beat ports: - 5555:5555 - - studio: - container_name: supabase-studio - image: supabase/studio:20231123-64a766a - restart: unless-stopped - healthcheck: - test: - [ - "CMD", - "node", - "-e", - "require('http').get('http://localhost:3000/api/profile', (r) => {if (r.statusCode !== 200) throw new Error(r.statusCode)})" - ] - timeout: 5s - interval: 5s - retries: 3 - depends_on: - analytics: - condition: service_healthy - environment: - STUDIO_PG_META_URL: http://meta:8080 - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - - DEFAULT_ORGANIZATION_NAME: ${STUDIO_DEFAULT_ORGANIZATION} - DEFAULT_PROJECT_NAME: ${STUDIO_DEFAULT_PROJECT} - - SUPABASE_URL: http://kong:8000 - SUPABASE_PUBLIC_URL: ${SUPABASE_PUBLIC_URL} - SUPABASE_ANON_KEY: ${ANON_KEY} - SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY} - - LOGFLARE_API_KEY: ${LOGFLARE_API_KEY} - LOGFLARE_URL: http://analytics:4000 - NEXT_PUBLIC_ENABLE_LOGS: 
true - # Comment to use Big Query backend for analytics - NEXT_ANALYTICS_BACKEND_PROVIDER: postgres - # Uncomment to use Big Query backend for analytics - # NEXT_ANALYTICS_BACKEND_PROVIDER: bigquery - - kong: - container_name: supabase-kong - image: kong:2.8.1 - restart: unless-stopped - # https://unix.stackexchange.com/a/294837 - entrypoint: bash -c 'eval "echo \"$$(cat ~/temp.yml)\"" > ~/kong.yml && /docker-entrypoint.sh kong docker-start' - ports: - - ${KONG_HTTP_PORT}:8000/tcp - - ${KONG_HTTPS_PORT}:8443/tcp - depends_on: - analytics: - condition: service_healthy - environment: - KONG_DATABASE: "off" - KONG_DECLARATIVE_CONFIG: /home/kong/kong.yml - # https://github.com/supabase/cli/issues/14 - KONG_DNS_ORDER: LAST,A,CNAME - KONG_PLUGINS: request-transformer,cors,key-auth,acl,basic-auth - KONG_NGINX_PROXY_PROXY_BUFFER_SIZE: 160k - KONG_NGINX_PROXY_PROXY_BUFFERS: 64 160k - SUPABASE_ANON_KEY: ${ANON_KEY} - SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY} - DASHBOARD_USERNAME: ${DASHBOARD_USERNAME} - DASHBOARD_PASSWORD: ${DASHBOARD_PASSWORD} - volumes: - # https://github.com/supabase/supabase/issues/12661 - - ./volumes/api/kong.yml:/home/kong/temp.yml:ro - - auth: - container_name: supabase-auth - image: supabase/gotrue:v2.99.0 - depends_on: - db: - # Disable this if you are using an external Postgres database - condition: service_healthy - analytics: - condition: service_healthy - healthcheck: - test: - [ - "CMD", - "wget", - "--no-verbose", - "--tries=1", - "--spider", - "http://localhost:9999/health" - ] - timeout: 5s - interval: 5s - retries: 3 - restart: unless-stopped - environment: - GOTRUE_API_HOST: 0.0.0.0 - GOTRUE_API_PORT: 9999 - API_EXTERNAL_URL: ${API_EXTERNAL_URL} - - GOTRUE_DB_DRIVER: postgres - GOTRUE_DB_DATABASE_URL: postgres://supabase_auth_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} - - GOTRUE_SITE_URL: ${SITE_URL} - GOTRUE_URI_ALLOW_LIST: ${ADDITIONAL_REDIRECT_URLS} - GOTRUE_DISABLE_SIGNUP: ${DISABLE_SIGNUP} - - GOTRUE_JWT_ADMIN_ROLES: service_role - GOTRUE_JWT_AUD: authenticated - GOTRUE_JWT_DEFAULT_GROUP_NAME: authenticated - GOTRUE_JWT_EXP: ${JWT_EXPIRY} - GOTRUE_JWT_SECRET: ${JWT_SECRET} - - GOTRUE_EXTERNAL_EMAIL_ENABLED: ${ENABLE_EMAIL_SIGNUP} - GOTRUE_MAILER_AUTOCONFIRM: ${ENABLE_EMAIL_AUTOCONFIRM} - # GOTRUE_MAILER_SECURE_EMAIL_CHANGE_ENABLED: true - # GOTRUE_SMTP_MAX_FREQUENCY: 1s - GOTRUE_SMTP_ADMIN_EMAIL: ${SMTP_ADMIN_EMAIL} - GOTRUE_SMTP_HOST: ${SMTP_HOST} - GOTRUE_SMTP_PORT: ${SMTP_PORT} - GOTRUE_SMTP_USER: ${SMTP_USER} - GOTRUE_SMTP_PASS: ${SMTP_PASS} - GOTRUE_SMTP_SENDER_NAME: ${SMTP_SENDER_NAME} - GOTRUE_MAILER_URLPATHS_INVITE: ${MAILER_URLPATHS_INVITE} - GOTRUE_MAILER_URLPATHS_CONFIRMATION: ${MAILER_URLPATHS_CONFIRMATION} - GOTRUE_MAILER_URLPATHS_RECOVERY: ${MAILER_URLPATHS_RECOVERY} - GOTRUE_MAILER_URLPATHS_EMAIL_CHANGE: ${MAILER_URLPATHS_EMAIL_CHANGE} - - GOTRUE_EXTERNAL_PHONE_ENABLED: ${ENABLE_PHONE_SIGNUP} - GOTRUE_SMS_AUTOCONFIRM: ${ENABLE_PHONE_AUTOCONFIRM} - - rest: - container_name: supabase-rest - image: postgrest/postgrest:v11.2.2 - depends_on: - db: - # Disable this if you are using an external Postgres database - condition: service_healthy - analytics: - condition: service_healthy - restart: unless-stopped - environment: - PGRST_DB_URI: postgres://authenticator:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} - PGRST_DB_SCHEMAS: ${PGRST_DB_SCHEMAS} - PGRST_DB_ANON_ROLE: anon - PGRST_JWT_SECRET: ${JWT_SECRET} - PGRST_DB_USE_LEGACY_GUCS: "false" - PGRST_APP_SETTINGS_JWT_SECRET: ${JWT_SECRET} - 
PGRST_APP_SETTINGS_JWT_EXP: ${JWT_EXPIRY} - command: "postgrest" - - realtime: - # This container name looks inconsistent but is correct because realtime constructs tenant id by parsing the subdomain - container_name: realtime-dev.supabase-realtime - image: supabase/realtime:v2.25.35 - depends_on: - db: - # Disable this if you are using an external Postgres database - condition: service_healthy - analytics: - condition: service_healthy - healthcheck: - test: - [ - "CMD", - "bash", - "-c", - "printf \\0 > /dev/tcp/localhost/4000" - ] - timeout: 5s - interval: 5s - retries: 3 - restart: unless-stopped - environment: - PORT: 4000 - DB_HOST: ${POSTGRES_HOST} - DB_PORT: ${POSTGRES_PORT} - DB_USER: supabase_admin - DB_PASSWORD: ${POSTGRES_PASSWORD} - DB_NAME: ${POSTGRES_DB} - DB_AFTER_CONNECT_QUERY: 'SET search_path TO _realtime' - DB_ENC_KEY: supabaserealtime - API_JWT_SECRET: ${JWT_SECRET} - FLY_ALLOC_ID: fly123 - FLY_APP_NAME: realtime - SECRET_KEY_BASE: UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq - ERL_AFLAGS: -proto_dist inet_tcp - ENABLE_TAILSCALE: "false" - DNS_NODES: "''" - command: > - sh -c "/app/bin/migrate && /app/bin/realtime eval 'Realtime.Release.seeds(Realtime.Repo)' && /app/bin/server" - - storage: - container_name: supabase-storage - image: supabase/storage-api:v0.43.11 - depends_on: - db: - # Disable this if you are using an external Postgres database - condition: service_healthy - rest: - condition: service_started - imgproxy: - condition: service_started - healthcheck: - test: - [ - "CMD", - "wget", - "--no-verbose", - "--tries=1", - "--spider", - "http://localhost:5000/status" - ] - timeout: 5s - interval: 5s - retries: 3 - restart: unless-stopped - environment: - ANON_KEY: ${ANON_KEY} - SERVICE_KEY: ${SERVICE_ROLE_KEY} - POSTGREST_URL: http://rest:3000 - PGRST_JWT_SECRET: ${JWT_SECRET} - DATABASE_URL: postgres://supabase_storage_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} - FILE_SIZE_LIMIT: 52428800 - STORAGE_BACKEND: file - FILE_STORAGE_BACKEND_PATH: /var/lib/storage - TENANT_ID: stub - # TODO: https://github.com/supabase/storage-api/issues/55 - REGION: stub - GLOBAL_S3_BUCKET: stub - ENABLE_IMAGE_TRANSFORMATION: "true" - IMGPROXY_URL: http://imgproxy:5001 - volumes: - - ./volumes/storage:/var/lib/storage:z - - imgproxy: - container_name: supabase-imgproxy - image: darthsim/imgproxy:v3.8.0 - healthcheck: - test: [ "CMD", "imgproxy", "health" ] - timeout: 5s - interval: 5s - retries: 3 - environment: - IMGPROXY_BIND: ":5001" - IMGPROXY_LOCAL_FILESYSTEM_ROOT: / - IMGPROXY_USE_ETAG: "true" - IMGPROXY_ENABLE_WEBP_DETECTION: ${IMGPROXY_ENABLE_WEBP_DETECTION} - volumes: - - ./volumes/storage:/var/lib/storage:z - - meta: - container_name: supabase-meta - image: supabase/postgres-meta:v0.68.0 - depends_on: - db: - # Disable this if you are using an external Postgres database - condition: service_healthy - analytics: - condition: service_healthy - restart: unless-stopped - environment: - PG_META_PORT: 8080 - PG_META_DB_HOST: ${POSTGRES_HOST} - PG_META_DB_PORT: ${POSTGRES_PORT} - PG_META_DB_NAME: ${POSTGRES_DB} - PG_META_DB_USER: supabase_admin - PG_META_DB_PASSWORD: ${POSTGRES_PASSWORD} - - functions: - container_name: supabase-edge-functions - image: supabase/edge-runtime:v1.22.4 - restart: unless-stopped - depends_on: - analytics: - condition: service_healthy - environment: - JWT_SECRET: ${JWT_SECRET} - SUPABASE_URL: http://kong:8000 - SUPABASE_ANON_KEY: ${ANON_KEY} - SUPABASE_SERVICE_ROLE_KEY: ${SERVICE_ROLE_KEY} - 
SUPABASE_DB_URL: postgresql://postgres:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} - # TODO: Allow configuring VERIFY_JWT per function. This PR might help: https://github.com/supabase/cli/pull/786 - VERIFY_JWT: "${FUNCTIONS_VERIFY_JWT}" - volumes: - - ./volumes/functions:/home/deno/functions:Z - command: - - start - - --main-service - - /home/deno/functions/main - - analytics: - container_name: supabase-analytics - image: supabase/logflare:1.4.0 - healthcheck: - test: [ "CMD", "curl", "http://localhost:4000/health" ] - timeout: 5s - interval: 5s - retries: 10 - restart: unless-stopped - depends_on: - db: - # Disable this if you are using an external Postgres database - condition: service_healthy - # Uncomment to use Big Query backend for analytics - # volumes: - # - type: bind - # source: ${PWD}/gcloud.json - # target: /opt/app/rel/logflare/bin/gcloud.json - # read_only: true - environment: - LOGFLARE_NODE_HOST: 127.0.0.1 - DB_USERNAME: supabase_admin - DB_DATABASE: ${POSTGRES_DB} - DB_HOSTNAME: ${POSTGRES_HOST} - DB_PORT: ${POSTGRES_PORT} - DB_PASSWORD: ${POSTGRES_PASSWORD} - DB_SCHEMA: _analytics - LOGFLARE_API_KEY: ${LOGFLARE_API_KEY} - LOGFLARE_SINGLE_TENANT: true - LOGFLARE_SUPABASE_MODE: true - LOGFLARE_MIN_CLUSTER_SIZE: 1 - RELEASE_COOKIE: cookie - - # Comment variables to use Big Query backend for analytics - POSTGRES_BACKEND_URL: postgresql://supabase_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} - POSTGRES_BACKEND_SCHEMA: _analytics - LOGFLARE_FEATURE_FLAG_OVERRIDE: multibackend=true - - # Uncomment to use Big Query backend for analytics - # GOOGLE_PROJECT_ID: ${GOOGLE_PROJECT_ID} - # GOOGLE_PROJECT_NUMBER: ${GOOGLE_PROJECT_NUMBER} - ports: - - 4000:4000 - entrypoint: | - sh -c `cat <<'EOF' > run.sh && sh run.sh - ./logflare eval Logflare.Release.migrate - ./logflare start --sname logflare - EOF - ` - - # Comment out everything below this point if you are using an external Postgres database - db: - container_name: supabase-db - image: supabase/postgres:15.1.0.117 - healthcheck: - test: pg_isready -U postgres -h localhost - interval: 5s - timeout: 5s - retries: 10 - depends_on: - vector: - condition: service_healthy - command: - - postgres - - -c - - config_file=/etc/postgresql/postgresql.conf - - -c - - log_min_messages=fatal # prevents Realtime polling queries from appearing in logs - restart: unless-stopped - ports: - # Pass down internal port because it's set dynamically by other services - - ${POSTGRES_PORT}:${POSTGRES_PORT} - environment: - POSTGRES_HOST: /var/run/postgresql - PGPORT: ${POSTGRES_PORT} - POSTGRES_PORT: ${POSTGRES_PORT} - PGPASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - PGDATABASE: ${POSTGRES_DB} - POSTGRES_DB: ${POSTGRES_DB} - JWT_SECRET: ${JWT_SECRET} - JWT_EXP: ${JWT_EXPIRY} - volumes: - - ./volumes/db/realtime.sql:/docker-entrypoint-initdb.d/migrations/99-realtime.sql:Z - # Must be superuser to create event trigger - - ./volumes/db/webhooks.sql:/docker-entrypoint-initdb.d/init-scripts/98-webhooks.sql:Z - # Must be superuser to alter reserved role - - ./volumes/db/roles.sql:/docker-entrypoint-initdb.d/init-scripts/99-roles.sql:Z - # Initialize the database settings with JWT_SECRET and JWT_EXP - - ./volumes/db/jwt.sql:/docker-entrypoint-initdb.d/init-scripts/99-jwt.sql:Z - # PGDATA directory is persisted between restarts - - ./volumes/db/data:/var/lib/postgresql/data:Z - # Changes required for Analytics support - - 
./volumes/db/logs.sql:/docker-entrypoint-initdb.d/migrations/99-logs.sql:Z - - ./scripts/tables.sql:/docker-entrypoint-initdb.d/seed.sql - - vector: - container_name: supabase-vector - image: timberio/vector:0.28.1-alpine - healthcheck: - test: - [ - "CMD", - "wget", - "--no-verbose", - "--tries=1", - "--spider", - "http://vector:9001/health" - ] - timeout: 5s - interval: 5s - retries: 3 - volumes: - - ./volumes/logs/vector.yml:/etc/vector/vector.yml:ro - - ${DOCKER_SOCKET_LOCATION}:/var/run/docker.sock:ro - - command: [ "--config", "etc/vector/vector.yml" ] \ No newline at end of file diff --git a/supabase/.gitignore b/supabase/.gitignore new file mode 100644 index 000000000..a3ad88055 --- /dev/null +++ b/supabase/.gitignore @@ -0,0 +1,4 @@ +# Supabase +.branches +.temp +.env diff --git a/supabase/config.toml b/supabase/config.toml new file mode 100644 index 000000000..1cde6fac3 --- /dev/null +++ b/supabase/config.toml @@ -0,0 +1,149 @@ +# A string used to distinguish different Supabase projects on the same host. Defaults to the +# working directory name when running `supabase init`. +project_id = "secondbrain" + +[api] +enabled = true +# Port to use for the API URL. +port = 54321 +# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API +# endpoints. public and storage are always included. +schemas = ["public", "storage", "graphql_public"] +# Extra schemas to add to the search_path of every request. public is always included. +extra_search_path = ["public", "extensions"] +# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size +# for accidental or malicious requests. +max_rows = 1000 + +[db] +# Port to use for the local database URL. +port = 54322 +# Port used by db diff command to initialize the shadow database. +shadow_port = 54320 +# The database major version to use. This has to be the same as your remote database's. Run `SHOW +# server_version;` on the remote database to check. +major_version = 15 + +[db.pooler] +enabled = false +# Port to use for the local connection pooler. +port = 54329 +# Specifies when a server connection can be reused by other clients. +# Configure one of the supported pooler modes: `transaction`, `session`. +pool_mode = "transaction" +# How many server connections to allow per user/database pair. +default_pool_size = 20 +# Maximum number of client connections allowed. +max_client_conn = 100 + +[realtime] +enabled = true +# Bind realtime via either IPv4 or IPv6. (default: IPv6) +# ip_version = "IPv6" + +[studio] +enabled = true +# Port to use for Supabase Studio. +port = 54323 +# External URL of the API server that frontend connects to. +api_url = "http://localhost" + +# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they +# are monitored, and you can view the emails that would have been sent from the web interface. +[inbucket] +enabled = true +# Port to use for the email testing server web interface. +port = 54324 +# Uncomment to expose additional ports for testing user applications that send emails. +# smtp_port = 54325 +# pop3_port = 54326 + +[storage] +enabled = true +# The maximum file size allowed (e.g. "5MB", "500KB"). +file_size_limit = "50MiB" + +[auth] +enabled = true +# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used +# in emails. +site_url = "http://localhost:3000" +# A list of *exact* URLs that auth providers are permitted to redirect to post authentication. 
+additional_redirect_urls = ["https://localhost:3000"] +# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week). +jwt_expiry = 3600 +# If disabled, the refresh token will never expire. +enable_refresh_token_rotation = true +# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds. +# Requires enable_refresh_token_rotation = true. +refresh_token_reuse_interval = 10 +# Allow/disallow new user signups to your project. +enable_signup = true + +[auth.email] +# Allow/disallow new user signups via email to your project. +enable_signup = true +# If enabled, a user will be required to confirm any email change on both the old, and new email +# addresses. If disabled, only the new email is required to confirm. +double_confirm_changes = true +# If enabled, users need to confirm their email address before signing in. +enable_confirmations = false + +# Uncomment to customize email template +# [auth.email.template.invite] +# subject = "You have been invited" +# content_path = "./supabase/templates/invite.html" + +[auth.sms] +# Allow/disallow new user signups via SMS to your project. +enable_signup = true +# If enabled, users need to confirm their phone number before signing in. +enable_confirmations = false +# Template for sending OTP to users +template = "Your code is {{ .Code }} ." + +# Use pre-defined map of phone number to OTP for testing. +[auth.sms.test_otp] +# 4152127777 = "123456" + +# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`. +[auth.sms.twilio] +enabled = false +account_sid = "" +message_service_sid = "" +# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead: +auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)" + +# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`, +# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin`, `notion`, `twitch`, +# `twitter`, `slack`, `spotify`, `workos`, `zoom`. +[auth.external.apple] +enabled = false +client_id = "" +# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead: +secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)" +# Overrides the default auth redirectUrl. +redirect_uri = "" +# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure, +# or any other third-party OIDC providers. +url = "" + +[analytics] +enabled = false +port = 54327 +vector_port = 54328 +# Configure one of the supported backends: `postgres`, `bigquery`. +backend = "postgres" + +# Experimental features may be deprecated any time +[experimental] +# Configures Postgres storage engine to use OrioleDB (S3) +orioledb_version = "" +# Configures S3 bucket URL, eg. .s3-.amazonaws.com +s3_host = "env(S3_HOST)" +# Configures S3 bucket region, eg. 
us-east-1 +s3_region = "env(S3_REGION)" +# Configures AWS_ACCESS_KEY_ID for S3 bucket +s3_access_key = "env(S3_ACCESS_KEY)" +# Configures AWS_SECRET_ACCESS_KEY for S3 bucket +s3_secret_key = "env(S3_SECRET_KEY)" diff --git a/supabase/migrations/20240103173626_init.sql b/supabase/migrations/20240103173626_init.sql new file mode 100644 index 000000000..afee9fd24 --- /dev/null +++ b/supabase/migrations/20240103173626_init.sql @@ -0,0 +1,2102 @@ +create extension if not exists "vector" with schema "public" ; + +create extension if not exists "wrappers" with schema "public"; + +create type "public"."brain_type_enum" as enum ('doc', 'api', 'composite'); + +create sequence "public"."summaries_id_seq"; + +create sequence "public"."vectors_id_seq"; + +create table "public"."api_brain_definition" ( + "brain_id" uuid, + "method" character varying(255), + "url" character varying(255), + "params" json, + "search_params" json, + "secrets" json +); + + +create table "public"."api_keys" ( + "key_id" uuid not null default gen_random_uuid(), + "user_id" uuid, + "api_key" text, + "creation_time" timestamp without time zone default CURRENT_TIMESTAMP, + "deleted_time" timestamp without time zone, + "is_active" boolean default true, + "name" text default 'API_KEY'::text, + "days" integer default 30, + "only_chat" boolean default false +); + + +create table "public"."brain_subscription_invitations" ( + "brain_id" uuid not null, + "email" character varying(255) not null, + "rights" character varying(255) +); + + +create table "public"."brains" ( + "brain_id" uuid not null default gen_random_uuid(), + "name" text, + "status" text, + "model" text, + "max_tokens" integer, + "temperature" double precision, + "description" text, + "prompt_id" uuid, + "retrieval_algorithm" text, + "last_update" timestamp without time zone default CURRENT_TIMESTAMP, + "brain_type" brain_type_enum default 'doc'::brain_type_enum +); + + +create table "public"."brains_users" ( + "brain_id" uuid not null, + "rights" character varying(255), + "default_brain" boolean, + "user_id" uuid +); + + +create table "public"."brains_vectors" ( + "brain_id" uuid not null, + "rights" character varying(255), + "file_sha1" text, + "vector_id" uuid +); + + +create table "public"."chat_history" ( + "message_id" uuid not null default uuid_generate_v4(), + "chat_id" uuid not null, + "user_message" text, + "assistant" text, + "message_time" timestamp without time zone default CURRENT_TIMESTAMP, + "brain_id" uuid, + "prompt_id" uuid +); + + +create table "public"."chats" ( + "chat_id" uuid not null default uuid_generate_v4(), + "user_id" uuid, + "creation_time" timestamp without time zone default CURRENT_TIMESTAMP, + "history" jsonb, + "chat_name" text +); + + +create table "public"."composite_brain_connections" ( + "composite_brain_id" uuid not null, + "connected_brain_id" uuid not null +); + + +create table "public"."knowledge" ( + "id" uuid not null default gen_random_uuid(), + "file_name" text, + "url" text, + "brain_id" uuid not null, + "extension" text not null +); + + +create table "public"."knowledge_vectors" ( + "knowledge_id" uuid not null, + "vector_id" uuid not null, + "embedding_model" text not null +); + + +create table "public"."migrations" ( + "name" character varying(255) not null, + "executed_at" timestamp with time zone default CURRENT_TIMESTAMP +); + + +create table "public"."notifications" ( + "id" uuid not null default gen_random_uuid(), + "datetime" timestamp without time zone default CURRENT_TIMESTAMP, + "chat_id" uuid, + "message" text, 
+ "action" character varying(255) not null, + "status" character varying(255) not null +); + + +create table "public"."onboardings" ( + "user_id" uuid not null, + "onboarding_a" boolean not null default true, + "onboarding_b1" boolean not null default true, + "onboarding_b2" boolean not null default true, + "onboarding_b3" boolean not null default true, + "creation_time" timestamp without time zone default CURRENT_TIMESTAMP +); + + +create table "public"."prompts" ( + "id" uuid not null default uuid_generate_v4(), + "title" character varying(255), + "content" text, + "status" character varying(255) default 'private'::character varying +); + + +create table "public"."stats" ( + "time" timestamp without time zone, + "chat" boolean, + "embedding" boolean, + "details" text, + "metadata" jsonb, + "id" integer generated always as identity not null +); + + +create table "public"."summaries" ( + "id" bigint not null default nextval('summaries_id_seq'::regclass), + "content" text, + "metadata" jsonb, + "embedding" vector(1536), + "document_id" uuid +); + + +create table "public"."user_daily_usage" ( + "user_id" uuid not null, + "email" text, + "date" text not null, + "daily_requests_count" integer +); + + +create table "public"."user_identity" ( + "user_id" uuid not null, + "openai_api_key" character varying(255) +); + + +create table "public"."user_settings" ( + "user_id" uuid not null, + "models" jsonb default '["gpt-3.5-turbo-1106"]'::jsonb, + "daily_chat_credit" integer default 20, + "max_brains" integer default 3, + "max_brain_size" integer default 1000000 +); + + +create table "public"."users" ( + "id" uuid not null, + "email" text +); + + +create table "public"."users_old" ( + "user_id" uuid, + "email" text, + "date" text, + "requests_count" integer, + "supabase_id" uuid +); + + +create table "public"."vectors" ( + "id" uuid not null default uuid_generate_v4(), + "content" text, + "file_sha1" text, + "metadata" jsonb, + "embedding" vector(1536) +); + + +create table "public"."vectors_old" ( + "id" bigint not null default nextval('vectors_id_seq'::regclass), + "content" text, + "metadata" jsonb, + "embedding" vector(1536) +); + + +alter sequence "public"."summaries_id_seq" owned by "public"."summaries"."id"; + +alter sequence "public"."vectors_id_seq" owned by "public"."vectors_old"."id"; + +CREATE UNIQUE INDEX api_keys_api_key_key ON public.api_keys USING btree (api_key); + +CREATE UNIQUE INDEX api_keys_pkey ON public.api_keys USING btree (key_id); + +CREATE UNIQUE INDEX brain_subscription_invitations_pkey ON public.brain_subscription_invitations USING btree (brain_id, email); + +CREATE UNIQUE INDEX brains_pkey ON public.brains USING btree (brain_id); + +CREATE UNIQUE INDEX chat_history_pkey ON public.chat_history USING btree (chat_id, message_id); + +CREATE UNIQUE INDEX chats_pkey ON public.chats USING btree (chat_id); + +CREATE UNIQUE INDEX composite_brain_connections_pkey ON public.composite_brain_connections USING btree (composite_brain_id, connected_brain_id); + +CREATE UNIQUE INDEX knowledge_pkey ON public.knowledge USING btree (id); + +CREATE UNIQUE INDEX knowledge_vectors_pkey ON public.knowledge_vectors USING btree (knowledge_id, vector_id, embedding_model); + +CREATE UNIQUE INDEX migrations_pkey ON public.migrations USING btree (name); + +CREATE UNIQUE INDEX notifications_pkey ON public.notifications USING btree (id); + +CREATE UNIQUE INDEX onboardings_pkey ON public.onboardings USING btree (user_id); + +CREATE UNIQUE INDEX prompts_pkey ON public.prompts USING btree (id); + +CREATE 
UNIQUE INDEX stats_pkey ON public.stats USING btree (id); + +CREATE UNIQUE INDEX summaries_pkey ON public.summaries USING btree (id); + +CREATE UNIQUE INDEX user_daily_usage_pkey ON public.user_daily_usage USING btree (user_id, date); + +CREATE UNIQUE INDEX user_identity_pkey ON public.user_identity USING btree (user_id); + +CREATE UNIQUE INDEX user_settings_pkey ON public.user_settings USING btree (user_id); + +CREATE UNIQUE INDEX users_pkey ON public.users USING btree (id); + +CREATE UNIQUE INDEX vectors_pkey ON public.vectors_old USING btree (id); + +CREATE UNIQUE INDEX vectors_pkey1 ON public.vectors USING btree (id); + +alter table "public"."api_keys" add constraint "api_keys_pkey" PRIMARY KEY using index "api_keys_pkey"; + +alter table "public"."brain_subscription_invitations" add constraint "brain_subscription_invitations_pkey" PRIMARY KEY using index "brain_subscription_invitations_pkey"; + +alter table "public"."brains" add constraint "brains_pkey" PRIMARY KEY using index "brains_pkey"; + +alter table "public"."chat_history" add constraint "chat_history_pkey" PRIMARY KEY using index "chat_history_pkey"; + +alter table "public"."chats" add constraint "chats_pkey" PRIMARY KEY using index "chats_pkey"; + +alter table "public"."composite_brain_connections" add constraint "composite_brain_connections_pkey" PRIMARY KEY using index "composite_brain_connections_pkey"; + +alter table "public"."knowledge" add constraint "knowledge_pkey" PRIMARY KEY using index "knowledge_pkey"; + +alter table "public"."knowledge_vectors" add constraint "knowledge_vectors_pkey" PRIMARY KEY using index "knowledge_vectors_pkey"; + +alter table "public"."migrations" add constraint "migrations_pkey" PRIMARY KEY using index "migrations_pkey"; + +alter table "public"."notifications" add constraint "notifications_pkey" PRIMARY KEY using index "notifications_pkey"; + +alter table "public"."onboardings" add constraint "onboardings_pkey" PRIMARY KEY using index "onboardings_pkey"; + +alter table "public"."prompts" add constraint "prompts_pkey" PRIMARY KEY using index "prompts_pkey"; + +alter table "public"."stats" add constraint "stats_pkey" PRIMARY KEY using index "stats_pkey"; + +alter table "public"."summaries" add constraint "summaries_pkey" PRIMARY KEY using index "summaries_pkey"; + +alter table "public"."user_daily_usage" add constraint "user_daily_usage_pkey" PRIMARY KEY using index "user_daily_usage_pkey"; + +alter table "public"."user_identity" add constraint "user_identity_pkey" PRIMARY KEY using index "user_identity_pkey"; + +alter table "public"."user_settings" add constraint "user_settings_pkey" PRIMARY KEY using index "user_settings_pkey"; + +alter table "public"."users" add constraint "users_pkey" PRIMARY KEY using index "users_pkey"; + +alter table "public"."vectors" add constraint "vectors_pkey1" PRIMARY KEY using index "vectors_pkey1"; + +alter table "public"."vectors_old" add constraint "vectors_pkey" PRIMARY KEY using index "vectors_pkey"; + +alter table "public"."api_brain_definition" add constraint "api_brain_definition_brain_id_fkey" FOREIGN KEY (brain_id) REFERENCES brains(brain_id) not valid; + +alter table "public"."api_brain_definition" validate constraint "api_brain_definition_brain_id_fkey"; + +alter table "public"."api_brain_definition" add constraint "api_brain_definition_method_check" CHECK (((method)::text = ANY ((ARRAY['GET'::character varying, 'POST'::character varying, 'PUT'::character varying, 'DELETE'::character varying])::text[]))) not valid; + +alter table 
"public"."api_brain_definition" validate constraint "api_brain_definition_method_check"; + +alter table "public"."api_keys" add constraint "api_keys_api_key_key" UNIQUE using index "api_keys_api_key_key"; + +alter table "public"."brain_subscription_invitations" add constraint "brain_subscription_invitations_brain_id_fkey" FOREIGN KEY (brain_id) REFERENCES brains(brain_id) not valid; + +alter table "public"."brain_subscription_invitations" validate constraint "brain_subscription_invitations_brain_id_fkey"; + +alter table "public"."brains" add constraint "brains_prompt_id_fkey" FOREIGN KEY (prompt_id) REFERENCES prompts(id) not valid; + +alter table "public"."brains" validate constraint "brains_prompt_id_fkey"; + +alter table "public"."brains_users" add constraint "brains_users_brain_id_fkey" FOREIGN KEY (brain_id) REFERENCES brains(brain_id) not valid; + +alter table "public"."brains_users" validate constraint "brains_users_brain_id_fkey"; + +alter table "public"."brains_users" add constraint "brains_users_user_id_fkey" FOREIGN KEY (user_id) REFERENCES auth.users(id) not valid; + +alter table "public"."brains_users" validate constraint "brains_users_user_id_fkey"; + +alter table "public"."brains_vectors" add constraint "brains_vectors_brain_id_fkey" FOREIGN KEY (brain_id) REFERENCES brains(brain_id) not valid; + +alter table "public"."brains_vectors" validate constraint "brains_vectors_brain_id_fkey"; + +alter table "public"."chat_history" add constraint "chat_history_brain_id_fkey" FOREIGN KEY (brain_id) REFERENCES brains(brain_id) not valid; + +alter table "public"."chat_history" validate constraint "chat_history_brain_id_fkey"; + +alter table "public"."chat_history" add constraint "chat_history_chat_id_fkey" FOREIGN KEY (chat_id) REFERENCES chats(chat_id) not valid; + +alter table "public"."chat_history" validate constraint "chat_history_chat_id_fkey"; + +alter table "public"."chat_history" add constraint "chat_history_prompt_id_fkey" FOREIGN KEY (prompt_id) REFERENCES prompts(id) not valid; + +alter table "public"."chat_history" validate constraint "chat_history_prompt_id_fkey"; + +alter table "public"."chats" add constraint "chats_user_id_fkey" FOREIGN KEY (user_id) REFERENCES auth.users(id) not valid; + +alter table "public"."chats" validate constraint "chats_user_id_fkey"; + +alter table "public"."composite_brain_connections" add constraint "composite_brain_connections_check" CHECK ((composite_brain_id <> connected_brain_id)) not valid; + +alter table "public"."composite_brain_connections" validate constraint "composite_brain_connections_check"; + +alter table "public"."composite_brain_connections" add constraint "composite_brain_connections_composite_brain_id_fkey" FOREIGN KEY (composite_brain_id) REFERENCES brains(brain_id) not valid; + +alter table "public"."composite_brain_connections" validate constraint "composite_brain_connections_composite_brain_id_fkey"; + +alter table "public"."composite_brain_connections" add constraint "composite_brain_connections_connected_brain_id_fkey" FOREIGN KEY (connected_brain_id) REFERENCES brains(brain_id) not valid; + +alter table "public"."composite_brain_connections" validate constraint "composite_brain_connections_connected_brain_id_fkey"; + +alter table "public"."knowledge" add constraint "knowledge_brain_id_fkey" FOREIGN KEY (brain_id) REFERENCES brains(brain_id) not valid; + +alter table "public"."knowledge" validate constraint "knowledge_brain_id_fkey"; + +alter table "public"."knowledge" add constraint "knowledge_check" CHECK 
((((file_name IS NOT NULL) AND (url IS NULL)) OR ((file_name IS NULL) AND (url IS NOT NULL)))) not valid; + +alter table "public"."knowledge" validate constraint "knowledge_check"; + +alter table "public"."knowledge_vectors" add constraint "knowledge_vectors_knowledge_id_fkey" FOREIGN KEY (knowledge_id) REFERENCES knowledge(id) not valid; + +alter table "public"."knowledge_vectors" validate constraint "knowledge_vectors_knowledge_id_fkey"; + +alter table "public"."notifications" add constraint "notifications_chat_id_fkey" FOREIGN KEY (chat_id) REFERENCES chats(chat_id) not valid; + +alter table "public"."notifications" validate constraint "notifications_chat_id_fkey"; + +alter table "public"."onboardings" add constraint "onboardings_user_id_fkey" FOREIGN KEY (user_id) REFERENCES auth.users(id) not valid; + +alter table "public"."onboardings" validate constraint "onboardings_user_id_fkey"; + +alter table "public"."user_daily_usage" add constraint "user_daily_usage_user_id_fkey" FOREIGN KEY (user_id) REFERENCES auth.users(id) not valid; + +alter table "public"."user_daily_usage" validate constraint "user_daily_usage_user_id_fkey"; + +alter table "public"."users" add constraint "users_id_fkey" FOREIGN KEY (id) REFERENCES auth.users(id) not valid; + +alter table "public"."users" validate constraint "users_id_fkey"; + +set check_function_bodies = off; + +CREATE OR REPLACE FUNCTION public.create_user_onboarding() + RETURNS trigger + LANGUAGE plpgsql + SECURITY DEFINER +AS $function$ +BEGIN + INSERT INTO public.onboardings (user_id) + VALUES (NEW.id); + RETURN NEW; +END; +$function$ +; + +CREATE OR REPLACE FUNCTION public.delete_secret(secret_name text) + RETURNS text + LANGUAGE plpgsql + SECURITY DEFINER + SET search_path TO 'public' +AS $function$ +declare + deleted_rows int; +begin + delete from vault.decrypted_secrets where name = secret_name; + get diagnostics deleted_rows = row_count; + if deleted_rows = 0 then + return false; + else + return true; + end if; +end; +$function$ +; + +CREATE OR REPLACE FUNCTION public.get_premium_user(input_email text) + RETURNS TABLE(email text) + LANGUAGE plpgsql +AS $function$ +BEGIN +RETURN QUERY +SELECT c.email +FROM stripe.customers c +WHERE c.email = input_email; +END; +$function$ +; + +CREATE OR REPLACE FUNCTION public.get_user_email_by_user_id(user_id uuid) + RETURNS TABLE(email text) + LANGUAGE plpgsql + SECURITY DEFINER +AS $function$ +BEGIN + RETURN QUERY SELECT au.email::text FROM auth.users au WHERE au.id = user_id; +END; +$function$ +; + +CREATE OR REPLACE FUNCTION public.handle_new_user() + RETURNS trigger + LANGUAGE plpgsql + SECURITY DEFINER +AS $function$ +BEGIN + INSERT INTO public.users (id, email) + VALUES (NEW.id, NEW.email); + RETURN NEW; +END; +$function$ +; + +CREATE OR REPLACE FUNCTION public.insert_secret(name text, secret text) + RETURNS uuid + LANGUAGE plpgsql + SECURITY DEFINER + SET search_path TO 'public' +AS $function$ +begin + return vault.create_secret(secret, name); +end; +$function$ +; + +CREATE OR REPLACE FUNCTION public.match_vectors(query_embedding vector, match_count integer, p_brain_id uuid) + RETURNS TABLE(id uuid, brain_id uuid, content text, metadata jsonb, embedding vector, similarity double precision) + LANGUAGE plpgsql +AS $function$ +#variable_conflict use_column +BEGIN + RETURN QUERY + SELECT + vectors.id, + brains_vectors.brain_id, + vectors.content, + vectors.metadata, + vectors.embedding, + 1 - (vectors.embedding <=> query_embedding) AS similarity + FROM + vectors + INNER JOIN + brains_vectors ON vectors.id 
= brains_vectors.vector_id + WHERE brains_vectors.brain_id = p_brain_id + ORDER BY + vectors.embedding <=> query_embedding + LIMIT match_count; +END; +$function$ +; + +CREATE OR REPLACE FUNCTION public.read_secret(secret_name text) + RETURNS text + LANGUAGE plpgsql + SECURITY DEFINER + SET search_path TO 'public' +AS $function$ +declare + secret text; +begin + select decrypted_secret from vault.decrypted_secrets where name = + secret_name into secret; + return secret; +end; +$function$ +; + +CREATE OR REPLACE FUNCTION public.update_max_brains() + RETURNS trigger + LANGUAGE plpgsql + SECURITY DEFINER +AS $function$ +DECLARE + userEmail TEXT; +BEGIN + SELECT email INTO userEmail FROM auth.users WHERE id = NEW.user_id; + + IF userEmail LIKE '%@theodo.fr' THEN + -- Ensure the models column is initialized as an array if null + IF NEW.models IS NULL THEN + NEW.models := '[]'::jsonb; + END IF; + + -- Add gpt-4 if not present + IF NOT NEW.models ? 'gpt-4' THEN + NEW.models := NEW.models || '["gpt-4"]'::jsonb; + END IF; + + -- Add gpt-3.5-turbo if not present + IF NOT NEW.models ? 'gpt-3.5-turbo' THEN + NEW.models := NEW.models || '["gpt-3.5-turbo"]'::jsonb; + END IF; + + -- Add gpt-3.5-turbo-16k if not present + IF NOT NEW.models ? 'gpt-3.5-turbo-16k' THEN + NEW.models := NEW.models || '["gpt-3.5-turbo-16k"]'::jsonb; + END IF; + + UPDATE user_settings + SET + max_brains = 30, + max_brain_size = 10000000, + models = NEW.models + WHERE user_id = NEW.user_id; + END IF; + + RETURN NULL; -- for AFTER triggers, the return value is ignored +END; +$function$ +; + +CREATE OR REPLACE FUNCTION public.update_max_brains_theodo() + RETURNS trigger + LANGUAGE plpgsql + SECURITY DEFINER +AS $function$ +DECLARE + userEmail TEXT; + allowedDomains TEXT[] := ARRAY['@theodo.fr', '@theodo.com', '@theodo.co.uk', '@bam.tech', '@padok.fr', '@sicara.fr', '@hokla.com', '@sipios.com']; +BEGIN + SELECT email INTO userEmail FROM auth.users WHERE id = NEW.user_id; + + IF userEmail LIKE ANY(allowedDomains) THEN + -- Ensure the models column is initialized as an array if null + IF NEW.models IS NULL THEN + NEW.models := '[]'::jsonb; + END IF; + + -- Add gpt-4 if not present + IF NOT NEW.models ? 'gpt-4' THEN + NEW.models := NEW.models || '["gpt-4"]'::jsonb; + END IF; + + -- Add gpt-3.5-turbo if not present + IF NOT NEW.models ? 'gpt-3.5-turbo' THEN + NEW.models := NEW.models || '["gpt-3.5-turbo"]'::jsonb; + END IF; + + -- Add gpt-3.5-turbo-16k if not present + IF NOT NEW.models ? 'gpt-3.5-turbo-16k' THEN + NEW.models := NEW.models || '["gpt-3.5-turbo-16k"]'::jsonb; + END IF; + + UPDATE user_settings + SET + max_brains = 30, + max_brain_size = 100000000, + + models = NEW.models + WHERE user_id = NEW.user_id; + END IF; + + RETURN NULL; -- for AFTER triggers, the return value is ignored +END; +$function$ +; + +CREATE OR REPLACE FUNCTION public.update_user_settings() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ +BEGIN + IF NEW.email LIKE '%@theodo.fr' THEN + -- This checks if the models key is present and is of type jsonb array, + -- if not it initializes it with an empty array. + IF NEW.models IS NULL OR NOT jsonb_typeof(NEW.models) = 'array' THEN + NEW.models := '[]'::jsonb; + END IF; + + -- Append new values to the JSONB array. + -- This does not check for duplicates, so you might get repeated values. 
+ NEW.models := NEW.models || '["gpt-4", "gpt-3.5-turbo"]'::jsonb; + END IF; + RETURN NEW; +END; +$function$ +; + +grant delete on table "public"."api_brain_definition" to "anon"; + +grant insert on table "public"."api_brain_definition" to "anon"; + +grant references on table "public"."api_brain_definition" to "anon"; + +grant select on table "public"."api_brain_definition" to "anon"; + +grant trigger on table "public"."api_brain_definition" to "anon"; + +grant truncate on table "public"."api_brain_definition" to "anon"; + +grant update on table "public"."api_brain_definition" to "anon"; + +grant delete on table "public"."api_brain_definition" to "authenticated"; + +grant insert on table "public"."api_brain_definition" to "authenticated"; + +grant references on table "public"."api_brain_definition" to "authenticated"; + +grant select on table "public"."api_brain_definition" to "authenticated"; + +grant trigger on table "public"."api_brain_definition" to "authenticated"; + +grant truncate on table "public"."api_brain_definition" to "authenticated"; + +grant update on table "public"."api_brain_definition" to "authenticated"; + +grant delete on table "public"."api_brain_definition" to "service_role"; + +grant insert on table "public"."api_brain_definition" to "service_role"; + +grant references on table "public"."api_brain_definition" to "service_role"; + +grant select on table "public"."api_brain_definition" to "service_role"; + +grant trigger on table "public"."api_brain_definition" to "service_role"; + +grant truncate on table "public"."api_brain_definition" to "service_role"; + +grant update on table "public"."api_brain_definition" to "service_role"; + +grant delete on table "public"."api_keys" to "anon"; + +grant insert on table "public"."api_keys" to "anon"; + +grant references on table "public"."api_keys" to "anon"; + +grant select on table "public"."api_keys" to "anon"; + +grant trigger on table "public"."api_keys" to "anon"; + +grant truncate on table "public"."api_keys" to "anon"; + +grant update on table "public"."api_keys" to "anon"; + +grant delete on table "public"."api_keys" to "authenticated"; + +grant insert on table "public"."api_keys" to "authenticated"; + +grant references on table "public"."api_keys" to "authenticated"; + +grant select on table "public"."api_keys" to "authenticated"; + +grant trigger on table "public"."api_keys" to "authenticated"; + +grant truncate on table "public"."api_keys" to "authenticated"; + +grant update on table "public"."api_keys" to "authenticated"; + +grant delete on table "public"."api_keys" to "service_role"; + +grant insert on table "public"."api_keys" to "service_role"; + +grant references on table "public"."api_keys" to "service_role"; + +grant select on table "public"."api_keys" to "service_role"; + +grant trigger on table "public"."api_keys" to "service_role"; + +grant truncate on table "public"."api_keys" to "service_role"; + +grant update on table "public"."api_keys" to "service_role"; + +grant delete on table "public"."brain_subscription_invitations" to "anon"; + +grant insert on table "public"."brain_subscription_invitations" to "anon"; + +grant references on table "public"."brain_subscription_invitations" to "anon"; + +grant select on table "public"."brain_subscription_invitations" to "anon"; + +grant trigger on table "public"."brain_subscription_invitations" to "anon"; + +grant truncate on table "public"."brain_subscription_invitations" to "anon"; + +grant update on table "public"."brain_subscription_invitations" to "anon"; + 
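The match_vectors function defined above is the retrieval entry point for this schema: it joins vectors to brains_vectors, restricts to the requested brain, and ranks rows by the pgvector distance operator (<=>), returning 1 - distance as the similarity score. A minimal usage sketch, assuming a query embedding has already been computed; the vector literal and brain id below are abbreviated placeholders, not values from this migration:

-- Fetch the 5 most similar chunks for one brain (placeholder inputs only).
SELECT id, content, similarity
FROM public.match_vectors(
    '[0.012, -0.034, 0.056]'::vector,                -- placeholder embedding (normally 1536 dims)
    5,                                               -- match_count
    '00000000-0000-0000-0000-000000000000'::uuid     -- placeholder brain_id
);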
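Likewise, insert_secret, read_secret, and delete_secret are thin SECURITY DEFINER wrappers around Supabase Vault (vault.create_secret and vault.decrypted_secrets), so callers never need direct access to the vault schema. A short usage sketch; the secret name and value are illustrative only:

-- Store, read back, then remove a secret through the wrappers above.
-- 'my_brain_api_key' and 'sk-placeholder' are example values, not real credentials.
SELECT public.insert_secret('my_brain_api_key', 'sk-placeholder');  -- returns the new secret's uuid
SELECT public.read_secret('my_brain_api_key');                      -- returns 'sk-placeholder'
SELECT public.delete_secret('my_brain_api_key');                    -- removes the entry from the vault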
+grant delete on table "public"."brain_subscription_invitations" to "authenticated"; + +grant insert on table "public"."brain_subscription_invitations" to "authenticated"; + +grant references on table "public"."brain_subscription_invitations" to "authenticated"; + +grant select on table "public"."brain_subscription_invitations" to "authenticated"; + +grant trigger on table "public"."brain_subscription_invitations" to "authenticated"; + +grant truncate on table "public"."brain_subscription_invitations" to "authenticated"; + +grant update on table "public"."brain_subscription_invitations" to "authenticated"; + +grant delete on table "public"."brain_subscription_invitations" to "service_role"; + +grant insert on table "public"."brain_subscription_invitations" to "service_role"; + +grant references on table "public"."brain_subscription_invitations" to "service_role"; + +grant select on table "public"."brain_subscription_invitations" to "service_role"; + +grant trigger on table "public"."brain_subscription_invitations" to "service_role"; + +grant truncate on table "public"."brain_subscription_invitations" to "service_role"; + +grant update on table "public"."brain_subscription_invitations" to "service_role"; + +grant delete on table "public"."brains" to "anon"; + +grant insert on table "public"."brains" to "anon"; + +grant references on table "public"."brains" to "anon"; + +grant select on table "public"."brains" to "anon"; + +grant trigger on table "public"."brains" to "anon"; + +grant truncate on table "public"."brains" to "anon"; + +grant update on table "public"."brains" to "anon"; + +grant delete on table "public"."brains" to "authenticated"; + +grant insert on table "public"."brains" to "authenticated"; + +grant references on table "public"."brains" to "authenticated"; + +grant select on table "public"."brains" to "authenticated"; + +grant trigger on table "public"."brains" to "authenticated"; + +grant truncate on table "public"."brains" to "authenticated"; + +grant update on table "public"."brains" to "authenticated"; + +grant delete on table "public"."brains" to "service_role"; + +grant insert on table "public"."brains" to "service_role"; + +grant references on table "public"."brains" to "service_role"; + +grant select on table "public"."brains" to "service_role"; + +grant trigger on table "public"."brains" to "service_role"; + +grant truncate on table "public"."brains" to "service_role"; + +grant update on table "public"."brains" to "service_role"; + +grant delete on table "public"."brains_users" to "anon"; + +grant insert on table "public"."brains_users" to "anon"; + +grant references on table "public"."brains_users" to "anon"; + +grant select on table "public"."brains_users" to "anon"; + +grant trigger on table "public"."brains_users" to "anon"; + +grant truncate on table "public"."brains_users" to "anon"; + +grant update on table "public"."brains_users" to "anon"; + +grant delete on table "public"."brains_users" to "authenticated"; + +grant insert on table "public"."brains_users" to "authenticated"; + +grant references on table "public"."brains_users" to "authenticated"; + +grant select on table "public"."brains_users" to "authenticated"; + +grant trigger on table "public"."brains_users" to "authenticated"; + +grant truncate on table "public"."brains_users" to "authenticated"; + +grant update on table "public"."brains_users" to "authenticated"; + +grant delete on table "public"."brains_users" to "service_role"; + +grant insert on table "public"."brains_users" to "service_role"; + 
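Each table in this migration receives the same seven privileges for the anon, authenticated, and service_role roles, which is why the generated grant statements repeat table by table. Assuming all seven privileges really are intended for all three roles, each per-table block is equivalent to the condensed form below (shown only to make the pattern explicit; the generated statements above are what actually run):

-- Illustrative condensed form of one per-table grant block.
GRANT DELETE, INSERT, REFERENCES, SELECT, TRIGGER, TRUNCATE, UPDATE
    ON TABLE "public"."brains_users"
    TO anon, authenticated, service_role;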
+grant references on table "public"."brains_users" to "service_role"; + +grant select on table "public"."brains_users" to "service_role"; + +grant trigger on table "public"."brains_users" to "service_role"; + +grant truncate on table "public"."brains_users" to "service_role"; + +grant update on table "public"."brains_users" to "service_role"; + +grant delete on table "public"."brains_vectors" to "anon"; + +grant insert on table "public"."brains_vectors" to "anon"; + +grant references on table "public"."brains_vectors" to "anon"; + +grant select on table "public"."brains_vectors" to "anon"; + +grant trigger on table "public"."brains_vectors" to "anon"; + +grant truncate on table "public"."brains_vectors" to "anon"; + +grant update on table "public"."brains_vectors" to "anon"; + +grant delete on table "public"."brains_vectors" to "authenticated"; + +grant insert on table "public"."brains_vectors" to "authenticated"; + +grant references on table "public"."brains_vectors" to "authenticated"; + +grant select on table "public"."brains_vectors" to "authenticated"; + +grant trigger on table "public"."brains_vectors" to "authenticated"; + +grant truncate on table "public"."brains_vectors" to "authenticated"; + +grant update on table "public"."brains_vectors" to "authenticated"; + +grant delete on table "public"."brains_vectors" to "service_role"; + +grant insert on table "public"."brains_vectors" to "service_role"; + +grant references on table "public"."brains_vectors" to "service_role"; + +grant select on table "public"."brains_vectors" to "service_role"; + +grant trigger on table "public"."brains_vectors" to "service_role"; + +grant truncate on table "public"."brains_vectors" to "service_role"; + +grant update on table "public"."brains_vectors" to "service_role"; + +grant delete on table "public"."chat_history" to "anon"; + +grant insert on table "public"."chat_history" to "anon"; + +grant references on table "public"."chat_history" to "anon"; + +grant select on table "public"."chat_history" to "anon"; + +grant trigger on table "public"."chat_history" to "anon"; + +grant truncate on table "public"."chat_history" to "anon"; + +grant update on table "public"."chat_history" to "anon"; + +grant delete on table "public"."chat_history" to "authenticated"; + +grant insert on table "public"."chat_history" to "authenticated"; + +grant references on table "public"."chat_history" to "authenticated"; + +grant select on table "public"."chat_history" to "authenticated"; + +grant trigger on table "public"."chat_history" to "authenticated"; + +grant truncate on table "public"."chat_history" to "authenticated"; + +grant update on table "public"."chat_history" to "authenticated"; + +grant delete on table "public"."chat_history" to "service_role"; + +grant insert on table "public"."chat_history" to "service_role"; + +grant references on table "public"."chat_history" to "service_role"; + +grant select on table "public"."chat_history" to "service_role"; + +grant trigger on table "public"."chat_history" to "service_role"; + +grant truncate on table "public"."chat_history" to "service_role"; + +grant update on table "public"."chat_history" to "service_role"; + +grant delete on table "public"."chats" to "anon"; + +grant insert on table "public"."chats" to "anon"; + +grant references on table "public"."chats" to "anon"; + +grant select on table "public"."chats" to "anon"; + +grant trigger on table "public"."chats" to "anon"; + +grant truncate on table "public"."chats" to "anon"; + +grant update on table "public"."chats" 
to "anon"; + +grant delete on table "public"."chats" to "authenticated"; + +grant insert on table "public"."chats" to "authenticated"; + +grant references on table "public"."chats" to "authenticated"; + +grant select on table "public"."chats" to "authenticated"; + +grant trigger on table "public"."chats" to "authenticated"; + +grant truncate on table "public"."chats" to "authenticated"; + +grant update on table "public"."chats" to "authenticated"; + +grant delete on table "public"."chats" to "service_role"; + +grant insert on table "public"."chats" to "service_role"; + +grant references on table "public"."chats" to "service_role"; + +grant select on table "public"."chats" to "service_role"; + +grant trigger on table "public"."chats" to "service_role"; + +grant truncate on table "public"."chats" to "service_role"; + +grant update on table "public"."chats" to "service_role"; + +grant delete on table "public"."composite_brain_connections" to "anon"; + +grant insert on table "public"."composite_brain_connections" to "anon"; + +grant references on table "public"."composite_brain_connections" to "anon"; + +grant select on table "public"."composite_brain_connections" to "anon"; + +grant trigger on table "public"."composite_brain_connections" to "anon"; + +grant truncate on table "public"."composite_brain_connections" to "anon"; + +grant update on table "public"."composite_brain_connections" to "anon"; + +grant delete on table "public"."composite_brain_connections" to "authenticated"; + +grant insert on table "public"."composite_brain_connections" to "authenticated"; + +grant references on table "public"."composite_brain_connections" to "authenticated"; + +grant select on table "public"."composite_brain_connections" to "authenticated"; + +grant trigger on table "public"."composite_brain_connections" to "authenticated"; + +grant truncate on table "public"."composite_brain_connections" to "authenticated"; + +grant update on table "public"."composite_brain_connections" to "authenticated"; + +grant delete on table "public"."composite_brain_connections" to "service_role"; + +grant insert on table "public"."composite_brain_connections" to "service_role"; + +grant references on table "public"."composite_brain_connections" to "service_role"; + +grant select on table "public"."composite_brain_connections" to "service_role"; + +grant trigger on table "public"."composite_brain_connections" to "service_role"; + +grant truncate on table "public"."composite_brain_connections" to "service_role"; + +grant update on table "public"."composite_brain_connections" to "service_role"; + +grant delete on table "public"."knowledge" to "anon"; + +grant insert on table "public"."knowledge" to "anon"; + +grant references on table "public"."knowledge" to "anon"; + +grant select on table "public"."knowledge" to "anon"; + +grant trigger on table "public"."knowledge" to "anon"; + +grant truncate on table "public"."knowledge" to "anon"; + +grant update on table "public"."knowledge" to "anon"; + +grant delete on table "public"."knowledge" to "authenticated"; + +grant insert on table "public"."knowledge" to "authenticated"; + +grant references on table "public"."knowledge" to "authenticated"; + +grant select on table "public"."knowledge" to "authenticated"; + +grant trigger on table "public"."knowledge" to "authenticated"; + +grant truncate on table "public"."knowledge" to "authenticated"; + +grant update on table "public"."knowledge" to "authenticated"; + +grant delete on table "public"."knowledge" to "service_role"; + +grant 
insert on table "public"."knowledge" to "service_role"; + +grant references on table "public"."knowledge" to "service_role"; + +grant select on table "public"."knowledge" to "service_role"; + +grant trigger on table "public"."knowledge" to "service_role"; + +grant truncate on table "public"."knowledge" to "service_role"; + +grant update on table "public"."knowledge" to "service_role"; + +grant delete on table "public"."knowledge_vectors" to "anon"; + +grant insert on table "public"."knowledge_vectors" to "anon"; + +grant references on table "public"."knowledge_vectors" to "anon"; + +grant select on table "public"."knowledge_vectors" to "anon"; + +grant trigger on table "public"."knowledge_vectors" to "anon"; + +grant truncate on table "public"."knowledge_vectors" to "anon"; + +grant update on table "public"."knowledge_vectors" to "anon"; + +grant delete on table "public"."knowledge_vectors" to "authenticated"; + +grant insert on table "public"."knowledge_vectors" to "authenticated"; + +grant references on table "public"."knowledge_vectors" to "authenticated"; + +grant select on table "public"."knowledge_vectors" to "authenticated"; + +grant trigger on table "public"."knowledge_vectors" to "authenticated"; + +grant truncate on table "public"."knowledge_vectors" to "authenticated"; + +grant update on table "public"."knowledge_vectors" to "authenticated"; + +grant delete on table "public"."knowledge_vectors" to "service_role"; + +grant insert on table "public"."knowledge_vectors" to "service_role"; + +grant references on table "public"."knowledge_vectors" to "service_role"; + +grant select on table "public"."knowledge_vectors" to "service_role"; + +grant trigger on table "public"."knowledge_vectors" to "service_role"; + +grant truncate on table "public"."knowledge_vectors" to "service_role"; + +grant update on table "public"."knowledge_vectors" to "service_role"; + +grant delete on table "public"."migrations" to "anon"; + +grant insert on table "public"."migrations" to "anon"; + +grant references on table "public"."migrations" to "anon"; + +grant select on table "public"."migrations" to "anon"; + +grant trigger on table "public"."migrations" to "anon"; + +grant truncate on table "public"."migrations" to "anon"; + +grant update on table "public"."migrations" to "anon"; + +grant delete on table "public"."migrations" to "authenticated"; + +grant insert on table "public"."migrations" to "authenticated"; + +grant references on table "public"."migrations" to "authenticated"; + +grant select on table "public"."migrations" to "authenticated"; + +grant trigger on table "public"."migrations" to "authenticated"; + +grant truncate on table "public"."migrations" to "authenticated"; + +grant update on table "public"."migrations" to "authenticated"; + +grant delete on table "public"."migrations" to "service_role"; + +grant insert on table "public"."migrations" to "service_role"; + +grant references on table "public"."migrations" to "service_role"; + +grant select on table "public"."migrations" to "service_role"; + +grant trigger on table "public"."migrations" to "service_role"; + +grant truncate on table "public"."migrations" to "service_role"; + +grant update on table "public"."migrations" to "service_role"; + +grant delete on table "public"."notifications" to "anon"; + +grant insert on table "public"."notifications" to "anon"; + +grant references on table "public"."notifications" to "anon"; + +grant select on table "public"."notifications" to "anon"; + +grant trigger on table "public"."notifications" to 
"anon"; + +grant truncate on table "public"."notifications" to "anon"; + +grant update on table "public"."notifications" to "anon"; + +grant delete on table "public"."notifications" to "authenticated"; + +grant insert on table "public"."notifications" to "authenticated"; + +grant references on table "public"."notifications" to "authenticated"; + +grant select on table "public"."notifications" to "authenticated"; + +grant trigger on table "public"."notifications" to "authenticated"; + +grant truncate on table "public"."notifications" to "authenticated"; + +grant update on table "public"."notifications" to "authenticated"; + +grant delete on table "public"."notifications" to "service_role"; + +grant insert on table "public"."notifications" to "service_role"; + +grant references on table "public"."notifications" to "service_role"; + +grant select on table "public"."notifications" to "service_role"; + +grant trigger on table "public"."notifications" to "service_role"; + +grant truncate on table "public"."notifications" to "service_role"; + +grant update on table "public"."notifications" to "service_role"; + +grant delete on table "public"."onboardings" to "anon"; + +grant insert on table "public"."onboardings" to "anon"; + +grant references on table "public"."onboardings" to "anon"; + +grant select on table "public"."onboardings" to "anon"; + +grant trigger on table "public"."onboardings" to "anon"; + +grant truncate on table "public"."onboardings" to "anon"; + +grant update on table "public"."onboardings" to "anon"; + +grant delete on table "public"."onboardings" to "authenticated"; + +grant insert on table "public"."onboardings" to "authenticated"; + +grant references on table "public"."onboardings" to "authenticated"; + +grant select on table "public"."onboardings" to "authenticated"; + +grant trigger on table "public"."onboardings" to "authenticated"; + +grant truncate on table "public"."onboardings" to "authenticated"; + +grant update on table "public"."onboardings" to "authenticated"; + +grant delete on table "public"."onboardings" to "service_role"; + +grant insert on table "public"."onboardings" to "service_role"; + +grant references on table "public"."onboardings" to "service_role"; + +grant select on table "public"."onboardings" to "service_role"; + +grant trigger on table "public"."onboardings" to "service_role"; + +grant truncate on table "public"."onboardings" to "service_role"; + +grant update on table "public"."onboardings" to "service_role"; + +grant delete on table "public"."prompts" to "anon"; + +grant insert on table "public"."prompts" to "anon"; + +grant references on table "public"."prompts" to "anon"; + +grant select on table "public"."prompts" to "anon"; + +grant trigger on table "public"."prompts" to "anon"; + +grant truncate on table "public"."prompts" to "anon"; + +grant update on table "public"."prompts" to "anon"; + +grant delete on table "public"."prompts" to "authenticated"; + +grant insert on table "public"."prompts" to "authenticated"; + +grant references on table "public"."prompts" to "authenticated"; + +grant select on table "public"."prompts" to "authenticated"; + +grant trigger on table "public"."prompts" to "authenticated"; + +grant truncate on table "public"."prompts" to "authenticated"; + +grant update on table "public"."prompts" to "authenticated"; + +grant delete on table "public"."prompts" to "service_role"; + +grant insert on table "public"."prompts" to "service_role"; + +grant references on table "public"."prompts" to "service_role"; + +grant select 
on table "public"."prompts" to "service_role"; + +grant trigger on table "public"."prompts" to "service_role"; + +grant truncate on table "public"."prompts" to "service_role"; + +grant update on table "public"."prompts" to "service_role"; + +grant delete on table "public"."stats" to "anon"; + +grant insert on table "public"."stats" to "anon"; + +grant references on table "public"."stats" to "anon"; + +grant select on table "public"."stats" to "anon"; + +grant trigger on table "public"."stats" to "anon"; + +grant truncate on table "public"."stats" to "anon"; + +grant update on table "public"."stats" to "anon"; + +grant delete on table "public"."stats" to "authenticated"; + +grant insert on table "public"."stats" to "authenticated"; + +grant references on table "public"."stats" to "authenticated"; + +grant select on table "public"."stats" to "authenticated"; + +grant trigger on table "public"."stats" to "authenticated"; + +grant truncate on table "public"."stats" to "authenticated"; + +grant update on table "public"."stats" to "authenticated"; + +grant delete on table "public"."stats" to "service_role"; + +grant insert on table "public"."stats" to "service_role"; + +grant references on table "public"."stats" to "service_role"; + +grant select on table "public"."stats" to "service_role"; + +grant trigger on table "public"."stats" to "service_role"; + +grant truncate on table "public"."stats" to "service_role"; + +grant update on table "public"."stats" to "service_role"; + +grant delete on table "public"."summaries" to "anon"; + +grant insert on table "public"."summaries" to "anon"; + +grant references on table "public"."summaries" to "anon"; + +grant select on table "public"."summaries" to "anon"; + +grant trigger on table "public"."summaries" to "anon"; + +grant truncate on table "public"."summaries" to "anon"; + +grant update on table "public"."summaries" to "anon"; + +grant delete on table "public"."summaries" to "authenticated"; + +grant insert on table "public"."summaries" to "authenticated"; + +grant references on table "public"."summaries" to "authenticated"; + +grant select on table "public"."summaries" to "authenticated"; + +grant trigger on table "public"."summaries" to "authenticated"; + +grant truncate on table "public"."summaries" to "authenticated"; + +grant update on table "public"."summaries" to "authenticated"; + +grant delete on table "public"."summaries" to "service_role"; + +grant insert on table "public"."summaries" to "service_role"; + +grant references on table "public"."summaries" to "service_role"; + +grant select on table "public"."summaries" to "service_role"; + +grant trigger on table "public"."summaries" to "service_role"; + +grant truncate on table "public"."summaries" to "service_role"; + +grant update on table "public"."summaries" to "service_role"; + +grant delete on table "public"."user_daily_usage" to "anon"; + +grant insert on table "public"."user_daily_usage" to "anon"; + +grant references on table "public"."user_daily_usage" to "anon"; + +grant select on table "public"."user_daily_usage" to "anon"; + +grant trigger on table "public"."user_daily_usage" to "anon"; + +grant truncate on table "public"."user_daily_usage" to "anon"; + +grant update on table "public"."user_daily_usage" to "anon"; + +grant delete on table "public"."user_daily_usage" to "authenticated"; + +grant insert on table "public"."user_daily_usage" to "authenticated"; + +grant references on table "public"."user_daily_usage" to "authenticated"; + +grant select on table 
"public"."user_daily_usage" to "authenticated"; + +grant trigger on table "public"."user_daily_usage" to "authenticated"; + +grant truncate on table "public"."user_daily_usage" to "authenticated"; + +grant update on table "public"."user_daily_usage" to "authenticated"; + +grant delete on table "public"."user_daily_usage" to "service_role"; + +grant insert on table "public"."user_daily_usage" to "service_role"; + +grant references on table "public"."user_daily_usage" to "service_role"; + +grant select on table "public"."user_daily_usage" to "service_role"; + +grant trigger on table "public"."user_daily_usage" to "service_role"; + +grant truncate on table "public"."user_daily_usage" to "service_role"; + +grant update on table "public"."user_daily_usage" to "service_role"; + +grant delete on table "public"."user_identity" to "anon"; + +grant insert on table "public"."user_identity" to "anon"; + +grant references on table "public"."user_identity" to "anon"; + +grant select on table "public"."user_identity" to "anon"; + +grant trigger on table "public"."user_identity" to "anon"; + +grant truncate on table "public"."user_identity" to "anon"; + +grant update on table "public"."user_identity" to "anon"; + +grant delete on table "public"."user_identity" to "authenticated"; + +grant insert on table "public"."user_identity" to "authenticated"; + +grant references on table "public"."user_identity" to "authenticated"; + +grant select on table "public"."user_identity" to "authenticated"; + +grant trigger on table "public"."user_identity" to "authenticated"; + +grant truncate on table "public"."user_identity" to "authenticated"; + +grant update on table "public"."user_identity" to "authenticated"; + +grant delete on table "public"."user_identity" to "service_role"; + +grant insert on table "public"."user_identity" to "service_role"; + +grant references on table "public"."user_identity" to "service_role"; + +grant select on table "public"."user_identity" to "service_role"; + +grant trigger on table "public"."user_identity" to "service_role"; + +grant truncate on table "public"."user_identity" to "service_role"; + +grant update on table "public"."user_identity" to "service_role"; + +grant delete on table "public"."user_settings" to "anon"; + +grant insert on table "public"."user_settings" to "anon"; + +grant references on table "public"."user_settings" to "anon"; + +grant select on table "public"."user_settings" to "anon"; + +grant trigger on table "public"."user_settings" to "anon"; + +grant truncate on table "public"."user_settings" to "anon"; + +grant update on table "public"."user_settings" to "anon"; + +grant delete on table "public"."user_settings" to "authenticated"; + +grant insert on table "public"."user_settings" to "authenticated"; + +grant references on table "public"."user_settings" to "authenticated"; + +grant select on table "public"."user_settings" to "authenticated"; + +grant trigger on table "public"."user_settings" to "authenticated"; + +grant truncate on table "public"."user_settings" to "authenticated"; + +grant update on table "public"."user_settings" to "authenticated"; + +grant delete on table "public"."user_settings" to "service_role"; + +grant insert on table "public"."user_settings" to "service_role"; + +grant references on table "public"."user_settings" to "service_role"; + +grant select on table "public"."user_settings" to "service_role"; + +grant trigger on table "public"."user_settings" to "service_role"; + +grant truncate on table "public"."user_settings" to "service_role"; 
+ +grant update on table "public"."user_settings" to "service_role"; + +grant delete on table "public"."users" to "anon"; + +grant insert on table "public"."users" to "anon"; + +grant references on table "public"."users" to "anon"; + +grant select on table "public"."users" to "anon"; + +grant trigger on table "public"."users" to "anon"; + +grant truncate on table "public"."users" to "anon"; + +grant update on table "public"."users" to "anon"; + +grant delete on table "public"."users" to "authenticated"; + +grant insert on table "public"."users" to "authenticated"; + +grant references on table "public"."users" to "authenticated"; + +grant select on table "public"."users" to "authenticated"; + +grant trigger on table "public"."users" to "authenticated"; + +grant truncate on table "public"."users" to "authenticated"; + +grant update on table "public"."users" to "authenticated"; + +grant delete on table "public"."users" to "service_role"; + +grant insert on table "public"."users" to "service_role"; + +grant references on table "public"."users" to "service_role"; + +grant select on table "public"."users" to "service_role"; + +grant trigger on table "public"."users" to "service_role"; + +grant truncate on table "public"."users" to "service_role"; + +grant update on table "public"."users" to "service_role"; + +grant delete on table "public"."users_old" to "anon"; + +grant insert on table "public"."users_old" to "anon"; + +grant references on table "public"."users_old" to "anon"; + +grant select on table "public"."users_old" to "anon"; + +grant trigger on table "public"."users_old" to "anon"; + +grant truncate on table "public"."users_old" to "anon"; + +grant update on table "public"."users_old" to "anon"; + +grant delete on table "public"."users_old" to "authenticated"; + +grant insert on table "public"."users_old" to "authenticated"; + +grant references on table "public"."users_old" to "authenticated"; + +grant select on table "public"."users_old" to "authenticated"; + +grant trigger on table "public"."users_old" to "authenticated"; + +grant truncate on table "public"."users_old" to "authenticated"; + +grant update on table "public"."users_old" to "authenticated"; + +grant delete on table "public"."users_old" to "service_role"; + +grant insert on table "public"."users_old" to "service_role"; + +grant references on table "public"."users_old" to "service_role"; + +grant select on table "public"."users_old" to "service_role"; + +grant trigger on table "public"."users_old" to "service_role"; + +grant truncate on table "public"."users_old" to "service_role"; + +grant update on table "public"."users_old" to "service_role"; + +grant delete on table "public"."vectors" to "anon"; + +grant insert on table "public"."vectors" to "anon"; + +grant references on table "public"."vectors" to "anon"; + +grant select on table "public"."vectors" to "anon"; + +grant trigger on table "public"."vectors" to "anon"; + +grant truncate on table "public"."vectors" to "anon"; + +grant update on table "public"."vectors" to "anon"; + +grant delete on table "public"."vectors" to "authenticated"; + +grant insert on table "public"."vectors" to "authenticated"; + +grant references on table "public"."vectors" to "authenticated"; + +grant select on table "public"."vectors" to "authenticated"; + +grant trigger on table "public"."vectors" to "authenticated"; + +grant truncate on table "public"."vectors" to "authenticated"; + +grant update on table "public"."vectors" to "authenticated"; + +grant delete on table "public"."vectors" to 
"service_role"; + +grant insert on table "public"."vectors" to "service_role"; + +grant references on table "public"."vectors" to "service_role"; + +grant select on table "public"."vectors" to "service_role"; + +grant trigger on table "public"."vectors" to "service_role"; + +grant truncate on table "public"."vectors" to "service_role"; + +grant update on table "public"."vectors" to "service_role"; + +grant delete on table "public"."vectors_old" to "anon"; + +grant insert on table "public"."vectors_old" to "anon"; + +grant references on table "public"."vectors_old" to "anon"; + +grant select on table "public"."vectors_old" to "anon"; + +grant trigger on table "public"."vectors_old" to "anon"; + +grant truncate on table "public"."vectors_old" to "anon"; + +grant update on table "public"."vectors_old" to "anon"; + +grant delete on table "public"."vectors_old" to "authenticated"; + +grant insert on table "public"."vectors_old" to "authenticated"; + +grant references on table "public"."vectors_old" to "authenticated"; + +grant select on table "public"."vectors_old" to "authenticated"; + +grant trigger on table "public"."vectors_old" to "authenticated"; + +grant truncate on table "public"."vectors_old" to "authenticated"; + +grant update on table "public"."vectors_old" to "authenticated"; + +grant delete on table "public"."vectors_old" to "service_role"; + +grant insert on table "public"."vectors_old" to "service_role"; + +grant references on table "public"."vectors_old" to "service_role"; + +grant select on table "public"."vectors_old" to "service_role"; + +grant trigger on table "public"."vectors_old" to "service_role"; + +grant truncate on table "public"."vectors_old" to "service_role"; + +grant update on table "public"."vectors_old" to "service_role"; + + +create schema if not exists "stripe"; + + +-- Create users table +CREATE TABLE IF NOT EXISTS user_daily_usage( + user_id UUID REFERENCES auth.users (id), + email TEXT, + date TEXT, + daily_requests_count INT, + PRIMARY KEY (user_id, date) +); + +-- Create chats table +CREATE TABLE IF NOT EXISTS chats( + chat_id UUID DEFAULT uuid_generate_v4() PRIMARY KEY, + user_id UUID REFERENCES auth.users (id), + creation_time TIMESTAMP DEFAULT current_timestamp, + history JSONB, + chat_name TEXT +); + + +-- Create vector extension +CREATE EXTENSION IF NOT EXISTS vector; + +-- Create vectors table +CREATE TABLE IF NOT EXISTS vectors ( + id UUID DEFAULT uuid_generate_v4() PRIMARY KEY, + content TEXT, + file_sha1 TEXT, + metadata JSONB, + embedding VECTOR(1536) +); + +-- Create function to match vectors +CREATE OR REPLACE FUNCTION match_vectors(query_embedding VECTOR(1536), match_count INT, p_brain_id UUID) +RETURNS TABLE( + id UUID, + brain_id UUID, + content TEXT, + metadata JSONB, + embedding VECTOR(1536), + similarity FLOAT +) LANGUAGE plpgsql AS $$ +#variable_conflict use_column +BEGIN + RETURN QUERY + SELECT + vectors.id, + brains_vectors.brain_id, + vectors.content, + vectors.metadata, + vectors.embedding, + 1 - (vectors.embedding <=> query_embedding) AS similarity + FROM + vectors + INNER JOIN + brains_vectors ON vectors.id = brains_vectors.vector_id + WHERE brains_vectors.brain_id = p_brain_id + ORDER BY + vectors.embedding <=> query_embedding + LIMIT match_count; +END; +$$; + +-- Create stats table +CREATE TABLE IF NOT EXISTS stats ( + time TIMESTAMP, + chat BOOLEAN, + embedding BOOLEAN, + details TEXT, + metadata JSONB, + id INTEGER PRIMARY KEY GENERATED ALWAYS AS IDENTITY +); + +-- Create summaries table +CREATE TABLE IF NOT EXISTS summaries ( 
+ id BIGSERIAL PRIMARY KEY, + document_id UUID REFERENCES vectors(id), + content TEXT, + metadata JSONB, + embedding VECTOR(1536) +); + +-- Create function to match summaries +CREATE OR REPLACE FUNCTION match_summaries(query_embedding VECTOR(1536), match_count INT, match_threshold FLOAT) +RETURNS TABLE( + id BIGINT, + document_id UUID, + content TEXT, + metadata JSONB, + embedding VECTOR(1536), + similarity FLOAT +) LANGUAGE plpgsql AS $$ +#variable_conflict use_column +BEGIN + RETURN QUERY + SELECT + id, + document_id, + content, + metadata, + embedding, + 1 - (summaries.embedding <=> query_embedding) AS similarity + FROM + summaries + WHERE 1 - (summaries.embedding <=> query_embedding) > match_threshold + ORDER BY + summaries.embedding <=> query_embedding + LIMIT match_count; +END; +$$; + +-- Create api_keys table +CREATE TABLE IF NOT EXISTS api_keys( + key_id UUID DEFAULT gen_random_uuid() PRIMARY KEY, + user_id UUID REFERENCES auth.users (id), + name TEXT DEFAULT 'API_KEY', + days INT DEFAULT 30, + only_chat BOOLEAN DEFAULT false, + api_key TEXT UNIQUE, + creation_time TIMESTAMP DEFAULT current_timestamp, + deleted_time TIMESTAMP, + is_active BOOLEAN DEFAULT true +); + +--- Create prompts table +CREATE TABLE IF NOT EXISTS prompts ( + id UUID DEFAULT uuid_generate_v4() PRIMARY KEY, + title VARCHAR(255), + content TEXT, + status VARCHAR(255) DEFAULT 'private' +); + +DO $$ +BEGIN +IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'brain_type_enum') THEN + -- Create the ENUM type 'brain_type' if it doesn't exist + CREATE TYPE brain_type_enum AS ENUM ('doc', 'api', 'composite'); +END IF; +END $$; + +--- Create brains table +CREATE TABLE IF NOT EXISTS brains ( + brain_id UUID DEFAULT gen_random_uuid() PRIMARY KEY, + name TEXT NOT NULL, + status TEXT, + description TEXT, + model TEXT, + max_tokens INT, + temperature FLOAT, + prompt_id UUID REFERENCES prompts(id), + last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + brain_type brain_type_enum DEFAULT 'doc' +); + + +-- Create chat_history table +CREATE TABLE IF NOT EXISTS chat_history ( + message_id UUID DEFAULT uuid_generate_v4(), + chat_id UUID REFERENCES chats(chat_id), + user_message TEXT, + assistant TEXT, + message_time TIMESTAMP DEFAULT current_timestamp, + PRIMARY KEY (chat_id, message_id), + prompt_id UUID REFERENCES prompts(id), + brain_id UUID REFERENCES brains(brain_id) +); + +-- Create notification table + +CREATE TABLE IF NOT EXISTS notifications ( + id UUID DEFAULT gen_random_uuid() PRIMARY KEY, + datetime TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + chat_id UUID REFERENCES chats(chat_id), + message TEXT, + action VARCHAR(255) NOT NULL, + status VARCHAR(255) NOT NULL +); + + +-- Create brains X users table +CREATE TABLE IF NOT EXISTS brains_users ( + brain_id UUID, + user_id UUID, + rights VARCHAR(255), + default_brain BOOLEAN DEFAULT false, + PRIMARY KEY (brain_id, user_id), + FOREIGN KEY (user_id) REFERENCES auth.users (id), + FOREIGN KEY (brain_id) REFERENCES brains (brain_id) +); + +-- Create brains X vectors table +CREATE TABLE IF NOT EXISTS brains_vectors ( + brain_id UUID, + vector_id UUID, + file_sha1 TEXT, + PRIMARY KEY (brain_id, vector_id), + FOREIGN KEY (vector_id) REFERENCES vectors (id), + FOREIGN KEY (brain_id) REFERENCES brains (brain_id) +); + +-- Create brains X vectors table +CREATE TABLE IF NOT EXISTS brain_subscription_invitations ( + brain_id UUID, + email VARCHAR(255), + rights VARCHAR(255), + PRIMARY KEY (brain_id, email), + FOREIGN KEY (brain_id) REFERENCES brains (brain_id) +); + +-- Table for storing the 
relationship between brains for composite brains +CREATE TABLE IF NOT EXISTS composite_brain_connections ( + composite_brain_id UUID NOT NULL REFERENCES brains(brain_id), + connected_brain_id UUID NOT NULL REFERENCES brains(brain_id), + PRIMARY KEY (composite_brain_id, connected_brain_id), + CHECK (composite_brain_id != connected_brain_id) +); + +--- Create user_identity table +CREATE TABLE IF NOT EXISTS user_identity ( + user_id UUID PRIMARY KEY, + openai_api_key VARCHAR(255) +); + +-- Create the new table with 6 columns +CREATE TABLE IF NOT EXISTS api_brain_definition ( + brain_id UUID REFERENCES brains(brain_id), + method VARCHAR(255) CHECK (method IN ('GET', 'POST', 'PUT', 'DELETE')), + url VARCHAR(255), + params JSON, + search_params JSON, + secrets JSON +); + +CREATE OR REPLACE FUNCTION public.get_user_email_by_user_id(user_id uuid) +RETURNS TABLE (email text) +SECURITY definer +AS $$ +BEGIN + RETURN QUERY SELECT au.email::text FROM auth.users au WHERE au.id = user_id; +END; +$$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION public.get_user_id_by_user_email(user_email text) +RETURNS TABLE (user_id uuid) +SECURITY DEFINER +AS $$ +BEGIN + RETURN QUERY SELECT au.id::uuid FROM auth.users au WHERE au.email = user_email; +END; +$$ LANGUAGE plpgsql; + + + + + +CREATE TABLE IF NOT EXISTS user_settings ( + user_id UUID PRIMARY KEY, + models JSONB DEFAULT '["gpt-3.5-turbo-1106","gpt-4"]'::jsonb, + daily_chat_credit INT DEFAULT 300, + max_brains INT DEFAULT 30, + max_brain_size INT DEFAULT 100000000 +); + +-- knowledge table +CREATE TABLE IF NOT EXISTS knowledge ( + id UUID DEFAULT gen_random_uuid() PRIMARY KEY, + file_name TEXT, + url TEXT, + brain_id UUID NOT NULL REFERENCES brains(brain_id), + extension TEXT NOT NULL, + CHECK ((file_name IS NOT NULL AND url IS NULL) OR (file_name IS NULL AND url IS NOT NULL)) +); + + +-- knowledge_vectors table +CREATE TABLE IF NOT EXISTS knowledge_vectors ( + knowledge_id UUID NOT NULL REFERENCES knowledge(id), + vector_id UUID NOT NULL REFERENCES vectors(id), + embedding_model TEXT NOT NULL, + PRIMARY KEY (knowledge_id, vector_id, embedding_model) +); + +-- Create the function to add user_id to the onboardings table +CREATE OR REPLACE FUNCTION public.create_user_onboarding() RETURNS TRIGGER AS $$ +BEGIN + INSERT INTO public.onboardings (user_id) + VALUES (NEW.id); + RETURN NEW; +END; +$$ LANGUAGE plpgsql SECURITY definer; + +-- Revoke all on function handle_new_user_onboarding() from PUBLIC; +REVOKE ALL ON FUNCTION create_user_onboarding() FROM PUBLIC; + +-- Drop the trigger if it exists +DROP TRIGGER IF EXISTS create_user_onboarding_trigger ON auth.users; + +-- Create the trigger on the insert into the auth.users table +CREATE TRIGGER create_user_onboarding_trigger +AFTER INSERT ON auth.users +FOR EACH ROW +EXECUTE FUNCTION public.create_user_onboarding(); + +-- Create the onboarding table +CREATE TABLE IF NOT EXISTS onboardings ( + user_id UUID NOT NULL REFERENCES auth.users (id), + onboarding_a BOOLEAN NOT NULL DEFAULT true, + onboarding_b1 BOOLEAN NOT NULL DEFAULT true, + onboarding_b2 BOOLEAN NOT NULL DEFAULT true, + onboarding_b3 BOOLEAN NOT NULL DEFAULT true, + creation_time TIMESTAMP DEFAULT current_timestamp, + PRIMARY KEY (user_id) +); + + +-- Stripe settings -- +-- Create extension 'wrappers' if it doesn't exist +CREATE EXTENSION IF NOT EXISTS wrappers; + +-- Create foreign data wrapper 'stripe_wrapper' if it doesn't exist +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 + FROM information_schema.foreign_data_wrappers + WHERE 
foreign_data_wrapper_name = 'stripe_wrapper' + ) THEN + CREATE FOREIGN DATA WRAPPER stripe_wrapper + HANDLER stripe_fdw_handler; + END IF; +END $$; + +-- Check if the server 'stripe_server' exists before creating it +DO $$ +BEGIN + IF NOT EXISTS (SELECT 1 FROM pg_foreign_server WHERE srvname = 'stripe_server') THEN + CREATE SERVER stripe_server + FOREIGN DATA WRAPPER stripe_wrapper + OPTIONS ( + api_key 'sk_test_51NtDTIJglvQxkJ1HVZHZHpKNAm48jAzKfJs93MjpKiML9YHy8G1YoKIf6SpcnGwRFWjmdS664A2Z2dn4LORWpo1P00qt6Jmy8G' -- Replace with your Stripe API key + ); + END IF; +END $$; + +-- Create foreign table 'public.customers' if it doesn't exist +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 + FROM information_schema.tables + WHERE table_name = 'customers' + ) THEN + CREATE FOREIGN TABLE public.customers ( + id text, + email text, + name text, + description text, + created timestamp, + attrs jsonb + ) + SERVER stripe_server + OPTIONS ( + OBJECT 'customers', + ROWID_COLUMN 'id' + ); + END IF; +END $$; + +-- Create table 'users' if it doesn't exist +CREATE TABLE IF NOT EXISTS public.users ( + id uuid REFERENCES auth.users NOT NULL PRIMARY KEY, + email text +); + +-- Create or replace function 'public.handle_new_user' +CREATE OR REPLACE FUNCTION public.handle_new_user() +RETURNS TRIGGER AS $$ +BEGIN + INSERT INTO public.users (id, email) + VALUES (NEW.id, NEW.email); + RETURN NEW; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Check if the trigger 'on_auth_user_created' exists before creating it +DO $$ +BEGIN + IF NOT EXISTS (SELECT 1 FROM pg_trigger WHERE tgname = 'on_auth_user_created') THEN + CREATE TRIGGER on_auth_user_created + AFTER INSERT ON auth.users + FOR EACH ROW EXECUTE FUNCTION public.handle_new_user(); + END IF; +END $$; + +insert into + storage.buckets (id, name) +values + ('quivr', 'quivr'); + +CREATE POLICY "Access Quivr Storage 1jccrwz_0" ON storage.objects FOR INSERT TO anon WITH CHECK (bucket_id = 'quivr'); + +CREATE POLICY "Access Quivr Storage 1jccrwz_1" ON storage.objects FOR SELECT TO anon USING (bucket_id = 'quivr'); + +CREATE POLICY "Access Quivr Storage 1jccrwz_2" ON storage.objects FOR UPDATE TO anon USING (bucket_id = 'quivr'); + +CREATE POLICY "Access Quivr Storage 1jccrwz_3" ON storage.objects FOR DELETE TO anon USING (bucket_id = 'quivr'); + +-- Create functions for secrets in vault +CREATE OR REPLACE FUNCTION insert_secret(name text, secret text) +returns uuid +language plpgsql +security definer +set search_path = public +as $$ +begin + return vault.create_secret(secret, name); +end; +$$; + + +create or replace function read_secret(secret_name text) +returns text +language plpgsql +security definer set search_path = public +as $$ +declare + secret text; +begin + select decrypted_secret from vault.decrypted_secrets where name = + secret_name into secret; + return secret; +end; +$$; + +create or replace function delete_secret(secret_name text) +returns text +language plpgsql +security definer set search_path = public +as $$ +declare + deleted_rows int; +begin + delete from vault.decrypted_secrets where name = secret_name; + get diagnostics deleted_rows = row_count; + if deleted_rows = 0 then + return false; + else + return true; + end if; +end; +$$; + +create schema if not exists extensions; + +create table if not exists + extensions.wrappers_fdw_stats (); + +grant all on extensions.wrappers_fdw_stats to service_role; + + + diff --git a/supabase/migrations/20240103175048_prod.sql b/supabase/migrations/20240103175048_prod.sql new file mode 100644 index 000000000..88e41300e 
--- /dev/null +++ b/supabase/migrations/20240103175048_prod.sql @@ -0,0 +1,205 @@ +create sequence "public"."documents_id_seq"; + +drop function if exists "public"."get_premium_user"(input_email text); + +drop function if exists "public"."update_max_brains"(); + +drop function if exists "public"."update_user_settings"(); + +drop function if exists "public"."match_summaries"(query_embedding vector, match_count integer, match_threshold double precision); + +alter table "public"."vectors" drop constraint "vectors_pkey1"; + +drop index if exists "public"."vectors_pkey1"; + +create table "public"."documents" ( + "id" bigint not null default nextval('documents_id_seq'::regclass), + "content" text, + "metadata" jsonb, + "embedding" vector(1536) +); + + +alter table "public"."brains" drop column "retrieval_algorithm"; + +alter table "public"."brains" add column "openai_api_key" text; + +alter table "public"."brains" alter column "status" set default 'private'::text; + +alter table "public"."brains_users" alter column "default_brain" set default false; + +alter table "public"."brains_vectors" drop column "rights"; + +alter table "public"."user_settings" alter column "max_brain_size" set default 50000000; + +alter table "public"."vectors" alter column "id" drop default; + +alter sequence "public"."documents_id_seq" owned by "public"."documents"."id"; + +CREATE INDEX brains_vectors_brain_id_idx ON public.brains_vectors USING btree (brain_id); + +CREATE INDEX brains_vectors_vector_id_idx ON public.brains_vectors USING btree (vector_id); + +CREATE UNIQUE INDEX documents_pkey ON public.documents USING btree (id); + +CREATE INDEX idx_brains_vectors_vector_id ON public.brains_vectors USING btree (vector_id); + +CREATE INDEX idx_vectors_id ON public.vectors USING btree (id); + +CREATE INDEX vectors_file_sha1_idx ON public.vectors USING btree (file_sha1); + +CREATE INDEX vectors_id_idx ON public.vectors USING btree (id); + +CREATE UNIQUE INDEX vectors_new_pkey ON public.vectors USING btree (id); + +alter table "public"."documents" add constraint "documents_pkey" PRIMARY KEY using index "documents_pkey"; + +alter table "public"."vectors" add constraint "vectors_new_pkey" PRIMARY KEY using index "vectors_new_pkey"; + +alter table "public"."api_keys" add constraint "api_keys_user_id_fkey" FOREIGN KEY (user_id) REFERENCES auth.users(id) not valid; + +alter table "public"."api_keys" validate constraint "api_keys_user_id_fkey"; + +alter table "public"."brains_vectors" add constraint "brains_vectors_vector_id_fkey" FOREIGN KEY (vector_id) REFERENCES vectors(id) not valid; + +alter table "public"."brains_vectors" validate constraint "brains_vectors_vector_id_fkey"; + +alter table "public"."knowledge_vectors" add constraint "knowledge_vectors_vector_id_fkey" FOREIGN KEY (vector_id) REFERENCES vectors(id) not valid; + +alter table "public"."knowledge_vectors" validate constraint "knowledge_vectors_vector_id_fkey"; + +alter table "public"."summaries" add constraint "summaries_document_id_fkey" FOREIGN KEY (document_id) REFERENCES vectors(id) not valid; + +alter table "public"."summaries" validate constraint "summaries_document_id_fkey"; + +set check_function_bodies = off; + +CREATE OR REPLACE FUNCTION public.match_documents(query_embedding vector, match_count integer) + RETURNS TABLE(id bigint, content text, metadata jsonb, similarity double precision) + LANGUAGE plpgsql +AS $function$ +#variable_conflict use_column +begin + return query + select + id, + content, + metadata, + 1 - (documents.embedding <=> 
query_embedding) as similarity + from documents + order by documents.embedding <=> query_embedding + limit match_count; +end; +$function$ +; + +CREATE OR REPLACE FUNCTION public.match_summaries(query_embedding vector, match_count integer, match_threshold double precision) + RETURNS TABLE(id bigint, document_id bigint, content text, metadata jsonb, embedding vector, similarity double precision) + LANGUAGE plpgsql +AS $function$ + # variable_conflict use_column +BEGIN + RETURN query + SELECT + id, + document_id, + content, + metadata, + embedding, + 1 -(summaries.embedding <=> query_embedding) AS similarity + FROM + summaries + WHERE 1 - (summaries.embedding <=> query_embedding) > match_threshold + ORDER BY + summaries.embedding <=> query_embedding + LIMIT match_count; +END; +$function$ +; + +CREATE OR REPLACE FUNCTION public.update_max_brains_theodo() + RETURNS trigger + LANGUAGE plpgsql + SECURITY DEFINER +AS $function$ +DECLARE + userEmail TEXT; + allowedDomains TEXT[] := ARRAY['%@theodo.fr', '%@theodo.com', '%@theodo.co.uk', '%@bam.tech', '%@padok.fr', '%@aleios.com', '%@sicara.com', '%@hokla.com', '%@sipios.com']; +BEGIN + SELECT email INTO userEmail FROM auth.users WHERE id = NEW.user_id; + + IF userEmail LIKE ANY(allowedDomains) THEN + -- Ensure the models column is initialized as an array if null + IF NEW.models IS NULL THEN + NEW.models := '[]'::jsonb; + END IF; + + -- Add gpt-4 if not present + IF NOT NEW.models ? 'gpt-4' THEN + NEW.models := NEW.models || '["gpt-4"]'::jsonb; + END IF; + + -- Add gpt-3.5-turbo if not present + IF NOT NEW.models ? 'gpt-3.5-turbo-1106' THEN + NEW.models := NEW.models || '["gpt-3.5-turbo"]'::jsonb; + END IF; + + UPDATE user_settings + SET + max_brains = 30, + max_brain_size = 100000000, + daily_chat_credit = 200, + models = NEW.models + WHERE user_id = NEW.user_id; + END IF; + + RETURN NULL; -- for AFTER triggers, the return value is ignored +END; +$function$ +; + +grant delete on table "public"."documents" to "anon"; + +grant insert on table "public"."documents" to "anon"; + +grant references on table "public"."documents" to "anon"; + +grant select on table "public"."documents" to "anon"; + +grant trigger on table "public"."documents" to "anon"; + +grant truncate on table "public"."documents" to "anon"; + +grant update on table "public"."documents" to "anon"; + +grant delete on table "public"."documents" to "authenticated"; + +grant insert on table "public"."documents" to "authenticated"; + +grant references on table "public"."documents" to "authenticated"; + +grant select on table "public"."documents" to "authenticated"; + +grant trigger on table "public"."documents" to "authenticated"; + +grant truncate on table "public"."documents" to "authenticated"; + +grant update on table "public"."documents" to "authenticated"; + +grant delete on table "public"."documents" to "service_role"; + +grant insert on table "public"."documents" to "service_role"; + +grant references on table "public"."documents" to "service_role"; + +grant select on table "public"."documents" to "service_role"; + +grant trigger on table "public"."documents" to "service_role"; + +grant truncate on table "public"."documents" to "service_role"; + +grant update on table "public"."documents" to "service_role"; + +CREATE TRIGGER update_max_brains_theodo_trigger AFTER INSERT ON public.user_settings FOR EACH ROW EXECUTE FUNCTION update_max_brains_theodo(); + + diff --git a/supabase/migrations/20240103181249_premium.sql b/supabase/migrations/20240103181249_premium.sql new file mode 100644 
index 000000000..a95b2eb54 --- /dev/null +++ b/supabase/migrations/20240103181249_premium.sql @@ -0,0 +1,7 @@ +alter table "public"."user_settings" add column "is_premium" boolean not null default false; + +alter table "public"."user_settings" alter column "max_brain_size" set not null; + +alter table "public"."user_settings" alter column "max_brain_size" set data type bigint using "max_brain_size"::bigint; + + diff --git a/supabase/migrations/20240103181925_cleanup.sql b/supabase/migrations/20240103181925_cleanup.sql new file mode 100644 index 000000000..c6f7cbcda --- /dev/null +++ b/supabase/migrations/20240103181925_cleanup.sql @@ -0,0 +1,3 @@ +drop function if exists "public"."match_summaries"(query_embedding vector, match_count integer, match_threshold double precision); + + diff --git a/supabase/migrations/20240103185550_upgrade.sql b/supabase/migrations/20240103185550_upgrade.sql new file mode 100644 index 000000000..d04d4ac46 --- /dev/null +++ b/supabase/migrations/20240103185550_upgrade.sql @@ -0,0 +1,3 @@ +alter extension "wrappers" update to '0.2.0'; + + diff --git a/supabase/migrations/20240103193921_stripe_customers.sql b/supabase/migrations/20240103193921_stripe_customers.sql new file mode 100644 index 000000000..75b60a7de --- /dev/null +++ b/supabase/migrations/20240103193921_stripe_customers.sql @@ -0,0 +1,30 @@ +create foreign table public.subscriptions ( + id text, + customer text, + currency text, + current_period_start timestamp, + current_period_end timestamp, + attrs jsonb +) + server stripe_server + options ( + object 'subscriptions', + rowid_column 'id' + ); + + + create foreign table public.products ( + id text, + name text, + active bool, + default_price text, + description text, + created timestamp, + updated timestamp, + attrs jsonb +) + server stripe_server + options ( + object 'products', + rowid_column 'id' + ); \ No newline at end of file diff --git a/supabase/migrations/20240103194255_api.sql b/supabase/migrations/20240103194255_api.sql new file mode 100644 index 000000000..01df0f691 --- /dev/null +++ b/supabase/migrations/20240103194255_api.sql @@ -0,0 +1,3 @@ +alter table "public"."user_settings" add column "API_ACCESS" boolean not null default false; + + diff --git a/supabase/migrations/20240103204741_product_to_features.sql b/supabase/migrations/20240103204741_product_to_features.sql new file mode 100644 index 000000000..e148d7b6b --- /dev/null +++ b/supabase/migrations/20240103204741_product_to_features.sql @@ -0,0 +1,66 @@ +create table "public"."product_to_features" ( + "id" bigint generated by default as identity not null, + "models" jsonb default '["gpt-3.5-turbo-1106"]'::jsonb, + "daily_chat_credit" integer not null default 20, + "max_brains" integer not null, + "max_brain_size" bigint not null default '50000000'::bigint, + "api_access" boolean not null default false, + "stripe_product_id" text +); + + +alter table "public"."user_settings" drop column "API_ACCESS"; + +alter table "public"."user_settings" add column "api_access" boolean not null default false; + +CREATE UNIQUE INDEX product_to_features_pkey ON public.product_to_features USING btree (id); + +alter table "public"."product_to_features" add constraint "product_to_features_pkey" PRIMARY KEY using index "product_to_features_pkey"; + +alter table "public"."product_to_features" add constraint "product_to_features_max_brains_check" CHECK ((max_brains > 0)) not valid; + +alter table "public"."product_to_features" validate constraint "product_to_features_max_brains_check"; + +grant delete on 
table "public"."product_to_features" to "anon"; + +grant insert on table "public"."product_to_features" to "anon"; + +grant references on table "public"."product_to_features" to "anon"; + +grant select on table "public"."product_to_features" to "anon"; + +grant trigger on table "public"."product_to_features" to "anon"; + +grant truncate on table "public"."product_to_features" to "anon"; + +grant update on table "public"."product_to_features" to "anon"; + +grant delete on table "public"."product_to_features" to "authenticated"; + +grant insert on table "public"."product_to_features" to "authenticated"; + +grant references on table "public"."product_to_features" to "authenticated"; + +grant select on table "public"."product_to_features" to "authenticated"; + +grant trigger on table "public"."product_to_features" to "authenticated"; + +grant truncate on table "public"."product_to_features" to "authenticated"; + +grant update on table "public"."product_to_features" to "authenticated"; + +grant delete on table "public"."product_to_features" to "service_role"; + +grant insert on table "public"."product_to_features" to "service_role"; + +grant references on table "public"."product_to_features" to "service_role"; + +grant select on table "public"."product_to_features" to "service_role"; + +grant trigger on table "public"."product_to_features" to "service_role"; + +grant truncate on table "public"."product_to_features" to "service_role"; + +grant update on table "public"."product_to_features" to "service_role"; + + diff --git a/supabase/schema.sql b/supabase/schema.sql new file mode 100644 index 000000000..e69de29bb diff --git a/supabase/seed.sql b/supabase/seed.sql new file mode 100644 index 000000000..fd671a837 --- /dev/null +++ b/supabase/seed.sql @@ -0,0 +1,305 @@ +SET session_replication_role = replica; +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 15.1 (Ubuntu 15.1-1.pgdg20.04+1) +-- Dumped by pg_dump version 15.5 (Ubuntu 15.5-1.pgdg20.04+1) + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- Data for Name: audit_log_entries; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + +INSERT INTO "auth"."audit_log_entries" ("instance_id", "id", "payload", "created_at", "ip_address") VALUES + ('00000000-0000-0000-0000-000000000000', '479f1b0e-4e73-4a73-9b0a-349cfc333215', '{"action":"user_signedup","actor_id":"00000000-0000-0000-0000-000000000000","actor_username":"service_role","actor_via_sso":false,"log_type":"team","traits":{"user_email":"admin@quivr.app","user_id":"d777f5d2-1494-460c-82b4-70f445b6344b","user_phone":""}}', '2024-01-03 17:52:45.895193+00', ''), + ('00000000-0000-0000-0000-000000000000', 'b21c9ed5-6a11-4da6-b0ba-86a84ae01d9d', '{"action":"login","actor_id":"d777f5d2-1494-460c-82b4-70f445b6344b","actor_username":"admin@quivr.app","actor_via_sso":false,"log_type":"account","traits":{"provider":"email"}}', '2024-01-03 17:57:52.722055+00', ''), + ('00000000-0000-0000-0000-000000000000', 'f7fbe861-c477-483e-a74a-8dcb9f2df8c5', 
'{"action":"user_signedup","actor_id":"00000000-0000-0000-0000-000000000000","actor_username":"service_role","actor_via_sso":false,"log_type":"team","traits":{"user_email":"admin@quivr.app","user_id":"bad271c8-973a-4dcc-8e87-1de818ea1234","user_phone":""}}', '2024-01-03 17:59:11.223649+00', ''); + + +-- +-- Data for Name: flow_state; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + + + +-- +-- Data for Name: users; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + +INSERT INTO "auth"."users" ("instance_id", "id", "aud", "role", "email", "encrypted_password", "email_confirmed_at", "invited_at", "confirmation_token", "confirmation_sent_at", "recovery_token", "recovery_sent_at", "email_change_token_new", "email_change", "email_change_sent_at", "last_sign_in_at", "raw_app_meta_data", "raw_user_meta_data", "is_super_admin", "created_at", "updated_at", "phone", "phone_confirmed_at", "phone_change", "phone_change_token", "phone_change_sent_at", "email_change_token_current", "email_change_confirm_status", "banned_until", "reauthentication_token", "reauthentication_sent_at", "is_sso_user", "deleted_at") VALUES + ('00000000-0000-0000-0000-000000000000', 'bad271c8-973a-4dcc-8e87-1de818ea1234', 'authenticated', 'authenticated', 'admin@quivr.app', '$2a$10$fo99ZlLdOex9QJy5cMN8OuQD2EBylfB1dPCfdLeXniDr6a6K1jOEu', '2024-01-03 17:59:11.22809+00', NULL, '', NULL, '', NULL, '', '', NULL, NULL, '{"provider": "email", "providers": ["email"]}', '{}', NULL, '2024-01-03 17:59:11.212675+00', '2024-01-03 17:59:11.228261+00', NULL, NULL, '', '', NULL, '', 0, NULL, '', NULL, false, NULL); + + +-- +-- Data for Name: identities; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + +INSERT INTO "auth"."identities" ("provider_id", "user_id", "identity_data", "provider", "last_sign_in_at", "created_at", "updated_at", "id") VALUES + ('bad271c8-973a-4dcc-8e87-1de818ea1234', 'bad271c8-973a-4dcc-8e87-1de818ea1234', '{"sub": "bad271c8-973a-4dcc-8e87-1de818ea1234", "email": "admin@quivr.app", "email_verified": false, "phone_verified": false}', 'email', '2024-01-03 17:59:11.222255+00', '2024-01-03 17:59:11.222367+00', '2024-01-03 17:59:11.222367+00', 'b22ef918-7d7c-4d30-b51a-0ac15a25ae0c'); + + +-- +-- Data for Name: instances; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + + + +-- +-- Data for Name: sessions; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + + + +-- +-- Data for Name: mfa_amr_claims; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + + + +-- +-- Data for Name: mfa_factors; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + + + +-- +-- Data for Name: mfa_challenges; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + + + +-- +-- Data for Name: refresh_tokens; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + + + +-- +-- Data for Name: sso_providers; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + + + +-- +-- Data for Name: saml_providers; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + + + +-- +-- Data for Name: saml_relay_states; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + + + +-- +-- Data for Name: sso_domains; Type: TABLE DATA; Schema: auth; Owner: supabase_auth_admin +-- + + + +-- +-- Data for Name: prompts; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: brains; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: api_brain_definition; Type: TABLE DATA; Schema: 
public; Owner: postgres +-- + + + +-- +-- Data for Name: api_keys; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: brain_subscription_invitations; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: brains_users; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: vectors; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: brains_vectors; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: chats; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: chat_history; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: composite_brain_connections; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: documents; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: knowledge; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: knowledge_vectors; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: migrations; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: notifications; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: onboardings; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +INSERT INTO "public"."onboardings" ("user_id", "onboarding_a", "onboarding_b1", "onboarding_b2", "onboarding_b3", "creation_time") VALUES + ('bad271c8-973a-4dcc-8e87-1de818ea1234', true, true, true, true, '2024-01-03 17:59:11.212049'); + + +-- +-- Data for Name: stats; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: summaries; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: user_daily_usage; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: user_identity; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: user_settings; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: users; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +INSERT INTO "public"."users" ("id", "email") VALUES + ('bad271c8-973a-4dcc-8e87-1de818ea1234', 'admin@quivr.app'); + + +-- +-- Data for Name: users_old; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Data for Name: vectors_old; Type: TABLE DATA; Schema: public; Owner: postgres +-- + + + +-- +-- Name: refresh_tokens_id_seq; Type: SEQUENCE SET; Schema: auth; Owner: supabase_auth_admin +-- + +SELECT pg_catalog.setval('"auth"."refresh_tokens_id_seq"', 1, true); + + +-- +-- Name: documents_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval('"public"."documents_id_seq"', 1, false); + + +-- +-- Name: stats_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval('"public"."stats_id_seq"', 1, false); + + +-- +-- Name: summaries_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval('"public"."summaries_id_seq"', 1, false); + + +-- +-- Name: vectors_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval('"public"."vectors_id_seq"', 1, false); + + +-- +-- PostgreSQL database dump complete +-- + +RESET ALL;
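Note on the new tables introduced above: "product_to_features" maps a Stripe product to the feature limits it unlocks, and "subscriptions" is a Stripe foreign table read live through the wrappers extension. Neither table is seeded by this changeset. The SQL below is an illustrative sketch only, assuming a Stripe server is already configured; the product id 'prod_XXXXXXXXXXXX' is a placeholder and the limit values are examples, not values taken from this diff.

-- Illustrative only: link a hypothetical Stripe product to the limits it grants.
-- 'prod_XXXXXXXXXXXX' is a placeholder product id; the numeric limits are examples.
insert into "public"."product_to_features"
    ("models", "daily_chat_credit", "max_brains", "max_brain_size", "api_access", "stripe_product_id")
values
    ('["gpt-3.5-turbo-1106", "gpt-4"]'::jsonb, 100, 12, 100000000, true, 'prod_XXXXXXXXXXXX');

-- Illustrative only: list subscriptions still inside their billing period,
-- read from Stripe through the "subscriptions" foreign table defined in the migration above.
select s.id, s.customer, s.current_period_end
from public.subscriptions s
where s.current_period_end >= now();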