Revert "feat: Add Google Drive & Sharepoint sync in backend" (#2603)

Reverts QuivrHQ/quivr#2592
Author: Stan Girard, 2024-05-21 17:37:08 +02:00 (committed by GitHub)
parent 8303aca9d9
commit 848aed46ea
GPG signature: no known key found for this signature in database (Key ID: B5690EEEBB952194)
43 changed files with 381 additions and 3264 deletions

@@ -35,7 +35,6 @@ TELEMETRY_ENABLED=true
CELERY_BROKER_URL=redis://redis:6379/0
CELEBRY_BROKER_QUEUE_NAME=quivr-preview.fifo
QUIVR_DOMAIN=http://localhost:3000/
BACKEND_URL=http://localhost:5050
#COHERE_API_KEY=CHANGE_ME
#RESEND
@@ -52,16 +51,3 @@ PREMIUM_DAILY_CHAT_CREDIT=100
# BRAVE SEARCH API KEY
BRAVE_SEARCH_API_KEY=CHANGE_ME
# GOOGLE DRIVE
GOOGLE_CLIENT_ID=your-client-id
GOOGLE_CLIENT_SECRET=your-client-secret
GOOGLE_PROJECT_ID=your-project-id
GOOGLE_AUTH_URI=https://accounts.google.com/o/oauth2/auth
GOOGLE_TOKEN_URI=https://oauth2.googleapis.com/token
GOOGLE_AUTH_PROVIDER_CERT_URL=https://www.googleapis.com/oauth2/v1/certs
GOOGLE_REDIRECT_URI=http://localhost
# SHAREPOINT
SHAREPOINT_CLIENT_ID=your-client-id
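
For context, the variables above configured a standard OAuth 2.0 client for the now-reverted Google Drive sync. Below is a minimal sketch of how such values are typically assembled into the "client config" shape that google-auth-oauthlib accepts; it is illustrative, not the removed implementation:

import os

# Build the client-config dict that google_auth_oauthlib.flow.Flow.from_client_config
# expects, using the environment variables from the removed block above.
google_client_config = {
    "web": {
        "client_id": os.environ["GOOGLE_CLIENT_ID"],
        "client_secret": os.environ["GOOGLE_CLIENT_SECRET"],
        "project_id": os.environ["GOOGLE_PROJECT_ID"],
        "auth_uri": os.environ["GOOGLE_AUTH_URI"],
        "token_uri": os.environ["GOOGLE_TOKEN_URI"],
        "auth_provider_x509_cert_url": os.environ["GOOGLE_AUTH_PROVIDER_CERT_URL"],
        "redirect_uris": [os.environ["GOOGLE_REDIRECT_URI"]],
    }
}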

.gitignore vendored (1 change)
@@ -83,4 +83,3 @@ backend/celerybeat-schedule.db
backend/application.log.*
backend/score.json
backend/modules/assistant/ito/utils/simple.pdf
backend/modules/sync/controller/credentials.json

.vscode/launch.json vendored (41 changes)
@@ -1,41 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Python: Remote Attach",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 5678
},
"pathMappings": [
{
"localRoot": "${workspaceFolder}/backend",
"remoteRoot": "."
}
],
"justMyCode": true
},
{
"name": "Python: Debug Test Script",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/backend/test_process_file_and_notify.py",
"console": "integratedTerminal",
"justMyCode": false
},
{
"name": "Python: Debug",
"type": "debugpy",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"justMyCode": false,
"env": {
"PYTHONPATH": "${workspaceFolder}/backend:${env:PYTHONPATH}"
},
"envFile": "${workspaceFolder}/.env"
}
]
}

.vscode/settings.json vendored (60 changes)
@@ -1,10 +1,37 @@
{
"python.formatting.provider": "black",
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.fixAll": "explicit",
"source.unusedImports": "explicit",
"source.unusedImports": "explicit"
},
"python.analysis.extraPaths": [
"./backend"
],
"editor.formatOnSave": true,
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.fixAll": "explicit"
}
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[javascriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"editor.formatOnSaveMode": "file",
"files.exclude": {
"**/__pycache__": true,
@@ -16,42 +43,17 @@
"**/.docusaurus/": true,
"**/node_modules/": true
},
"json.sortOnSave.enable": true,
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.fixAll": "explicit"
}
},
"python.formatting.provider": "black",
"python.analysis.extraPaths": [
"./backend"
],
"python.sortImports.path": "isort",
"python.linting.mypyEnabled": true,
"python.defaultInterpreterPath": "python3",
"python.linting.enabled": true,
"python.linting.flake8Enabled": true,
"python.linting.pycodestyleEnabled": true,
"python.linting.pylintEnabled": true,
"python.linting.pycodestyleCategorySeverity.W": "Error",
"python.defaultInterpreterPath": "python3",
"python.linting.flake8CategorySeverity.W": "Error",
"json.sortOnSave.enable": true,
"python.testing.pytestArgs": [
"-v",
"--color=yes",
"--envfile=backend/tests/.env_test",
"backend/"
"backend/",
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
"python.testing.autoTestDiscoverOnSaveEnabled": true,
"python.analysis.autoImportCompletions": true,
"python.analysis.typeCheckingMode": "basic",
"python.analysis.diagnosticSeverityOverrides": {
"reportMissingImports": "error",
"reportUnusedImport": "warning",
"reportGeneralTypeIssues": "warning"
}
}

@@ -67,10 +67,6 @@ playwright = "*"
langgraph = "*"
tavily-python = "*"
duckduckgo-search = "*"
google-api-python-client = "*"
google-auth-httplib2 = "*"
google-auth-oauthlib = "*"
msal = "*"
[dev-packages]
black = "*"
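
These removals (google-api-python-client, google-auth-httplib2, google-auth-oauthlib, msal) drop the Google and Microsoft auth stacks the sync feature depended on. On the SharePoint side, msal's client-credentials flow is the usual pattern; a hedged sketch follows, in which the tenant and client secret are assumptions rather than values from the removed configuration:

import msal

app = msal.ConfidentialClientApplication(
    client_id="your-client-id",  # SHAREPOINT_CLIENT_ID in the removed .env block
    client_credential="your-client-secret",  # assumed; no secret appears above
    authority="https://login.microsoftonline.com/your-tenant-id",  # assumed tenant
)
# Acquire an app-only token for Microsoft Graph, which fronts SharePoint files.
result = app.acquire_token_for_client(scopes=["https://graph.microsoft.com/.default"])
access_token = result.get("access_token")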

Pipfile.lock generated (696 changes)
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "ef45b462930863ab7c38532089d13fe9f0d48fab6397b114e2c61f593ac3e59f"
"sha256": "26105f1e817052cc5e484e0325ce27a2ac6fc3c0d7f5be14be6d2b364be66f16"
},
"pipfile-spec": 6,
"requires": {
@@ -279,21 +279,21 @@
},
"boto3": {
"hashes": [
"sha256:2500963ddd7fee0c891db596ed73f203a62e8ee6faf1543c7a14169b1d557bc3",
"sha256:a7280f54babc8e285d85d2558dd6167e4deb069e03abe43a55b643e10c03952b"
"sha256:5b37c8f4ea6f408147994a6e230c49ca755da57f5964ccea8b8fd4ff5f11759e",
"sha256:bec91a3bca63320e5f68a25b5eaa7bab65e35bb9253a544875c2e03679f1d5fb"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==1.34.107"
"version": "==1.34.104"
},
"botocore": {
"hashes": [
"sha256:4620286a814c3c8abc59145203650a6b2a4f538e887552fa1bc6896b6d11b28b",
"sha256:98bfab930391a21d24ed7a7bc86ec2ec076607318ab8de0c37d2587f73a88d89"
"sha256:b68ed482e9b4c313129c9948af5a91d0e84840558e6d232a1a27ab0b9733e5b9",
"sha256:fe36dd3cea4160fbbe27dc1cf89cb7018234350555a26933b2977947052a346a"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==1.34.107"
"version": "==1.34.104"
},
"cachetools": {
"hashes": [
@@ -378,7 +378,7 @@
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956",
"sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"
],
"markers": "platform_python_implementation != 'PyPy'",
"markers": "python_version >= '3.8'",
"version": "==1.16.0"
},
"chardet": {
@@ -518,11 +518,11 @@
},
"cohere": {
"hashes": [
"sha256:00b492ebf8921e83cb2371f2ee36ddf301422daae3024343a87d4316f02b711b",
"sha256:7792e8898c95f2cb955b2d9f23b8602f73f3b698d59f1a1b4896c53809671da0"
"sha256:db9dd0ff8991a44c73ce093ccab2a46b744039e2285e4325d809e7ec76b5a77d",
"sha256:e0f0bd3fe8ead265d4df6365f41c75b921e56f0e73fc2a8bf6c557df36fdcf26"
],
"markers": "python_version >= '3.8' and python_version < '4.0'",
"version": "==5.5.0"
"version": "==5.4.0"
},
"coloredlogs": {
"hashes": [
@@ -637,6 +637,22 @@
"markers": "python_version >= '3.7'",
"version": "==1.2.0"
},
"curl-cffi": {
"hashes": [
"sha256:449ab07e07335558997cd62296b5c4f16ce27630de7830e4ad22441049a0ef1e",
"sha256:5d39849371bbf3eab048113693715a8da5c729c494cccfa1128d768d96fdc31e",
"sha256:694d88f7065c59c651970f14bc415431f65ac601a9ba537463d70f432a48ccfc",
"sha256:6faf01aa8d98d322b877d3d801544692c73729ea6eb4a45af83514a4ecd1c8fe",
"sha256:7e3616141a2a0be7896e7dc5da1ed3965e1a78aa2e563d8aba7a641135aeaf1b",
"sha256:b55c53bb6dff713cb63f76e2f147e2d54c984b1b09df66b08f52f3acae1aeca0",
"sha256:bd16cccc0d3e93c2fbc4f4cb7cce0e10cb2ef7f8957352f3f0d770f0d6e05702",
"sha256:c09a062b8aac93d4890d2c33b7053c0e1a5cf275328b80c1fb1a950310df75f2",
"sha256:d65aa649abb24020c2ad7b3ce45e2816d1ffe25df06f1a6b0f52fbf353af82e0",
"sha256:e3a5099b98c4bf12cc1afecb3409a9c57e7ebce9447a03c96dfb661ad8fa5e79"
],
"markers": "python_version >= '3.8'",
"version": "==0.7.0b4"
},
"cycler": {
"hashes": [
"sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30",
@@ -838,12 +854,12 @@
},
"duckduckgo-search": {
"hashes": [
"sha256:0f9ea0cb0d23575d98255931c0d437cf67ac958be8e54459704b833fa903e693",
"sha256:88e008e46b8c6037559f0617dfd9f034e5dd9b384b2795343a9ef25782ca053d"
"sha256:2ee309e76b7e34ee84bddd5e046df723faecf7f999acdb499f3dad7e8a614c21",
"sha256:a07d94babe45c9a9bd0ce2dc185346b47fe95dab516d414f21f06a0a1200aca9"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==6.1.0"
"version": "==5.3.1"
},
"ecdsa": {
"hashes": [
@@ -995,12 +1011,12 @@
},
"flashrank": {
"hashes": [
"sha256:3f36289211930299ebff8fe8dd836240a1263ce9e75137a6098a7d9bbb6945a5",
"sha256:8617beb3519abfe79825b6d373066bd1c2b9c233cb74a32ebc1b0dea3d5dde63"
"sha256:39ec57dddd8b97d15dd99f4383dd7c6a89c8d1f11bbba4a45823dcbfb1194507",
"sha256:3b4ead39e61e747ff503b914792b341c51d454ccc3bd26e8e07d9a9cdfb8e85a"
],
"index": "pypi",
"markers": "python_version >= '3.6'",
"version": "==0.2.5"
"version": "==0.2.4"
},
"flatbuffers": {
"hashes": [
@@ -1197,15 +1213,6 @@
"markers": "python_version >= '3.7'",
"version": "==2.19.0"
},
"google-api-python-client": {
"hashes": [
"sha256:984cc8cc8eb4923468b1926d2b8effc5b459a4dda3c845896eb87c153b28ef84",
"sha256:d50f7e2dfdbb7fc2732f6a0cba1c54d7bb676390679526c6bb628c901e43ec86"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==2.129.0"
},
"google-auth": {
"hashes": [
"sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360",
@@ -1214,23 +1221,6 @@
"markers": "python_version >= '3.7'",
"version": "==2.29.0"
},
"google-auth-httplib2": {
"hashes": [
"sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05",
"sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"
],
"index": "pypi",
"version": "==0.2.0"
},
"google-auth-oauthlib": {
"hashes": [
"sha256:292d2d3783349f2b0734a0a0207b1e1e322ac193c2c09d8f7c613fb7cc501ea8",
"sha256:297c1ce4cb13a99b5834c74a1fe03252e1e499716718b190f56bcb9c4abc4faf"
],
"index": "pypi",
"markers": "python_version >= '3.6'",
"version": "==1.2.0"
},
"google-cloud-vision": {
"hashes": [
"sha256:044330ad618c810333ff2296cd27ffd145f249638d1b35b270de6b460b00e8d2",
@@ -1401,14 +1391,6 @@
"markers": "python_version >= '3.8'",
"version": "==1.0.5"
},
"httplib2": {
"hashes": [
"sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc",
"sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.22.0"
},
"httptools": {
"hashes": [
"sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563",
@@ -1787,12 +1769,12 @@
},
"langchain-cohere": {
"hashes": [
"sha256:d0be4e76079a74c4259fe4db2bab535d690efe0efac5e9e2fbf486476c0a85c8",
"sha256:f07bd53fadbebf744b8de1eebf977353f340f2010156821623a0c6247032ab9b"
"sha256:59058032158c9d94ff251bc9264cd69249878e6d4b1b5ed80f66c4f56e23ac9f",
"sha256:c50b33fb6dc6552f696f03e4e2b9ab7bd5ffea4d8c14c37771dc765e9122dde8"
],
"index": "pypi",
"markers": "python_version < '4.0' and python_full_version >= '3.8.1'",
"version": "==0.1.5"
"version": "==0.1.4"
},
"langchain-community": {
"hashes": [
@@ -1813,20 +1795,20 @@
},
"langchain-openai": {
"hashes": [
"sha256:39c3cb22bb739900ae8294d4d9939a6138c0ca7ad11198e57038eb14c08d04ec",
"sha256:fd7e1c33ba8e2cab4b2154f3a2fd4a0d9cc6518b41cf49bb87255f9f732a4896"
"sha256:7d2e838e57ef231cb7689fd58ac5fa8a6e9e504174f8c5698c837739786e2030",
"sha256:7f62ecb12d3cdd0d96679abea00e4e3ceb1f829f6d1f127a5f7b97c1315d157f"
],
"index": "pypi",
"markers": "python_version < '4.0' and python_full_version >= '3.8.1'",
"version": "==0.1.7"
"version": "==0.1.6"
},
"langchain-text-splitters": {
"hashes": [
"sha256:13887f32705862c1e1454213cb7834a63aae57c26fcd80346703a1d09c46168d",
"sha256:ac8927dc0ba08eba702f6961c9ed7df7cead8de19a9f7101ab2b5ea34201b3c1"
"sha256:ac459fa98799f5117ad5425a9330b21961321e30bc19a2a2f9f761ddadd62aa1",
"sha256:f5b802f873f5ff6a8b9259ff34d53ed989666ef4e1582e6d1adb3b5520e3839a"
],
"markers": "python_version < '4.0' and python_full_version >= '3.8.1'",
"version": "==0.0.2"
"version": "==0.0.1"
},
"langdetect": {
"hashes": [
@@ -1837,29 +1819,29 @@
},
"langfuse": {
"hashes": [
"sha256:07dcbb8fa9f754928d6af377dbea530d591680e3f50340d687018d8bcb83ba34",
"sha256:ecdd06fae46637d635249dfaf8f0564ac8e8769519712b11e777d2905309e5d7"
"sha256:22b2612ba1a78f6bbf173a287e7d77ba9ad9a640d440949a3a7f9c249a777278",
"sha256:f5bb91f6fc8f16d99a0f685e57e2d2b2c00edc51c4238f499916a2e7f6810fb5"
],
"index": "pypi",
"markers": "python_version < '4.0' and python_full_version >= '3.8.1'",
"version": "==2.32.0"
"version": "==2.30.0"
},
"langgraph": {
"hashes": [
"sha256:229f29959d44c6bc4cec9f42ba68766cadedd25f0b3052afb2a43cbfac4f8d4d",
"sha256:fa10cf938b1fbc270e2f597d79fbb7ab196f9c0a56d6a96a479e65879729d70e"
"sha256:6cc8dea7091718fc232dae23b0590138c41f55887d8c2fda8ffaab878157a2d0",
"sha256:b903e0164de35ac3655ae1d57e159c8b5cf10fbc497a540ccffd6e3c14aae3e2"
],
"index": "pypi",
"markers": "python_version < '4.0' and python_full_version >= '3.9.0'",
"version": "==0.0.49"
"version": "==0.0.48"
},
"langsmith": {
"hashes": [
"sha256:445e3bc1d3baa1e5340cd979907a19483b9763a2ed37b863a01113d406f69345",
"sha256:e748a89f4dd6aa441349143e49e546c03b5dfb43376a25bfef6a5ca792fe1437"
"sha256:4682204de19f0218029c2b8445ce2cc3485c8d0df9796b31e2ce4c9051fce365",
"sha256:dbd83b0944a2fbea4151f0aa053530d93fcf6784a580621bc60633cb890b57dc"
],
"markers": "python_version < '4.0' and python_full_version >= '3.8.1'",
"version": "==0.1.59"
"version": "==0.1.57"
},
"layoutparser": {
"extras": [
@@ -1875,12 +1857,12 @@
},
"litellm": {
"hashes": [
"sha256:3ae9c217dcfb6c2ab4b9592ed736482475b14e3dfb4d445076be717c48131791",
"sha256:fc3344c6606c3261bd9ae2640f970c32cf5ff8ee596fd790209a30a8f3315a36"
"sha256:22d7292d2952d82992ebebc3b7dfa1a97393f603ce652f3223f2742123ba7f2b",
"sha256:a444dad4079d3d4c49037fe37581cd04b2135e674e9e9d1cfdbda32facd546ec"
],
"index": "pypi",
"markers": "python_version not in '2.7, 3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7' and python_version >= '3.8'",
"version": "==1.37.13"
"version": "==1.37.5"
},
"llama-cpp-python": {
"hashes": [
@@ -1891,20 +1873,20 @@
},
"llama-index": {
"hashes": [
"sha256:d5057fd609e2423e75a4695242ab030d1647e4f07cb46faf9476ab504005f033",
"sha256:da8871c5c8e5d038e56c0e5cb8c18a81ddc4117bf403bace95b4cec212f88fb9"
"sha256:275309a2317e9279b296e552c334e566c4f011223f6ed39e342f5264a05c4d9a",
"sha256:e57779f332323b00576cf9e8fee0ab5b978aaf35902288691da01a7839b99e58"
],
"index": "pypi",
"markers": "python_version < '4.0' and python_full_version >= '3.8.1'",
"version": "==0.10.37"
"version": "==0.10.36"
},
"llama-index-agent-openai": {
"hashes": [
"sha256:45f4cc670d037a8a67f541d3a4d095f7f61caff6ed2c25702441eb1116d4b495",
"sha256:67536bb104b24734f79324207034d948a2ca7e4cc20dd60cf05d6eeb4b12a586"
"sha256:b05eb7f0331d40a7a2bcaabaa84c9c7ebe6837a72038d03cbb71c083a4301a81",
"sha256:cd4a58f8bf233728ceda554cbb34de56a2b6bbbbff6ce801c3f8ff0c8280bf55"
],
"markers": "python_version < '4.0' and python_full_version >= '3.8.1'",
"version": "==0.2.5"
"version": "==0.2.4"
},
"llama-index-cli": {
"hashes": [
@@ -1996,12 +1978,12 @@
},
"llama-parse": {
"hashes": [
"sha256:01836147b5238873b24a7dd41c5ab942b01b09b92d75570f30cf2861c084a0eb",
"sha256:c48c53a3080daeede293df620dddb1f381e084c31ee2dd44dce3f8615df723e8"
"sha256:5ce0390141f216dcd88c1123fea7f2a4f561d177f791a97217a3db3509dec4ff",
"sha256:fa04c09730b102155f6505de9cf91998c86d334581f0f12597c5eb47ca5db859"
],
"index": "pypi",
"markers": "python_version < '4.0' and python_full_version >= '3.8.1'",
"version": "==0.4.3"
"version": "==0.4.2"
},
"llamaindex-py-client": {
"hashes": [
@@ -2256,38 +2238,37 @@
},
"matplotlib": {
"hashes": [
"sha256:063af8587fceeac13b0936c42a2b6c732c2ab1c98d38abc3337e430e1ff75e38",
"sha256:06a478f0d67636554fa78558cfbcd7b9dba85b51f5c3b5a0c9be49010cf5f321",
"sha256:0a490715b3b9984fa609116481b22178348c1a220a4499cda79132000a79b4db",
"sha256:0fc51eaa5262553868461c083d9adadb11a6017315f3a757fc45ec6ec5f02888",
"sha256:13beb4840317d45ffd4183a778685e215939be7b08616f431c7795276e067463",
"sha256:290d304e59be2b33ef5c2d768d0237f5bd132986bdcc66f80bc9bcc300066a03",
"sha256:2bcee1dffaf60fe7656183ac2190bd630842ff87b3153afb3e384d966b57fe56",
"sha256:2e7f03e5cbbfacdd48c8ea394d365d91ee8f3cae7e6ec611409927b5ed997ee4",
"sha256:3f988bafb0fa39d1074ddd5bacd958c853e11def40800c5824556eb630f94d3b",
"sha256:52146fc3bd7813cc784562cb93a15788be0b2875c4655e2cc6ea646bfa30344b",
"sha256:550cdda3adbd596078cca7d13ed50b77879104e2e46392dcd7c75259d8f00e85",
"sha256:616fabf4981a3b3c5a15cd95eba359c8489c4e20e03717aea42866d8d0465956",
"sha256:76cce0f31b351e3551d1f3779420cf8f6ec0d4a8cf9c0237a3b549fd28eb4abb",
"sha256:7ff2e239c26be4f24bfa45860c20ffccd118d270c5b5d081fa4ea409b5469fcd",
"sha256:8146ce83cbc5dc71c223a74a1996d446cd35cfb6a04b683e1446b7e6c73603b7",
"sha256:81c40af649d19c85f8073e25e5806926986806fa6d54be506fbf02aef47d5a89",
"sha256:9a2fa6d899e17ddca6d6526cf6e7ba677738bf2a6a9590d702c277204a7c6152",
"sha256:a5be985db2596d761cdf0c2eaf52396f26e6a64ab46bd8cd810c48972349d1be",
"sha256:af4001b7cae70f7eaacfb063db605280058246de590fa7874f00f62259f2df7e",
"sha256:bd4f2831168afac55b881db82a7730992aa41c4f007f1913465fb182d6fb20c0",
"sha256:bdd1ecbe268eb3e7653e04f451635f0fb0f77f07fd070242b44c076c9106da84",
"sha256:c53aeb514ccbbcbab55a27f912d79ea30ab21ee0531ee2c09f13800efb272674",
"sha256:c79f3a585f1368da6049318bdf1f85568d8d04b2e89fc24b7e02cc9b62017382",
"sha256:cd53c79fd02f1c1808d2cfc87dd3cf4dbc63c5244a58ee7944497107469c8d8a",
"sha256:d38e85a1a6d732f645f1403ce5e6727fd9418cd4574521d5803d3d94911038e5",
"sha256:d91a4ffc587bacf5c4ce4ecfe4bcd23a4b675e76315f2866e588686cc97fccdf",
"sha256:e6d29ea6c19e34b30fb7d88b7081f869a03014f66fe06d62cc77d5a6ea88ed7a",
"sha256:eaf3978060a106fab40c328778b148f590e27f6fa3cd15a19d6892575bce387d",
"sha256:fe428e191ea016bb278758c8ee82a8129c51d81d8c4bc0846c09e7e8e9057241"
"sha256:1c13f041a7178f9780fb61cc3a2b10423d5e125480e4be51beaf62b172413b67",
"sha256:232ce322bfd020a434caaffbd9a95333f7c2491e59cfc014041d95e38ab90d1c",
"sha256:493e9f6aa5819156b58fce42b296ea31969f2aab71c5b680b4ea7a3cb5c07d94",
"sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb",
"sha256:606e3b90897554c989b1e38a258c626d46c873523de432b1462f295db13de6f9",
"sha256:6209e5c9aaccc056e63b547a8152661324404dd92340a6e479b3a7f24b42a5d0",
"sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616",
"sha256:6addbd5b488aedb7f9bc19f91cd87ea476206f45d7116fcfe3d31416702a82fa",
"sha256:72f9322712e4562e792b2961971891b9fbbb0e525011e09ea0d1f416c4645661",
"sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a",
"sha256:8080d5081a86e690d7688ffa542532e87f224c38a6ed71f8fbed34dd1d9fedae",
"sha256:843cbde2f0946dadd8c5c11c6d91847abd18ec76859dc319362a0964493f0ba6",
"sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea",
"sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106",
"sha256:90df07db7b599fe7035d2f74ab7e438b656528c68ba6bb59b7dc46af39ee48ef",
"sha256:9bb0189011785ea794ee827b68777db3ca3f93f3e339ea4d920315a0e5a78d54",
"sha256:a0e47eda4eb2614300fc7bb4657fced3e83d6334d03da2173b09e447418d499f",
"sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014",
"sha256:ac24233e8f2939ac4fd2919eed1e9c0871eac8057666070e94cbf0b33dd9c338",
"sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25",
"sha256:b7a2a253d3b36d90c8993b4620183b55665a429da8357a4f621e78cd48b2b30b",
"sha256:c7064120a59ce6f64103c9cefba8ffe6fba87f2c61d67c401186423c9a20fd35",
"sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732",
"sha256:cc4ccdc64e3039fc303defd119658148f2349239871db72cd74e2eeaa9b80b71",
"sha256:ce1edd9f5383b504dbc26eeea404ed0a00656c526638129028b758fd43fc5f10",
"sha256:ecd79298550cba13a43c340581a3ec9c707bd895a6a061a78fa2524660482fc0",
"sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30",
"sha256:fb44f53af0a62dc80bba4443d9b27f2fde6acfdac281d95bc872dc148a6509cc"
],
"markers": "python_version >= '3.9'",
"version": "==3.9.0"
"version": "==3.8.4"
},
"mccabe": {
"hashes": [
@@ -2319,15 +2300,6 @@
],
"version": "==1.3.0"
},
"msal": {
"hashes": [
"sha256:3064f80221a21cd535ad8c3fafbb3a3582cd9c7e9af0bb789ae14f726a0ca99b",
"sha256:80bbabe34567cb734efd2ec1869b2d98195c927455369d8077b3c542088c5c9d"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==1.28.0"
},
"msg-parser": {
"hashes": [
"sha256:0de858d4fcebb6c8f6f028da83a17a20fe01cdce67c490779cf43b3b0162aa66",
@@ -2539,14 +2511,6 @@
"markers": "python_version >= '3.9'",
"version": "==1.26.4"
},
"oauthlib": {
"hashes": [
"sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca",
"sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"
],
"markers": "python_version >= '3.6'",
"version": "==3.2.2"
},
"olefile": {
"hashes": [
"sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f",
@@ -2631,12 +2595,12 @@
},
"openai": {
"hashes": [
"sha256:4f85190e577cba0b066e1950b8eb9b11d25bc7ebcc43a86b326ce1bfa564ec74",
"sha256:c9fb3c3545c118bbce8deb824397b9433a66d0d0ede6a96f7009c95b76de4a46"
"sha256:c61cd12376c84362d406341f9e2f9a9d6b81c082b133b44484dc0f43954496b1",
"sha256:d5a769f485610cff8bae14343fa45a8b1d346be3d541fa5b28ccd040dbc8baf8"
],
"index": "pypi",
"markers": "python_full_version >= '3.7.1'",
"version": "==1.30.1"
"version": "==1.29.0"
},
"opencv-python": {
"hashes": [
@@ -2761,12 +2725,12 @@
},
"pandasai": {
"hashes": [
"sha256:6d79381bcd1d89fd03e994cc33c4b24600fcfbc90fd842134b70d44277bd5fa9",
"sha256:cea4480edf1251b6b1648940ab74183eb0820610c7d3c001d219973aafa625c8"
"sha256:16ab0c1cf19a6bcd6ecca53d4925a019952409ad3b4382a61e79a2df16744ee9",
"sha256:6e2c59d954e6e474afd2308953a46f1bdbdbb8af469adf64a1fb648b7c3667a3"
],
"index": "pypi",
"markers": "python_version not in '2.7, 3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8' and python_version >= '3.9'",
"version": "==2.0.42"
"version": "==2.0.37"
},
"pathspec": {
"hashes": [
@@ -2975,11 +2939,11 @@
},
"platformdirs": {
"hashes": [
"sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee",
"sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"
"sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf",
"sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"
],
"markers": "python_version >= '3.8'",
"version": "==4.2.2"
"version": "==4.2.1"
},
"playwright": {
"hashes": [
@@ -3199,45 +3163,45 @@
},
"pyarrow": {
"hashes": [
"sha256:06ebccb6f8cb7357de85f60d5da50e83507954af617d7b05f48af1621d331c9a",
"sha256:0d07de3ee730647a600037bc1d7b7994067ed64d0eba797ac74b2bc77384f4c2",
"sha256:0d27bf89dfc2576f6206e9cd6cf7a107c9c06dc13d53bbc25b0bd4556f19cf5f",
"sha256:0d32000693deff8dc5df444b032b5985a48592c0697cb6e3071a5d59888714e2",
"sha256:15fbb22ea96d11f0b5768504a3f961edab25eaf4197c341720c4a387f6c60315",
"sha256:17e23b9a65a70cc733d8b738baa6ad3722298fa0c81d88f63ff94bf25eaa77b9",
"sha256:185d121b50836379fe012753cf15c4ba9638bda9645183ab36246923875f8d1b",
"sha256:18da9b76a36a954665ccca8aa6bd9f46c1145f79c0bb8f4f244f5f8e799bca55",
"sha256:19741c4dbbbc986d38856ee7ddfdd6a00fc3b0fc2d928795b95410d38bb97d15",
"sha256:25233642583bf658f629eb230b9bb79d9af4d9f9229890b3c878699c82f7d11e",
"sha256:2e51ca1d6ed7f2e9d5c3c83decf27b0d17bb207a7dea986e8dc3e24f80ff7d6f",
"sha256:2e73cfc4a99e796727919c5541c65bb88b973377501e39b9842ea71401ca6c1c",
"sha256:31a1851751433d89a986616015841977e0a188662fcffd1a5677453f1df2de0a",
"sha256:3b20bd67c94b3a2ea0a749d2a5712fc845a69cb5d52e78e6449bbd295611f3aa",
"sha256:4740cc41e2ba5d641071d0ab5e9ef9b5e6e8c7611351a5cb7c1d175eaf43674a",
"sha256:48be160782c0556156d91adbdd5a4a7e719f8d407cb46ae3bb4eaee09b3111bd",
"sha256:8785bb10d5d6fd5e15d718ee1d1f914fe768bf8b4d1e5e9bf253de8a26cb1628",
"sha256:98100e0268d04e0eec47b73f20b39c45b4006f3c4233719c3848aa27a03c1aef",
"sha256:99f7549779b6e434467d2aa43ab2b7224dd9e41bdde486020bae198978c9e05e",
"sha256:9cf389d444b0f41d9fe1444b70650fea31e9d52cfcb5f818b7888b91b586efff",
"sha256:a33a64576fddfbec0a44112eaf844c20853647ca833e9a647bfae0582b2ff94b",
"sha256:a8914cd176f448e09746037b0c6b3a9d7688cef451ec5735094055116857580c",
"sha256:b04707f1979815f5e49824ce52d1dceb46e2f12909a48a6a753fe7cafbc44a0c",
"sha256:b5f5705ab977947a43ac83b52ade3b881eb6e95fcc02d76f501d549a210ba77f",
"sha256:ba8ac20693c0bb0bf4b238751d4409e62852004a8cf031c73b0e0962b03e45e3",
"sha256:bf9251264247ecfe93e5f5a0cd43b8ae834f1e61d1abca22da55b20c788417f6",
"sha256:d0ebea336b535b37eee9eee31761813086d33ed06de9ab6fc6aaa0bace7b250c",
"sha256:ddf5aace92d520d3d2a20031d8b0ec27b4395cab9f74e07cc95edf42a5cc0147",
"sha256:ddfe389a08ea374972bd4065d5f25d14e36b43ebc22fc75f7b951f24378bf0b5",
"sha256:e1369af39587b794873b8a307cc6623a3b1194e69399af0efd05bb202195a5a7",
"sha256:e6b6d3cd35fbb93b70ade1336022cc1147b95ec6af7d36906ca7fe432eb09710",
"sha256:f07fdffe4fd5b15f5ec15c8b64584868d063bc22b86b46c9695624ca3505b7b4",
"sha256:f2c5fb249caa17b94e2b9278b36a05ce03d3180e6da0c4c3b3ce5b2788f30eed",
"sha256:f68f409e7b283c085f2da014f9ef81e885d90dcd733bd648cfba3ef265961848",
"sha256:fbef391b63f708e103df99fbaa3acf9f671d77a183a07546ba2f2c297b361e83",
"sha256:febde33305f1498f6df85e8020bca496d0e9ebf2093bab9e0f65e2b4ae2b3444"
"sha256:00a1dcb22ad4ceb8af87f7bd30cc3354788776c417f493089e0a0af981bc8d80",
"sha256:1ab8b9050752b16a8b53fcd9853bf07d8daf19093533e990085168f40c64d978",
"sha256:20ce707d9aa390593ea93218b19d0eadab56390311cb87aad32c9a869b0e958c",
"sha256:22a1fdb1254e5095d629e29cd1ea98ed04b4bbfd8e42cc670a6b639ccc208b60",
"sha256:266ddb7e823f03733c15adc8b5078db2df6980f9aa93d6bb57ece615df4e0ba7",
"sha256:2a7abdee4a4a7cfa239e2e8d721224c4b34ffe69a0ca7981354fe03c1328789b",
"sha256:35692ce8ad0b8c666aa60f83950957096d92f2a9d8d7deda93fb835e6053307e",
"sha256:3c2f5e239db7ed43e0ad2baf46a6465f89c824cc703f38ef0fde927d8e0955f7",
"sha256:42e56557bc7c5c10d3e42c3b32f6cff649a29d637e8f4e8b311d334cc4326730",
"sha256:5448564754c154997bc09e95a44b81b9e31ae918a86c0fcb35c4aa4922756f55",
"sha256:56850a0afe9ef37249d5387355449c0f94d12ff7994af88f16803a26d38f2016",
"sha256:574a00260a4ed9d118a14770edbd440b848fcae5a3024128be9d0274dbcaf858",
"sha256:5823275c8addbbb50cd4e6a6839952682a33255b447277e37a6f518d6972f4e1",
"sha256:59bb1f1edbbf4114c72415f039f1359f1a57d166a331c3229788ccbfbb31689a",
"sha256:5cc23090224b6594f5a92d26ad47465af47c1d9c079dd4a0061ae39551889efe",
"sha256:705db70d3e2293c2f6f8e84874b5b775f690465798f66e94bb2c07bab0a6bb55",
"sha256:71d52561cd7aefd22cf52538f262850b0cc9e4ec50af2aaa601da3a16ef48877",
"sha256:729f7b262aa620c9df8b9967db96c1575e4cfc8c25d078a06968e527b8d6ec05",
"sha256:91d28f9a40f1264eab2af7905a4d95320ac2f287891e9c8b0035f264fe3c3a4b",
"sha256:99af421ee451a78884d7faea23816c429e263bd3618b22d38e7992c9ce2a7ad9",
"sha256:9dd3151d098e56f16a8389c1247137f9e4c22720b01c6f3aa6dec29a99b74d80",
"sha256:b93c9a50b965ee0bf4fef65e53b758a7e8dcc0c2d86cebcc037aaaf1b306ecc0",
"sha256:bd40467bdb3cbaf2044ed7a6f7f251c8f941c8b31275aaaf88e746c4f3ca4a7a",
"sha256:c0815d0ddb733b8c1b53a05827a91f1b8bde6240f3b20bf9ba5d650eb9b89cdf",
"sha256:cc8814310486f2a73c661ba8354540f17eef51e1b6dd090b93e3419d3a097b3a",
"sha256:d22d0941e6c7bafddf5f4c0662e46f2075850f1c044bf1a03150dd9e189427ce",
"sha256:d831690844706e374c455fba2fb8cfcb7b797bfe53ceda4b54334316e1ac4fa4",
"sha256:d91073d1e2fef2c121154680e2ba7e35ecf8d4969cc0af1fa6f14a8675858159",
"sha256:dd9334a07b6dc21afe0857aa31842365a62eca664e415a3f9536e3a8bb832c07",
"sha256:df0080339387b5d30de31e0a149c0c11a827a10c82f0c67d9afae3981d1aabb7",
"sha256:ed66e5217b4526fa3585b5e39b0b82f501b88a10d36bd0d2a4d8aa7b5a48e2df",
"sha256:edf38cce0bf0dcf726e074159c60516447e4474904c0033f018c1f33d7dac6c5",
"sha256:ef2f309b68396bcc5a354106741d333494d6a0d3e1951271849787109f0229a6",
"sha256:f293e92d1db251447cb028ae12f7bc47526e4649c3a9924c8376cab4ad6b98bd",
"sha256:fb8065dbc0d051bf2ae2453af0484d99a43135cadabacf0af588a3be81fbbb9b",
"sha256:fda9a7cebd1b1d46c97b511f60f73a5b766a6de4c5236f144f41a5d5afec1f35"
],
"markers": "python_version >= '3.8'",
"version": "==16.1.0"
"version": "==16.0.0"
},
"pyarrow-hotfix": {
"hashes": [
@@ -3539,17 +3503,6 @@
"markers": "python_version >= '3.7'",
"version": "==4.6.2"
},
"pyjwt": {
"extras": [
"crypto"
],
"hashes": [
"sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de",
"sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"
],
"markers": "python_version >= '3.7'",
"version": "==2.8.0"
},
"pypandoc": {
"hashes": [
"sha256:31652073c7960c2b03570bd1e94f602ca9bc3e70099df5ead4cea98ff5151c1e",
@@ -3564,7 +3517,7 @@
"sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad",
"sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"
],
"markers": "python_version >= '3.1'",
"markers": "python_full_version >= '3.6.8'",
"version": "==3.1.2"
},
"pypdf": {
@@ -3595,28 +3548,14 @@
"markers": "python_version >= '3.6'",
"version": "==4.30.0"
},
"pyreqwest-impersonate": {
"hashes": [
"sha256:27db49d0e244d5ff31cce7f66ba3a4fac4a53c33445970e6c7467672d9c5f56f",
"sha256:482e6254fcc666234cc3a53cdbfa8642991a7e454e45233f040687f9c96ffa88",
"sha256:539d9a14f308a778a9a30afeb308207588703d1d2e74cd9b890cf9f5c4f86bce",
"sha256:9012709b52d84d940f193e77d5e73d4b61c782ad60220b6648a54865a372eece",
"sha256:910fb19e209a4a49156410226e010dff674c69e8c570817cc3fec9e8d80b6ea5",
"sha256:9ed0e07181a47af12624e50db388b0eb4e6c985be9a276ac5c7494fef492a100",
"sha256:d1b2f7a44c907699a8c8a40bcc9b25b5a8db6f37f292f106374314ef4d469f0b",
"sha256:f0aedda1b8b3d62c627c8a8921a2fc702977108f83c90d675fa1900e68ea8fe5"
],
"markers": "python_version >= '3.8'",
"version": "==0.4.5"
},
"pyright": {
"hashes": [
"sha256:00a8f0ae0e339473bb0488f8a2a2dcdf574e94a16cd7b4390d49d144714d8db2",
"sha256:d3b8d73c8d230e26cc3523862f3398032a0c39a00d7bb69dc0f595f8e888fd01"
"sha256:6a477e448d4a07a6a0eab58b2a15a1bbed031eb3169fa809edee79cca168d83a",
"sha256:969957cff45154d8a45a4ab1dae5bdc8223d8bd3c64654fa608ab3194dfff319"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==1.1.363"
"version": "==1.1.362"
},
"pysbd": {
"hashes": [
@@ -3928,88 +3867,88 @@
},
"regex": {
"hashes": [
"sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649",
"sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35",
"sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb",
"sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68",
"sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5",
"sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133",
"sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0",
"sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d",
"sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da",
"sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f",
"sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d",
"sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53",
"sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa",
"sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a",
"sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890",
"sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67",
"sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c",
"sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2",
"sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced",
"sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741",
"sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f",
"sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa",
"sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf",
"sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4",
"sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5",
"sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2",
"sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384",
"sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7",
"sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014",
"sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704",
"sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5",
"sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2",
"sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49",
"sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1",
"sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694",
"sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629",
"sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6",
"sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435",
"sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c",
"sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835",
"sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e",
"sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201",
"sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62",
"sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5",
"sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16",
"sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f",
"sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1",
"sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f",
"sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f",
"sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145",
"sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3",
"sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed",
"sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143",
"sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca",
"sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9",
"sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa",
"sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850",
"sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80",
"sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe",
"sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656",
"sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388",
"sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1",
"sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294",
"sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3",
"sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d",
"sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b",
"sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40",
"sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600",
"sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c",
"sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569",
"sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456",
"sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9",
"sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb",
"sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e",
"sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f",
"sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d",
"sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a",
"sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a",
"sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"
"sha256:031219782d97550c2098d9a68ce9e9eaefe67d2d81d8ff84c8354f9c009e720c",
"sha256:0709ba544cf50bd5cb843df4b8bb6701bae2b70a8e88da9add8386cbca5c1385",
"sha256:0a9f89d7db5ef6bdf53e5cc8e6199a493d0f1374b3171796b464a74ebe8e508a",
"sha256:0bc94873ba11e34837bffd7e5006703abeffc4514e2f482022f46ce05bd25e67",
"sha256:0ce56a923f4c01d7568811bfdffe156268c0a7aae8a94c902b92fe34c4bde785",
"sha256:0faecb6d5779753a6066a3c7a0471a8d29fe25d9981ca9e552d6d1b8f8b6a594",
"sha256:1118ba9def608250250f4b3e3f48c62f4562ba16ca58ede491b6e7554bfa09ff",
"sha256:12446827f43c7881decf2c126762e11425de5eb93b3b0d8b581344c16db7047a",
"sha256:14905ed75c7a6edf423eb46c213ed3f4507c38115f1ed3c00f4ec9eafba50e58",
"sha256:15e593386ec6331e0ab4ac0795b7593f02ab2f4b30a698beb89fbdc34f92386a",
"sha256:160ba087232c5c6e2a1e7ad08bd3a3f49b58c815be0504d8c8aacfb064491cd8",
"sha256:161a206c8f3511e2f5fafc9142a2cc25d7fe9a1ec5ad9b4ad2496a7c33e1c5d2",
"sha256:169fd0acd7a259f58f417e492e93d0e15fc87592cd1e971c8c533ad5703b5830",
"sha256:193b7c6834a06f722f0ce1ba685efe80881de7c3de31415513862f601097648c",
"sha256:1a3903128f9e17a500618e80c68165c78c741ebb17dd1a0b44575f92c3c68b02",
"sha256:1d5bd666466c8f00a06886ce1397ba8b12371c1f1c6d1bef11013e9e0a1464a8",
"sha256:224a9269f133564109ce668213ef3cb32bc72ccf040b0b51c72a50e569e9dc9e",
"sha256:236cace6c1903effd647ed46ce6dd5d76d54985fc36dafc5256032886736c85d",
"sha256:249fbcee0a277c32a3ce36d8e36d50c27c968fdf969e0fbe342658d4e010fbc8",
"sha256:29d839829209f3c53f004e1de8c3113efce6d98029f044fa5cfee666253ee7e6",
"sha256:2c8982ee19ccecabbaeac1ba687bfef085a6352a8c64f821ce2f43e6d76a9298",
"sha256:2f30a5ab8902f93930dc6f627c4dd5da2703333287081c85cace0fc6e21c25af",
"sha256:304e7e2418146ae4d0ef0e9ffa28f881f7874b45b4994cc2279b21b6e7ae50c8",
"sha256:32e5f3b8e32918bfbdd12eca62e49ab3031125c454b507127ad6ecbd86e62fca",
"sha256:334b79ce9c08f26b4659a53f42892793948a613c46f1b583e985fd5a6bf1c149",
"sha256:33d19f0cde6838c81acffff25c7708e4adc7dd02896c9ec25c3939b1500a1778",
"sha256:3799e36d60a35162bb35b2246d8bb012192b7437dff807ef79c14e7352706306",
"sha256:42be5de7cc8c1edac55db92d82b68dc8e683b204d6f5414c5a51997a323d7081",
"sha256:44b3267cea873684af022822195298501568ed44d542f9a2d9bebc0212e99069",
"sha256:458d68d34fb74b906709735c927c029e62f7d06437a98af1b5b6258025223210",
"sha256:45cc13d398b6359a7708986386f72bd156ae781c3e83a68a6d4cee5af04b1ce9",
"sha256:4e7eaf9df15423d07b6050fb91f86c66307171b95ea53e2d87a7993b6d02c7f7",
"sha256:4fad420b14ae1970a1f322e8ae84a1d9d89375eb71e1b504060ab2d1bfe68f3c",
"sha256:504b5116e2bd1821efd815941edff7535e93372a098e156bb9dffde30264e798",
"sha256:50e7e96a527488334379e05755b210b7da4a60fc5d6481938c1fa053e0c92184",
"sha256:51d27844763c273a122e08a3e86e7aefa54ee09fb672d96a645ece0454d8425e",
"sha256:5253dcb0bfda7214523de58b002eb0090cb530d7c55993ce5f6d17faf953ece7",
"sha256:534efd2653ebc4f26fc0e47234e53bf0cb4715bb61f98c64d2774a278b58c846",
"sha256:560278c9975694e1f0bc50da187abf2cdc1e4890739ea33df2bc4a85eeef143e",
"sha256:571452362d552de508c37191b6abbbb660028b8b418e2d68c20779e0bc8eaaa8",
"sha256:62b5f7910b639f3c1d122d408421317c351e213ca39c964ad4121f27916631c6",
"sha256:696639a73ca78a380acfaa0a1f6dd8220616a99074c05bba9ba8bb916914b224",
"sha256:6ccdeef4584450b6f0bddd5135354908dacad95425fcb629fe36d13e48b60f32",
"sha256:70364a097437dd0a90b31cd77f09f7387ad9ac60ef57590971f43b7fca3082a5",
"sha256:7117cb7d6ac7f2e985f3d18aa8a1728864097da1a677ffa69e970ca215baebf1",
"sha256:7467ad8b0eac0b28e52679e972b9b234b3de0ea5cee12eb50091d2b68145fe36",
"sha256:7d35d4cc9270944e95f9c88af757b0c9fc43f396917e143a5756608462c5223b",
"sha256:7dda3091838206969c2b286f9832dff41e2da545b99d1cfaea9ebd8584d02708",
"sha256:853cc36e756ff673bf984e9044ccc8fad60b95a748915dddeab9488aea974c73",
"sha256:8722f72068b3e1156a4b2e1afde6810f1fc67155a9fa30a4b9d5b4bc46f18fb0",
"sha256:8c6c71cf92b09e5faa72ea2c68aa1f61c9ce11cb66fdc5069d712f4392ddfd00",
"sha256:903350bf44d7e4116b4d5898b30b15755d61dcd3161e3413a49c7db76f0bee5a",
"sha256:91b53dea84415e8115506cc62e441a2b54537359c63d856d73cb1abe05af4c9a",
"sha256:951be1eae7b47660412dc4938777a975ebc41936d64e28081bf2e584b47ec246",
"sha256:972b49f2fe1047b9249c958ec4fa1bdd2cf8ce305dc19d27546d5a38e57732d8",
"sha256:9a8625849387b9d558d528e263ecc9c0fbde86cfa5c2f0eef43fff480ae24d71",
"sha256:9cdbb1998da94607d5eec02566b9586f0e70d6438abf1b690261aac0edda7ab6",
"sha256:9e6d4d6ae1827b2f8c7200aaf7501c37cf3f3896c86a6aaf2566448397c823dd",
"sha256:aab65121229c2ecdf4a31b793d99a6a0501225bd39b616e653c87b219ed34a49",
"sha256:ab98016541543692a37905871a5ffca59b16e08aacc3d7d10a27297b443f572d",
"sha256:ad45f3bccfcb00868f2871dce02a755529838d2b86163ab8a246115e80cfb7d6",
"sha256:b43b78f9386d3d932a6ce5af4b45f393d2e93693ee18dc4800d30a8909df700e",
"sha256:b66421f8878a0c82fc0c272a43e2121c8d4c67cb37429b764f0d5ad70b82993b",
"sha256:ba034c8db4b264ef1601eb33cd23d87c5013b8fb48b8161debe2e5d3bd9156b0",
"sha256:bbdc5db2c98ac2bf1971ffa1410c87ca7a15800415f788971e8ba8520fc0fda9",
"sha256:bc0db93ad039fc2fe32ccd3dd0e0e70c4f3d6e37ae83f0a487e1aba939bd2fbd",
"sha256:bf7c8ee4861d9ef5b1120abb75846828c811f932d63311596ad25fa168053e00",
"sha256:bf9596cba92ce7b1fd32c7b07c6e3212c7eed0edc271757e48bfcd2b54646452",
"sha256:c43395a3b7cc9862801a65c6994678484f186ce13c929abab44fb8a9e473a55a",
"sha256:c46a76a599fcbf95f98755275c5527304cc4f1bb69919434c1e15544d7052910",
"sha256:ca23b41355ba95929e9505ee04e55495726aa2282003ed9b012d86f857d3e49b",
"sha256:cd832bd9b6120d6074f39bdfbb3c80e416848b07ac72910f1c7f03131a6debc3",
"sha256:cfa6d61a76c77610ba9274c1a90a453062bdf6887858afbe214d18ad41cf6bde",
"sha256:d8a0f0ab5453e409586b11ebe91c672040bc804ca98d03a656825f7890cbdf88",
"sha256:e91b1976358e17197157b405cab408a5f4e33310cda211c49fc6da7cffd0b2f0",
"sha256:ea057306ab469130167014b662643cfaed84651c792948891d003cf0039223a5",
"sha256:eda3dd46df535da787ffb9036b5140f941ecb91701717df91c9daf64cabef953",
"sha256:f03b1dbd4d9596dd84955bb40f7d885204d6aac0d56a919bb1e0ff2fb7e1735a",
"sha256:fa9335674d7c819674467c7b46154196c51efbaf5f5715187fd366814ba3fa39"
],
"markers": "python_version >= '3.8'",
"version": "==2024.5.15"
"version": "==2024.5.10"
},
"requests": {
"hashes": [
@@ -4026,22 +3965,14 @@
],
"version": "==2.0.0"
},
"requests-oauthlib": {
"hashes": [
"sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36",
"sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"
],
"markers": "python_version >= '3.4'",
"version": "==2.0.0"
},
"resend": {
"hashes": [
"sha256:4ade3dfe972175e32504a2b347274cb665b428f63e9b4c247c71b683ee8ba2ff",
"sha256:f68c7066343167e25714560dba773ef5529ed4b875056c06d6e97e1a2935f04b"
"sha256:12c7ec373e40ff5f4b107c1a72eee3df9ddf5f95f682b491681f8175df2962c1",
"sha256:c6d8ad5d00f39cf830e8f8ba1bb31424745be2c98010e599ad758e872acd1d5e"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==1.0.2"
"version": "==1.0.1"
},
"retry": {
"hashes": [
@@ -4216,11 +4147,11 @@
"fastapi"
],
"hashes": [
"sha256:674f58da37835ea7447fe0e34c57b4a4277fad558b0a7cb4a6c83bcb263086be",
"sha256:70eca103cf4c6302365a9d7cf522e7ed7720828910eb23d43ada8e50d1ecda9d"
"sha256:95d8c0bb41c8b0bc37ab202c2c4a295bb84398ee05f4cdce55051cd75b926ec1",
"sha256:99aeb78fb76771513bd3b2829d12613130152620768d00cd3e45ac00cb17950f"
],
"markers": "python_version >= '3.6'",
"version": "==2.2.0"
"version": "==2.1.1"
},
"setuptools": {
"hashes": [
@@ -4459,11 +4390,11 @@
},
"timm": {
"hashes": [
"sha256:83920a7efe2cfd503b2a1257dc8808d6ff7dcd18a4b79f451c283e7d71497329",
"sha256:d1ec86f7765aa79fbc7491508fa6e285d38a38f10bf4fe44ba2e9c70f91f0f5b"
"sha256:891e54f375d55adf31a71ab0c117761f0e472f9f3971858ecdd1e7376b7071e6",
"sha256:bf5704014476ab011589d3c14172ee4c901fd18f9110a928019cac5be2945914"
],
"markers": "python_version >= '3.8'",
"version": "==1.0.3"
"version": "==0.9.16"
},
"tinysegmenter": {
"hashes": [
@@ -4709,87 +4640,74 @@
},
"ujson": {
"hashes": [
"sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e",
"sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b",
"sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6",
"sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7",
"sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9",
"sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd",
"sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569",
"sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f",
"sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51",
"sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20",
"sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1",
"sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf",
"sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc",
"sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e",
"sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a",
"sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539",
"sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27",
"sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165",
"sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126",
"sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1",
"sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816",
"sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64",
"sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8",
"sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e",
"sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287",
"sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3",
"sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb",
"sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0",
"sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043",
"sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557",
"sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e",
"sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21",
"sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d",
"sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd",
"sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0",
"sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337",
"sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753",
"sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804",
"sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f",
"sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f",
"sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5",
"sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5",
"sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1",
"sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00",
"sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2",
"sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050",
"sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e",
"sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4",
"sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8",
"sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996",
"sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6",
"sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1",
"sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f",
"sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1",
"sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4",
"sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b",
"sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88",
"sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518",
"sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5",
"sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770",
"sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4",
"sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a",
"sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76",
"sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe",
"sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988",
"sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1",
"sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5",
"sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b",
"sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7",
"sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8",
"sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc",
"sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a",
"sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720",
"sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3",
"sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b",
"sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9",
"sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1",
"sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"
"sha256:07e0cfdde5fd91f54cd2d7ffb3482c8ff1bf558abf32a8b953a5d169575ae1cd",
"sha256:0b159efece9ab5c01f70b9d10bbb77241ce111a45bc8d21a44c219a2aec8ddfd",
"sha256:0c4d6adb2c7bb9eb7c71ad6f6f612e13b264942e841f8cc3314a21a289a76c4e",
"sha256:10ca3c41e80509fd9805f7c149068fa8dbee18872bbdc03d7cca928926a358d5",
"sha256:20509a8c9f775b3a511e308bbe0b72897ba6b800767a7c90c5cca59d20d7c42c",
"sha256:25fa46e4ff0a2deecbcf7100af3a5d70090b461906f2299506485ff31d9ec437",
"sha256:2a8ea0f55a1396708e564595aaa6696c0d8af532340f477162ff6927ecc46e21",
"sha256:2fbb90aa5c23cb3d4b803c12aa220d26778c31b6e4b7a13a1f49971f6c7d088e",
"sha256:323279e68c195110ef85cbe5edce885219e3d4a48705448720ad925d88c9f851",
"sha256:32bba5870c8fa2a97f4a68f6401038d3f1922e66c34280d710af00b14a3ca562",
"sha256:3382a3ce0ccc0558b1c1668950008cece9bf463ebb17463ebf6a8bfc060dae34",
"sha256:37ef92e42535a81bf72179d0e252c9af42a4ed966dc6be6967ebfb929a87bc60",
"sha256:3b23bbb46334ce51ddb5dded60c662fbf7bb74a37b8f87221c5b0fec1ec6454b",
"sha256:473fb8dff1d58f49912323d7cb0859df5585cfc932e4b9c053bf8cf7f2d7c5c4",
"sha256:4a566e465cb2fcfdf040c2447b7dd9718799d0d90134b37a20dff1e27c0e9096",
"sha256:4e35d7885ed612feb6b3dd1b7de28e89baaba4011ecdf995e88be9ac614765e9",
"sha256:506a45e5fcbb2d46f1a51fead991c39529fc3737c0f5d47c9b4a1d762578fc30",
"sha256:5635b78b636a54a86fdbf6f027e461aa6c6b948363bdf8d4fbb56a42b7388320",
"sha256:5ca35f484622fd208f55041b042d9d94f3b2c9c5add4e9af5ee9946d2d30db01",
"sha256:60718f1720a61560618eff3b56fd517d107518d3c0160ca7a5a66ac949c6cf1c",
"sha256:63fb2e6599d96fdffdb553af0ed3f76b85fda63281063f1cb5b1141a6fcd0617",
"sha256:6974b3a7c17bbf829e6c3bfdc5823c67922e44ff169851a755eab79a3dd31ec0",
"sha256:6adef377ed583477cf005b58c3025051b5faa6b8cc25876e594afbb772578f21",
"sha256:6bbd91a151a8f3358c29355a491e915eb203f607267a25e6ab10531b3b157c5e",
"sha256:6eecbd09b316cea1fd929b1e25f70382917542ab11b692cb46ec9b0a26c7427f",
"sha256:70e06849dfeb2548be48fdd3ceb53300640bc8100c379d6e19d78045e9c26120",
"sha256:7309d063cd392811acc49b5016728a5e1b46ab9907d321ebbe1c2156bc3c0b99",
"sha256:779a2a88c53039bebfbccca934430dabb5c62cc179e09a9c27a322023f363e0d",
"sha256:7a365eac66f5aa7a7fdf57e5066ada6226700884fc7dce2ba5483538bc16c8c5",
"sha256:7b1c0991c4fe256f5fdb19758f7eac7f47caac29a6c57d0de16a19048eb86bad",
"sha256:7cc7e605d2aa6ae6b7321c3ae250d2e050f06082e71ab1a4200b4ae64d25863c",
"sha256:829a69d451a49c0de14a9fecb2a2d544a9b2c884c2b542adb243b683a6f15908",
"sha256:829b824953ebad76d46e4ae709e940bb229e8999e40881338b3cc94c771b876c",
"sha256:82b5a56609f1235d72835ee109163c7041b30920d70fe7dac9176c64df87c164",
"sha256:89cc92e73d5501b8a7f48575eeb14ad27156ad092c2e9fc7e3cf949f07e75532",
"sha256:8ba7cac47dd65ff88571eceeff48bf30ed5eb9c67b34b88cb22869b7aa19600d",
"sha256:8fc2aa18b13d97b3c8ccecdf1a3c405f411a6e96adeee94233058c44ff92617d",
"sha256:9ac92d86ff34296f881e12aa955f7014d276895e0e4e868ba7fddebbde38e378",
"sha256:9d302bd17989b6bd90d49bade66943c78f9e3670407dbc53ebcf61271cadc399",
"sha256:9f21315f51e0db8ee245e33a649dd2d9dce0594522de6f278d62f15f998e050e",
"sha256:a6d3f10eb8ccba4316a6b5465b705ed70a06011c6f82418b59278fbc919bef6f",
"sha256:a807ae73c46ad5db161a7e883eec0fbe1bebc6a54890152ccc63072c4884823b",
"sha256:ab71bf27b002eaf7d047c54a68e60230fbd5cd9da60de7ca0aa87d0bccead8fa",
"sha256:b048aa93eace8571eedbd67b3766623e7f0acbf08ee291bef7d8106210432427",
"sha256:b28407cfe315bd1b34f1ebe65d3bd735d6b36d409b334100be8cdffae2177b2f",
"sha256:b5964ea916edfe24af1f4cc68488448fbb1ec27a3ddcddc2b236da575c12c8ae",
"sha256:b68a0caab33f359b4cbbc10065c88e3758c9f73a11a65a91f024b2e7a1257106",
"sha256:ba0823cb70866f0d6a4ad48d998dd338dce7314598721bc1b7986d054d782dfd",
"sha256:bd4ea86c2afd41429751d22a3ccd03311c067bd6aeee2d054f83f97e41e11d8f",
"sha256:bdf7fc21a03bafe4ba208dafa84ae38e04e5d36c0e1c746726edf5392e9f9f36",
"sha256:c4eec2ddc046360d087cf35659c7ba0cbd101f32035e19047013162274e71fcf",
"sha256:cdcb02cabcb1e44381221840a7af04433c1dc3297af76fde924a50c3054c708c",
"sha256:d0fd2eba664a22447102062814bd13e63c6130540222c0aa620701dd01f4be81",
"sha256:d581db9db9e41d8ea0b2705c90518ba623cbdc74f8d644d7eb0d107be0d85d9c",
"sha256:dc80f0f5abf33bd7099f7ac94ab1206730a3c0a2d17549911ed2cb6b7aa36d2d",
"sha256:e015122b337858dba5a3dc3533af2a8fc0410ee9e2374092f6a5b88b182e9fcc",
"sha256:e208d3bf02c6963e6ef7324dadf1d73239fb7008491fdf523208f60be6437402",
"sha256:e2f909bc08ce01f122fd9c24bc6f9876aa087188dfaf3c4116fe6e4daf7e194f",
"sha256:f0cb4a7814940ddd6619bdce6be637a4b37a8c4760de9373bac54bb7b229698b",
"sha256:f4b3917296630a075e04d3d07601ce2a176479c23af838b6cf90a2d6b39b0d95",
"sha256:f69f16b8f1c69da00e38dc5f2d08a86b0e781d0ad3e4cc6a13ea033a439c4844",
"sha256:f833c529e922577226a05bc25b6a8b3eb6c4fb155b72dd88d33de99d53113124",
"sha256:f91719c6abafe429c1a144cfe27883eace9fb1c09a9c5ef1bcb3ae80a3076a4e",
"sha256:ff741a5b4be2d08fceaab681c9d4bc89abf3c9db600ab435e20b9b6d4dfef12e",
"sha256:ffdfebd819f492e48e4f31c97cb593b9c1a8251933d8f8972e81697f00326ff1"
],
"markers": "python_version >= '3.8'",
"version": "==5.10.0"
"version": "==5.9.0"
},
"unidecode": {
"hashes": [
@@ -4834,14 +4752,6 @@
],
"version": "==0.3.12"
},
"uritemplate": {
"hashes": [
"sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0",
"sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"
],
"markers": "python_version >= '3.6'",
"version": "==4.1.1"
},
"urllib3": {
"hashes": [
"sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d",
@@ -5419,11 +5329,11 @@
},
"zipp": {
"hashes": [
"sha256:6278d9ddbcfb1f1089a88fde84481528b07b0e10474e09dcfe53dad4069fa059",
"sha256:dce197b859eb796242b0622af1b8beb0a722d52aa2f57133ead08edd5bf5374e"
"sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b",
"sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"
],
"markers": "python_version >= '3.8'",
"version": "==3.18.2"
"version": "==3.18.1"
}
},
"develop": {
@@ -5630,11 +5540,11 @@
},
"platformdirs": {
"hashes": [
"sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee",
"sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"
"sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf",
"sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"
],
"markers": "python_version >= '3.8'",
"version": "==4.2.2"
"version": "==4.2.1"
},
"prompt-toolkit": {
"hashes": [

@@ -36,6 +36,3 @@ elif CELERY_BROKER_URL.startswith("redis"):
)
else:
raise ValueError(f"Unsupported broker URL: {CELERY_BROKER_URL}")
celery.autodiscover_tasks(["modules.sync.task"])
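
The removed call is what told Celery where to find the sync task module. A minimal sketch of the discovery pattern follows, with the app name and broker URL taken from the surrounding config and the package layout assumed:

from celery import Celery

celery = Celery("quivr", broker="redis://redis:6379/0")
# For each listed package, Celery imports "<package>.<related_name>" at startup
# and registers any task decorated in it; dropping the call, as this revert
# does, leaves the sync tasks unregistered.
celery.autodiscover_tasks(["modules.sync"], related_name="task")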

@@ -217,8 +217,4 @@ celery.conf.beat_schedule = {
"task": f"{__name__}.ping_telemetry",
"schedule": crontab(minute="*/30", hour="*"),
},
"process_sync_active": {
"task": "process_sync_active",
"schedule": crontab(minute="*/5", hour="*"),
},
}

@@ -24,7 +24,6 @@ from modules.knowledge.controller import knowledge_router
from modules.misc.controller import misc_router
from modules.onboarding.controller import onboarding_router
from modules.prompt.controller import prompt_router
from modules.sync.controller import sync_router
from modules.upload.controller import upload_router
from modules.user.controller import user_router
from packages.utils import handle_request_validation_error
@@ -79,7 +78,6 @@ app.include_router(brain_router)
app.include_router(chat_router)
app.include_router(crawl_router)
app.include_router(assistant_router)
app.include_router(sync_router)
app.include_router(onboarding_router)
app.include_router(misc_router)
app.include_router(analytics_router)

@@ -1,7 +1,7 @@
from typing import Optional
from uuid import UUID
from langchain_community.embeddings.ollama import OllamaEmbeddings
from langchain_community.embeddings import OllamaEmbeddings
from langchain_openai import OpenAIEmbeddings
from logger import get_logger
from models.databases.supabase.supabase import SupabaseDB
@ -9,7 +9,8 @@ from posthog import Posthog
from pydantic_settings import BaseSettings, SettingsConfigDict
from sqlalchemy import Engine, create_engine
from supabase.client import Client, create_client
from langchain_community.vectorstores.supabase import SupabaseVectorStore
from vectorstore.supabase import SupabaseVectorStore
logger = get_logger(__name__)

View File

@ -1,4 +1,7 @@
from .api_brain_definitions import ApiBrainDefinitions
from .brains import Brains
from .brains_users import BrainsUsers
from .brains_vectors import BrainsVectors
from .composite_brains_connections import CompositeBrainsConnections
from .external_api_secrets import ExternalApiSecrets
from .integration_brains import IntegrationBrain, IntegrationDescription

View File

@ -11,6 +11,8 @@ from modules.brain.repository import (
Brains,
BrainsUsers,
BrainsVectors,
CompositeBrainsConnections,
ExternalApiSecrets,
IntegrationBrain,
IntegrationDescription,
)
@ -18,6 +20,7 @@ from modules.brain.repository.interfaces import (
BrainsInterface,
BrainsUsersInterface,
BrainsVectorsInterface,
CompositeBrainsConnectionsInterface,
ExternalApiSecretsInterface,
IntegrationBrainInterface,
IntegrationDescriptionInterface,
@ -39,6 +42,7 @@ class BrainService:
brain_user_repository: BrainsUsersInterface
brain_vector_repository: BrainsVectorsInterface
external_api_secrets_repository: ExternalApiSecretsInterface
composite_brains_connections_repository: CompositeBrainsConnectionsInterface
integration_brains_repository: IntegrationBrainInterface
integration_description_repository: IntegrationDescriptionInterface
@ -46,6 +50,8 @@ class BrainService:
self.brain_repository = Brains()
self.brain_user_repository = BrainsUsers()
self.brain_vector = BrainsVectors()
self.external_api_secrets_repository = ExternalApiSecrets()
self.composite_brains_connections_repository = CompositeBrainsConnections()
self.integration_brains_repository = IntegrationBrain()
self.integration_description_repository = IntegrationDescription()

View File

@ -10,8 +10,8 @@ from modules.brain.service.brain_authorization_service import (
)
from modules.brain.service.brain_vector_service import BrainVectorService
from modules.knowledge.service.knowledge_service import KnowledgeService
from modules.upload.service.generate_file_signed_url import generate_file_signed_url
from modules.user.entity.user_identity import UserIdentity
from modules.upload.service.generate_file_signed_url import generate_file_signed_url
knowledge_router = APIRouter()
logger = get_logger(__name__)

View File

@ -1,8 +1,6 @@
from models.settings import get_supabase_client
from modules.notification.dto.inputs import (
CreateNotification,
NotificationUpdatableProperties,
)
from modules.notification.dto.inputs import NotificationUpdatableProperties
from modules.notification.entity.notification import Notification
from modules.notification.repository.notifications import Notifications
from modules.notification.repository.notifications_interface import (
NotificationInterface,
@ -16,7 +14,7 @@ class NotificationService:
supabase_client = get_supabase_client()
self.repository = Notifications(supabase_client)
def add_notification(self, notification: CreateNotification):
def add_notification(self, notification: Notification):
"""
Add a notification
"""

View File

@ -1 +0,0 @@
from .sync_routes import sync_router

View File

@ -1,113 +0,0 @@
import os
from fastapi import APIRouter, Depends, HTTPException, Request
from logger import get_logger
from middlewares.auth import AuthBearer, get_current_user
from modules.sync.dto.inputs import SyncsUserInput, SyncUserUpdateInput
from modules.sync.service.sync_service import SyncService, SyncUserService
from modules.user.entity.user_identity import UserIdentity
from msal import PublicClientApplication
# Initialize logger
logger = get_logger(__name__)
# Initialize sync service
sync_service = SyncService()
sync_user_service = SyncUserService()
# Initialize API router
azure_sync_router = APIRouter()
# Constants
CLIENT_ID = os.getenv("SHAREPOINT_CLIENT_ID")
AUTHORITY = "https://login.microsoftonline.com/common"
BACKEND_URL = os.getenv("BACKEND_URL", "http://localhost:5050")
REDIRECT_URI = f"{BACKEND_URL}/sync/azure/oauth2callback"
SCOPE = [
"https://graph.microsoft.com/Files.Read",
"https://graph.microsoft.com/User.Read",
"https://graph.microsoft.com/Sites.Read.All",
]
@azure_sync_router.get(
"/sync/azure/authorize",
dependencies=[Depends(AuthBearer())],
tags=["Sync"],
)
def authorize_azure(
request: Request, name: str, current_user: UserIdentity = Depends(get_current_user)
):
"""
Authorize Azure sync for the current user.
Args:
request (Request): The request object.
name (str): The display name to store for this sync connection.
current_user (UserIdentity): The current authenticated user.
Returns:
dict: A dictionary containing the authorization URL.
"""
client = PublicClientApplication(CLIENT_ID, authority=AUTHORITY)
logger.debug(f"Authorizing Azure sync for user: {current_user.id}")
state = f"user_id={current_user.id}"
authorization_url = client.get_authorization_request_url(
scopes=SCOPE, redirect_uri=REDIRECT_URI, state=state
)
sync_user_input = SyncsUserInput(
user_id=str(current_user.id),
name=name,
provider="Azure",
credentials={},
state={"state": state},
)
sync_user_service.create_sync_user(sync_user_input)
return {"authorization_url": authorization_url}
@azure_sync_router.get("/sync/azure/oauth2callback", tags=["Sync"])
def oauth2callback_azure(request: Request):
"""
Handle OAuth2 callback from Azure.
Args:
request (Request): The request object.
Returns:
dict: A dictionary containing a success message.
"""
client = PublicClientApplication(CLIENT_ID, authority=AUTHORITY)
state = request.query_params.get("state")
state_dict = {"state": state}
current_user = state.split("=")[1] # Extract user_id from state
logger.debug(
f"Handling OAuth2 callback for user: {current_user} with state: {state}"
)
sync_user_state = sync_user_service.get_sync_user_by_state(state_dict)
logger.info(f"Retrieved sync user state: {sync_user_state}")
if state_dict != sync_user_state["state"]:
logger.error("Invalid state parameter")
raise HTTPException(status_code=400, detail="Invalid state parameter")
if sync_user_state.get("user_id") != current_user:
logger.error("Invalid user")
raise HTTPException(status_code=400, detail="Invalid user")
result = client.acquire_token_by_authorization_code(
request.query_params.get("code"), scopes=SCOPE, redirect_uri=REDIRECT_URI
)
if "access_token" not in result:
logger.error("Failed to acquire token")
raise HTTPException(status_code=400, detail="Failed to acquire token")
creds = result
logger.info(f"Fetched OAuth2 token for user: {current_user}")
sync_user_input = SyncUserUpdateInput(
credentials=creds,
state={},
)
sync_user_service.update_sync_user(current_user, state_dict, sync_user_input)
logger.info(f"Azure sync created successfully for user: {current_user}")
return {"message": "Azure sync created successfully"}

View File

@ -1,131 +0,0 @@
import json
import os
from fastapi import APIRouter, Depends, HTTPException, Request
from google_auth_oauthlib.flow import Flow
from logger import get_logger
from middlewares.auth import AuthBearer, get_current_user
from modules.sync.dto.inputs import SyncsUserInput, SyncUserUpdateInput
from modules.sync.service.sync_service import SyncService, SyncUserService
from modules.user.entity.user_identity import UserIdentity
# Set environment variable for OAuthlib
os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"
# Initialize logger
logger = get_logger(__name__)
# Initialize sync service
sync_service = SyncService()
sync_user_service = SyncUserService()
# Initialize API router
google_sync_router = APIRouter()
# Constants
SCOPES = [
"https://www.googleapis.com/auth/drive.metadata.readonly",
"https://www.googleapis.com/auth/drive.readonly",
]
BACKEND_URL = os.getenv("BACKEND_URL", "http://localhost:5050")
BASE_REDIRECT_URI = f"{BACKEND_URL}/sync/google/oauth2callback"
# Create credentials content from environment variables
CLIENT_SECRETS_FILE_CONTENT = {
"installed": {
"client_id": os.getenv("GOOGLE_CLIENT_ID"),
"project_id": os.getenv("GOOGLE_PROJECT_ID"),
"auth_uri": os.getenv("GOOGLE_AUTH_URI"),
"token_uri": os.getenv("GOOGLE_TOKEN_URI"),
"auth_provider_x509_cert_url": os.getenv("GOOGLE_AUTH_PROVIDER_CERT_URL"),
"client_secret": os.getenv("GOOGLE_CLIENT_SECRET"),
"redirect_uris": [os.getenv("GOOGLE_REDIRECT_URI")],
}
}
@google_sync_router.get(
"/sync/google/authorize",
dependencies=[Depends(AuthBearer())],
tags=["Sync"],
)
def authorize_google(
request: Request, name: str, current_user: UserIdentity = Depends(get_current_user)
):
"""
Authorize Google Drive sync for the current user.
Args:
request (Request): The request object.
name (str): The display name to store for this sync connection.
current_user (UserIdentity): The current authenticated user.
Returns:
dict: A dictionary containing the authorization URL.
"""
logger.debug(f"Authorizing Google Drive sync for user: {current_user.id}")
redirect_uri = f"{BASE_REDIRECT_URI}?user_id={current_user.id}"
flow = Flow.from_client_config(
CLIENT_SECRETS_FILE_CONTENT, scopes=SCOPES, redirect_uri=redirect_uri
)
authorization_url, state = flow.authorization_url(
access_type="offline", include_granted_scopes="true"
)
logger.info(
f"Generated authorization URL: {authorization_url} for user: {current_user.id}"
)
sync_user_input = SyncsUserInput(
name=name,
user_id=str(current_user.id),
provider="Google",
credentials={},
state={"state": state},
)
sync_user_service.create_sync_user(sync_user_input)
return {"authorization_url": authorization_url}
@google_sync_router.get("/sync/google/oauth2callback", tags=["Sync"])
def oauth2callback_google(request: Request):
"""
Handle OAuth2 callback from Google.
Args:
request (Request): The request object.
Returns:
dict: A dictionary containing a success message.
"""
state = request.query_params.get("state")
state_dict = {"state": state}
current_user = request.query_params.get("user_id")
logger.debug(
f"Handling OAuth2 callback for user: {current_user} with state: {state}"
)
sync_user_state = sync_user_service.get_sync_user_by_state(state_dict)
logger.info(f"Retrieved sync user state: {sync_user_state}")
if state_dict != sync_user_state["state"]:
logger.error("Invalid state parameter")
raise HTTPException(status_code=400, detail="Invalid state parameter")
if sync_user_state.get("user_id") != current_user:
logger.error("Invalid user")
raise HTTPException(status_code=400, detail="Invalid user")
redirect_uri = f"{BASE_REDIRECT_URI}?user_id={current_user}"
flow = Flow.from_client_config(
CLIENT_SECRETS_FILE_CONTENT,
scopes=SCOPES,
state=state,
redirect_uri=redirect_uri,
)
flow.fetch_token(authorization_response=str(request.url))
creds = flow.credentials
logger.info(f"Fetched OAuth2 token for user: {current_user}")
sync_user_input = SyncUserUpdateInput(
credentials=json.loads(creds.to_json()),
state={},
)
sync_user_service.update_sync_user(current_user, state_dict, sync_user_input)
logger.info(f"Google Drive sync created successfully for user: {current_user}")
return {"message": "Google Drive sync created successfully"}

View File

@ -1,231 +0,0 @@
import os
from typing import List
from fastapi import APIRouter, Depends, status
from logger import get_logger
from middlewares.auth import AuthBearer, get_current_user
from modules.sync.controller.azure_sync_routes import azure_sync_router
from modules.sync.controller.google_sync_routes import google_sync_router
from modules.sync.dto import SyncsDescription
from modules.sync.dto.inputs import SyncsActiveInput, SyncsActiveUpdateInput
from modules.sync.dto.outputs import AuthMethodEnum
from modules.sync.entity.sync import SyncsActive
from modules.sync.service.sync_service import SyncService, SyncUserService
from modules.user.entity.user_identity import UserIdentity
# Set environment variable for OAuthlib
os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"
# Initialize logger
logger = get_logger(__name__)
# Initialize sync service
sync_service = SyncService()
sync_user_service = SyncUserService()
# Initialize API router
sync_router = APIRouter()
# Add Google routes here
sync_router.include_router(google_sync_router)
sync_router.include_router(azure_sync_router)
# Google sync description
google_sync = SyncsDescription(
name="Google",
description="Sync your Google Drive with Quivr",
auth_method=AuthMethodEnum.URI_WITH_CALLBACK,
)
azure_sync = SyncsDescription(
name="Azure",
description="Sync your Azure Drive with Quivr",
auth_method=AuthMethodEnum.URI_WITH_CALLBACK,
)
@sync_router.get(
"/sync/all",
response_model=List[SyncsDescription],
dependencies=[Depends(AuthBearer())],
tags=["Sync"],
)
async def get_syncs(current_user: UserIdentity = Depends(get_current_user)):
"""
Get all available sync descriptions.
Args:
current_user (UserIdentity): The current authenticated user.
Returns:
List[SyncsDescription]: A list of available sync descriptions.
"""
logger.debug(f"Fetching all sync descriptions for user: {current_user.id}")
return [google_sync, azure_sync]
@sync_router.get(
"/sync",
dependencies=[Depends(AuthBearer())],
tags=["Sync"],
)
async def get_user_syncs(current_user: UserIdentity = Depends(get_current_user)):
"""
Get syncs for the current user.
Args:
current_user (UserIdentity): The current authenticated user.
Returns:
List: A list of syncs for the user.
"""
logger.debug(f"Fetching user syncs for user: {current_user.id}")
return sync_user_service.get_syncs_user(str(current_user.id))
@sync_router.post(
"/sync/active",
response_model=SyncsActive,
dependencies=[Depends(AuthBearer())],
tags=["Sync"],
)
async def create_sync_active(
sync_active_input: SyncsActiveInput,
current_user: UserIdentity = Depends(get_current_user),
):
"""
Create a new active sync for the current user.
Args:
sync_active_input (SyncsActiveInput): The sync active input data.
current_user (UserIdentity): The current authenticated user.
Returns:
SyncsActive: The created sync active data.
"""
logger.debug(
f"Creating active sync for user: {current_user.id} with data: {sync_active_input}"
)
return sync_service.create_sync_active(sync_active_input, str(current_user.id))
@sync_router.put(
"/sync/active/{sync_id}",
response_model=SyncsActive,
dependencies=[Depends(AuthBearer())],
tags=["Sync"],
)
async def update_sync_active(
sync_id: str,
sync_active_input: SyncsActiveUpdateInput,
current_user: UserIdentity = Depends(get_current_user),
):
"""
Update an existing active sync for the current user.
Args:
sync_id (str): The ID of the active sync to update.
sync_active_input (SyncsActiveUpdateInput): The updated sync active input data.
current_user (UserIdentity): The current authenticated user.
Returns:
SyncsActive: The updated sync active data.
"""
logger.debug(
f"Updating active sync for user: {current_user.id} with data: {sync_active_input}"
)
return sync_service.update_sync_active(sync_id, sync_active_input)
@sync_router.delete(
"/sync/active/{sync_id}",
status_code=status.HTTP_204_NO_CONTENT,
dependencies=[Depends(AuthBearer())],
tags=["Sync"],
)
async def delete_sync_active(
sync_id: str, current_user: UserIdentity = Depends(get_current_user)
):
"""
Delete an existing active sync for the current user.
Args:
sync_id (str): The ID of the active sync to delete.
current_user (UserIdentity): The current authenticated user.
Returns:
None
"""
logger.debug(
f"Deleting active sync for user: {current_user.id} with sync ID: {sync_id}"
)
sync_service.delete_sync_active(sync_id, str(current_user.id))
return None
@sync_router.get(
"/sync/active",
response_model=List[SyncsActive],
dependencies=[Depends(AuthBearer())],
tags=["Sync"],
)
async def get_active_syncs_for_user(
current_user: UserIdentity = Depends(get_current_user),
):
"""
Get all active syncs for the current user.
Args:
current_user (UserIdentity): The current authenticated user.
Returns:
List[SyncsActive]: A list of active syncs for the current user.
"""
logger.debug(f"Fetching active syncs for user: {current_user.id}")
return sync_service.get_syncs_active(str(current_user.id))
@sync_router.get(
"/sync/{sync_id}/files",
dependencies=[Depends(AuthBearer())],
tags=["Sync"],
)
async def get_files_folder_user_sync(
user_sync_id: int,
folder_id: str = None,
current_user: UserIdentity = Depends(get_current_user),
):
"""
Get files for a user sync.
Args:
user_sync_id (int): The ID of the user sync.
folder_id (str): The ID of the folder to get files from.
current_user (UserIdentity): The current authenticated user.
Returns:
dict: The files available in the user's sync, or None if no sync user is found.
"""
logger.debug(
f"Fetching files for user sync: {user_sync_id} for user: {current_user.id}"
)
return sync_user_service.get_files_folder_user_sync(
user_sync_id, str(current_user.id), folder_id
)
@sync_router.get(
"/sync/active/interval",
dependencies=[Depends(AuthBearer())],
tags=["Sync"],
)
async def get_syncs_active_in_interval() -> List[SyncsActive]:
"""
Get all active syncs that need to be synced.
Returns:
List: A list of active syncs that need to be synced.
"""
logger.debug("Fetching active syncs in interval")
return await sync_service.get_syncs_active_in_interval()

View File

@ -1 +0,0 @@
from .outputs import SyncsDescription, SyncsUserOutput

View File

@ -1,105 +0,0 @@
from typing import List, Optional
from pydantic import BaseModel
class SyncsUserInput(BaseModel):
"""
Input model for creating a new sync user.
Attributes:
user_id (str): The unique identifier for the user.
name (str): The display name of the sync connection.
provider (str): The provider of the sync service (e.g., Google, Azure).
credentials (dict): The credentials required for the sync service.
state (dict): The state information for the sync user.
"""
user_id: str
name: str
provider: str
credentials: dict
state: dict
class SyncUserUpdateInput(BaseModel):
"""
Input model for updating an existing sync user.
Attributes:
credentials (dict): The updated credentials for the sync service.
state (dict): The updated state information for the sync user.
"""
credentials: dict
state: dict
class SyncActiveSettings(BaseModel):
"""
Sync active settings.
Attributes:
folders (List[str] | None): A list of folder paths to be synced, or None if not applicable.
files (List[str] | None): A list of file paths to be synced, or None if not applicable.
"""
folders: Optional[List[str]] = None
files: Optional[List[str]] = None
class SyncsActiveInput(BaseModel):
"""
Input model for creating a new active sync.
Attributes:
name (str): The name of the sync.
syncs_user_id (int): The ID of the sync user associated with this sync.
settings (SyncActiveSettings): The settings for the active sync.
brain_id (str): The ID of the brain to attach synced files to.
"""
name: str
syncs_user_id: int
settings: SyncActiveSettings
brain_id: str
class SyncsActiveUpdateInput(BaseModel):
"""
Input model for updating an existing active sync.
Attributes:
name (str): The updated name of the sync.
settings (SyncActiveSettings): The updated settings for the active sync.
last_synced (str): The updated last synced timestamp.
"""
name: Optional[str] = None
settings: Optional[SyncActiveSettings] = None
last_synced: Optional[str] = None
class SyncFileInput(BaseModel):
"""
Input model for creating a new sync file.
Attributes:
path (str): The path of the file.
syncs_active_id (int): The ID of the active sync associated with this file.
last_modified (str): The last modified timestamp of the file.
brain_id (str): The ID of the brain associated with this file.
"""
path: str
syncs_active_id: int
last_modified: str
brain_id: str
class SyncFileUpdateInput(BaseModel):
"""
Input model for updating an existing sync file.
Attributes:
last_modified (str): The updated last modified timestamp.
"""
last_modified: str

View File

@ -1,20 +0,0 @@
from enum import Enum
from pydantic import BaseModel
class AuthMethodEnum(str, Enum):
URI_WITH_CALLBACK = "uri_with_callback"
class SyncsDescription(BaseModel):
name: str
description: str
auth_method: AuthMethodEnum
class SyncsUserOutput(BaseModel):
user_id: str
provider: str
state: dict
credentials: dict

View File

@ -1,22 +0,0 @@
from datetime import datetime
from pydantic import BaseModel
class SyncsActive(BaseModel):
id: int
name: str
syncs_user_id: int
user_id: str
settings: dict
last_synced: datetime
sync_interval_minutes: int
brain_id: str
class SyncsFiles(BaseModel):
id: int
path: str
syncs_active_id: int
last_modified: str
brain_id: str

View File

@ -1,193 +0,0 @@
from datetime import datetime, timedelta
from typing import List
from logger import get_logger
from models.settings import get_supabase_client
from modules.knowledge.service.knowledge_service import KnowledgeService
from modules.notification.service.notification_service import NotificationService
from modules.sync.dto.inputs import SyncsActiveInput, SyncsActiveUpdateInput
from modules.sync.entity.sync import SyncsActive
from modules.sync.repository.sync_interfaces import SyncInterface
notification_service = NotificationService()
knowledge_service = KnowledgeService()
logger = get_logger(__name__)
class Sync(SyncInterface):
def __init__(self):
"""
Initialize the Sync class with a Supabase client.
"""
supabase_client = get_supabase_client()
self.db = supabase_client # type: ignore
logger.debug("Supabase client initialized")
def create_sync_active(
self, sync_active_input: SyncsActiveInput, user_id: str
) -> SyncsActive:
"""
Create a new active sync in the database.
Args:
sync_active_input (SyncsActiveInput): The input data for creating an active sync.
user_id (str): The user ID associated with the active sync.
Returns:
SyncsActive or None: The created active sync data or None if creation failed.
"""
logger.info(
"Creating active sync for user_id: %s with input: %s",
user_id,
sync_active_input,
)
sync_active_input_dict = sync_active_input.model_dump()
sync_active_input_dict["user_id"] = user_id
response = (
self.db.from_("syncs_active").insert(sync_active_input_dict).execute()
)
if response.data:
logger.info("Active sync created successfully: %s", response.data[0])
return SyncsActive(**response.data[0])
logger.warning("Failed to create active sync for user_id: %s", user_id)
return None
def get_syncs_active(self, user_id: str) -> List[SyncsActive]:
"""
Retrieve active syncs from the database.
Args:
user_id (str): The user ID to filter active syncs.
Returns:
List[SyncsActive]: A list of active syncs matching the criteria.
"""
logger.info("Retrieving active syncs for user_id: %s", user_id)
response = (
self.db.from_("syncs_active").select("*").eq("user_id", user_id).execute()
)
if response.data:
logger.info("Active syncs retrieved successfully: %s", response.data)
return [SyncsActive(**sync) for sync in response.data]
logger.warning("No active syncs found for user_id: %s", user_id)
return []
def update_sync_active(
self, sync_id: int, sync_active_input: SyncsActiveUpdateInput
):
"""
Update an active sync in the database.
Args:
sync_id (int): The ID of the active sync.
sync_active_input (SyncsActiveUpdateInput): The input data for updating the active sync.
Returns:
dict or None: The updated active sync data or None if update failed.
"""
logger.info(
"Updating active sync with sync_id: %s, input: %s",
sync_id,
sync_active_input,
)
response = (
self.db.from_("syncs_active")
.update(sync_active_input.model_dump(exclude_unset=True))
.eq("id", sync_id)
.execute()
)
if response.data:
logger.info("Active sync updated successfully: %s", response.data[0])
return response.data[0]
logger.warning("Failed to update active sync with sync_id: %s", sync_id)
return None
def delete_sync_active(self, sync_active_id: int, user_id: str):
"""
Delete an active sync from the database.
Args:
sync_active_id (int): The ID of the active sync.
user_id (str): The user ID associated with the active sync.
Returns:
dict or None: The deleted active sync data or None if deletion failed.
"""
logger.info(
"Deleting active sync with sync_active_id: %s, user_id: %s",
sync_active_id,
user_id,
)
response = (
self.db.from_("syncs_active")
.delete()
.eq("id", sync_active_id)
.eq("user_id", user_id)
.execute()
)
if response.data:
logger.info("Active sync deleted successfully: %s", response.data[0])
return response.data[0]
logger.warning(
"Failed to delete active sync with sync_active_id: %s, user_id: %s",
sync_active_id,
user_id,
)
return None
def get_details_sync_active(self, sync_active_id: int):
"""
Retrieve details of an active sync, including associated sync user data.
Args:
sync_active_id (int): The ID of the active sync.
Returns:
dict or None: The detailed active sync data or None if not found.
"""
logger.info(
"Retrieving details for active sync with sync_active_id: %s", sync_active_id
)
response = (
self.db.table("syncs_active")
.select("*, syncs_user(provider, credentials)")
.eq("id", sync_active_id)
.execute()
)
if response.data:
logger.info(
"Details for active sync retrieved successfully: %s", response.data[0]
)
return response.data[0]
logger.warning(
"No details found for active sync with sync_active_id: %s", sync_active_id
)
return None
async def get_syncs_active_in_interval(self) -> List[SyncsActive]:
"""
Retrieve active syncs that are due for synchronization based on their interval.
Returns:
list: A list of active syncs that are due for synchronization.
"""
logger.info("Retrieving active syncs due for synchronization")
current_time = datetime.now()
# NOTE: a sync is considered due once last_synced is older than a fixed 360-minute window; the per-sync sync_interval_minutes field is not consulted in this query
response = (
self.db.table("syncs_active")
.select("*")
.lt("last_synced", (current_time - timedelta(minutes=360)).isoformat())
.execute()
)
if response.data:
logger.info("Active syncs retrieved successfully: %s", response.data)
for sync in response.data:
# The actual synchronization is handled by the Celery task; here we only log the syncs that are due.
logger.info("Sync due for sync_active_id: %s", sync["id"])
return [SyncsActive(**sync) for sync in response.data]
logger.warning("No active syncs found due for synchronization")
return []

View File

@ -1,98 +0,0 @@
from logger import get_logger
from models.settings import get_supabase_client
from modules.sync.dto.inputs import SyncFileInput, SyncFileUpdateInput
from modules.sync.entity.sync import SyncsFiles
from modules.sync.repository.sync_interfaces import SyncFileInterface
logger = get_logger(__name__)
class SyncFiles(SyncFileInterface):
def __init__(self):
"""
Initialize the SyncFiles class with a Supabase client.
"""
supabase_client = get_supabase_client()
self.db = supabase_client # type: ignore
logger.debug("Supabase client initialized")
def create_sync_file(self, sync_file_input: SyncFileInput) -> SyncsFiles:
"""
Create a new sync file in the database.
Args:
sync_file_input (SyncFileInput): The input data for creating a sync file.
Returns:
SyncsFiles: The created sync file data.
"""
logger.info("Creating sync file with input: %s", sync_file_input)
response = (
self.db.from_("syncs_files")
.insert(
{
"path": sync_file_input.path,
"syncs_active_id": sync_file_input.syncs_active_id,
"last_modified": sync_file_input.last_modified,
"brain_id": sync_file_input.brain_id,
}
)
.execute()
)
if response.data:
logger.info("Sync file created successfully: %s", response.data[0])
return SyncsFiles(**response.data[0])
logger.warning("Failed to create sync file")
return None
def get_sync_files(self, sync_active_id: int) -> list[SyncsFiles]:
"""
Retrieve sync files from the database.
Args:
sync_active_id (int): The ID of the active sync.
Returns:
list[SyncsFiles]: A list of sync files matching the criteria.
"""
logger.info("Retrieving sync files for sync_active_id: %s", sync_active_id)
response = (
self.db.from_("syncs_files")
.select("*")
.eq("syncs_active_id", sync_active_id)
.execute()
)
if response.data:
# logger.info("Sync files retrieved successfully: %s", response.data)
return [SyncsFiles(**file) for file in response.data]
logger.warning("No sync files found for sync_active_id: %s", sync_active_id)
return []
def update_sync_file(self, sync_file_id: int, sync_file_input: SyncFileUpdateInput):
"""
Update a sync file in the database.
Args:
sync_file_id (int): The ID of the sync file.
sync_file_input (SyncFileUpdateInput): The input data for updating the sync file.
"""
logger.info(
"Updating sync file with sync_file_id: %s, input: %s",
sync_file_id,
sync_file_input,
)
self.db.from_("syncs_files").update(sync_file_input.model_dump()).eq(
"id", sync_file_id
).execute()
logger.info("Sync file updated successfully")
def delete_sync_file(self, sync_file_id: int):
"""
Delete a sync file from the database.
Args:
sync_file_id (int): The ID of the sync file.
"""
logger.info("Deleting sync file with sync_file_id: %s", sync_file_id)
self.db.from_("syncs_files").delete().eq("id", sync_file_id).execute()
logger.info("Sync file deleted successfully")

View File

@ -1,101 +0,0 @@
from abc import ABC, abstractmethod
from typing import List
from uuid import UUID
from modules.sync.dto.inputs import (
SyncFileInput,
SyncFileUpdateInput,
SyncsActiveInput,
SyncsActiveUpdateInput,
SyncsUserInput,
SyncUserUpdateInput,
)
from modules.sync.entity.sync import SyncsActive, SyncsFiles
class SyncUserInterface(ABC):
@abstractmethod
def create_sync_user(
self,
sync_user_input: SyncsUserInput,
):
pass
@abstractmethod
def get_syncs_user(self, user_id: str, sync_user_id: int = None):
pass
@abstractmethod
def get_sync_user_by_id(self, sync_id: int):
pass
@abstractmethod
def delete_sync_user(self, sync_user_id: UUID, user_id: UUID):
pass
@abstractmethod
def get_sync_user_by_state(self, state: dict):
pass
@abstractmethod
def update_sync_user(
self, sync_user_id: str, state: dict, sync_user_input: SyncUserUpdateInput
):
pass
@abstractmethod
def get_files_folder_user_sync(
self, sync_active_id: int, user_id: str, folder_id: int = None
):
pass
class SyncInterface(ABC):
@abstractmethod
def create_sync_active(
self,
sync_active_input: SyncsActiveInput,
user_id: str,
) -> SyncsActive:
pass
@abstractmethod
def get_syncs_active(self, user_id: UUID) -> list[SyncsActive]:
pass
@abstractmethod
def update_sync_active(
self, sync_id: UUID, sync_active_input: SyncsActiveUpdateInput
):
pass
@abstractmethod
def delete_sync_active(self, sync_active_id: int, user_id: str):
pass
@abstractmethod
def get_details_sync_active(self, sync_active_id: int):
pass
@abstractmethod
async def get_syncs_active_in_interval(self) -> List[SyncsActive]:
pass
class SyncFileInterface(ABC):
@abstractmethod
def create_sync_file(self, sync_file_input: SyncFileInput) -> SyncsFiles:
pass
@abstractmethod
def get_sync_files(self, sync_active_id: int) -> list[SyncsFiles]:
pass
@abstractmethod
def update_sync_file(self, sync_file_id: int, sync_file_input: SyncFileUpdateInput):
pass
@abstractmethod
def delete_sync_file(self, sync_file_id: int):
pass

View File

@ -1,204 +0,0 @@
import json
from logger import get_logger
from models.settings import get_supabase_client
from modules.knowledge.service.knowledge_service import KnowledgeService
from modules.notification.service.notification_service import NotificationService
from modules.sync.dto.inputs import SyncsUserInput, SyncUserUpdateInput
from modules.sync.repository.sync_interfaces import SyncUserInterface
from modules.sync.utils.list_files import get_google_drive_files, list_azure_files
notification_service = NotificationService()
knowledge_service = KnowledgeService()
logger = get_logger(__name__)
class SyncUser(SyncUserInterface):
def __init__(self):
"""
Initialize the Sync class with a Supabase client.
"""
supabase_client = get_supabase_client()
self.db = supabase_client # type: ignore
logger.debug("Supabase client initialized")
def create_sync_user(
self,
sync_user_input: SyncsUserInput,
):
"""
Create a new sync user in the database.
Args:
sync_user_input (SyncsUserInput): The input data for creating a sync user.
Returns:
dict or None: The created sync user data or None if creation failed.
"""
logger.info("Creating sync user with input: %s", sync_user_input)
response = (
self.db.from_("syncs_user")
.insert(
{
"user_id": sync_user_input.user_id,
"provider": sync_user_input.provider,
"credentials": sync_user_input.credentials,
"state": sync_user_input.state,
"name": sync_user_input.name,
}
)
.execute()
)
if response.data:
logger.info("Sync user created successfully: %s", response.data[0])
return response.data[0]
logger.warning("Failed to create sync user")
return None
def get_sync_user_by_id(self, sync_id: int):
"""
Retrieve sync users from the database.
"""
response = self.db.from_("syncs_user").select("*").eq("id", sync_id).execute()
if response.data:
logger.info("Sync user found: %s", response.data[0])
return response.data[0]
logger.warning("No sync user found for sync_id: %s", sync_id)
return None
def get_syncs_user(self, user_id: str, sync_user_id: int = None):
"""
Retrieve sync users from the database.
Args:
user_id (str): The user ID to filter sync users.
sync_user_id (int, optional): The sync user ID to filter sync users. Defaults to None.
Returns:
list: A list of sync users matching the criteria.
"""
logger.info(
"Retrieving sync users for user_id: %s, sync_user_id: %s",
user_id,
sync_user_id,
)
query = self.db.from_("syncs_user").select("*").eq("user_id", user_id)
if sync_user_id:
query = query.eq("id", sync_user_id)
response = query.execute()
if response.data:
logger.info("Sync users retrieved successfully: %s", response.data)
return response.data
logger.warning(
"No sync users found for user_id: %s, sync_user_id: %s",
user_id,
sync_user_id,
)
return []
def get_sync_user_by_state(self, state: dict):
"""
Retrieve a sync user by their state.
Args:
state (dict): The state to filter sync users.
Returns:
dict or None: The sync user data matching the state or None if not found.
"""
logger.info("Getting sync user by state: %s", state)
state_str = json.dumps(state)
response = (
self.db.from_("syncs_user").select("*").eq("state", state_str).execute()
)
if response.data:
logger.info("Sync user found by state: %s", response.data[0])
return response.data[0]
logger.warning("No sync user found for state: %s", state)
return None
def delete_sync_user(self, provider: str, user_id: str):
"""
Delete a sync user from the database.
Args:
provider (str): The provider of the sync user.
user_id (str): The user ID of the sync user.
"""
logger.info(
"Deleting sync user with provider: %s, user_id: %s", provider, user_id
)
self.db.from_("syncs_user").delete().eq("provider", provider).eq(
"user_id", user_id
).execute()
logger.info("Sync user deleted successfully")
def update_sync_user(
self, sync_user_id: str, state: dict, sync_user_input: SyncUserUpdateInput
):
"""
Update a sync user in the database.
Args:
sync_user_id (str): The user ID of the sync user.
state (dict): The state to filter sync users.
sync_user_input (SyncUserUpdateInput): The input data for updating the sync user.
"""
logger.info(
"Updating sync user with user_id: %s, state: %s, input: %s",
sync_user_id,
state,
sync_user_input,
)
state_str = json.dumps(state)
self.db.from_("syncs_user").update(sync_user_input.model_dump()).eq(
"user_id", sync_user_id
).eq("state", state_str).execute()
logger.info("Sync user updated successfully")
def get_files_folder_user_sync(
self, sync_active_id: int, user_id: str, folder_id: str = None
):
"""
Retrieve files from a user's sync folder, either from Google Drive or Azure.
Args:
sync_active_id (int): The ID of the active sync.
user_id (str): The user ID associated with the active sync.
folder_id (str, optional): The folder ID to filter files. Defaults to None.
Returns:
dict, str, or None: The list of files, an error string if the provider is unsupported, or None if no sync user is found.
"""
logger.info(
"Retrieving files for user sync with sync_active_id: %s, user_id: %s, folder_id: %s",
sync_active_id,
user_id,
folder_id,
)
# Check whether the sync is Google or Azure
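# NOTE: despite the parameter name, sync_active_id is used here as the syncs_user record ID.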
sync_user = self.get_syncs_user(user_id=user_id, sync_user_id=sync_active_id)
if not sync_user:
logger.warning(
"No sync user found for sync_active_id: %s, user_id: %s",
sync_active_id,
user_id,
)
return None
sync_user = sync_user[0]
logger.info("Sync user found: %s", sync_user)
provider = sync_user["provider"].lower()
if provider == "google":
logger.info("Getting files for Google sync")
return get_google_drive_files(sync_user["credentials"], folder_id)
elif provider == "azure":
logger.info("Getting files for Azure sync")
return list_azure_files(sync_user["credentials"], folder_id)
else:
logger.warning("No sync found for provider: %s", sync_user["provider"])
return "No sync found"

View File

@ -1,82 +0,0 @@
from typing import List
from logger import get_logger
from modules.sync.dto.inputs import (
SyncsActiveInput,
SyncsActiveUpdateInput,
SyncsUserInput,
SyncUserUpdateInput,
)
from modules.sync.entity.sync import SyncsActive
from modules.sync.repository.sync import Sync
from modules.sync.repository.sync_interfaces import SyncInterface, SyncUserInterface
from modules.sync.repository.sync_user import SyncUser
from modules.user.service.user_service import UserService
logger = get_logger(__name__)
user_service = UserService()
class SyncUserService:
repository: SyncUserInterface
def __init__(self):
self.repository = SyncUser()
def get_syncs_user(self, user_id: str, sync_user_id: int = None):
return self.repository.get_syncs_user(user_id, sync_user_id)
def create_sync_user(self, sync_user_input: SyncsUserInput):
return self.repository.create_sync_user(sync_user_input)
def delete_sync_user(self, provider: str, user_id: str):
return self.repository.delete_sync_user(provider, user_id)
def get_sync_user_by_state(self, state: dict):
return self.repository.get_sync_user_by_state(state)
def get_sync_user_by_id(self, sync_id: int):
return self.repository.get_sync_user_by_id(sync_id)
def update_sync_user(
self, sync_user_id: str, state: dict, sync_user_input: SyncUserUpdateInput
):
return self.repository.update_sync_user(sync_user_id, state, sync_user_input)
def get_files_folder_user_sync(
self, sync_active_id: int, user_id: str, folder_id: str = None
):
return self.repository.get_files_folder_user_sync(
sync_active_id, user_id, folder_id
)
class SyncService:
repository: SyncInterface
def __init__(self):
self.repository = Sync()
def create_sync_active(
self, sync_active_input: SyncsActiveInput, user_id: str
) -> SyncsActive:
return self.repository.create_sync_active(sync_active_input, user_id)
def get_syncs_active(self, user_id: str) -> List[SyncsActive]:
return self.repository.get_syncs_active(user_id)
def update_sync_active(
self, sync_id: str, sync_active_input: SyncsActiveUpdateInput
):
return self.repository.update_sync_active(sync_id, sync_active_input)
def delete_sync_active(self, sync_active_id: str, user_id: str):
return self.repository.delete_sync_active(sync_active_id, user_id)
async def get_syncs_active_in_interval(self) -> List[SyncsActive]:
return await self.repository.get_syncs_active_in_interval()
def get_details_sync_active(self, sync_active_id: int):
return self.repository.get_details_sync_active(sync_active_id)

View File

@ -1,48 +0,0 @@
import asyncio
from celery_config import celery
from logger import get_logger
from modules.knowledge.repository.storage import Storage
from modules.sync.repository.sync_files import SyncFiles
from modules.sync.service.sync_service import SyncService, SyncUserService
from modules.sync.utils.googleutils import GoogleSyncUtils
from modules.sync.utils.sharepointutils import AzureSyncUtils
logger = get_logger(__name__)
@celery.task(name="process_sync_active")
def process_sync_active():
# Run the async sync pass to completion inside the Celery worker process.
asyncio.run(_process_sync_active())
async def _process_sync_active():
sync_active_service = SyncService()
sync_user_service = SyncUserService()
sync_files_repo_service = SyncFiles()
storage = Storage()
google_sync_utils = GoogleSyncUtils(
sync_user_service=sync_user_service,
sync_active_service=sync_active_service,
sync_files_repo=sync_files_repo_service,
storage=storage,
)
azure_sync_utils = AzureSyncUtils(
sync_user_service=sync_user_service,
sync_active_service=sync_active_service,
sync_files_repo=sync_files_repo_service,
storage=storage,
)
active = await sync_active_service.get_syncs_active_in_interval()
for sync in active:
details_user_sync = sync_user_service.get_sync_user_by_id(sync.syncs_user_id)
if details_user_sync["provider"].lower() == "google":
await google_sync_utils.sync(sync_active_id=sync.id, user_id=sync.user_id)
elif details_user_sync["provider"].lower() == "azure":
await azure_sync_utils.sync(sync_active_id=sync.id, user_id=sync.user_id)
else:
logger.info("Provider not supported: %s", details_user_sync["provider"])

View File

@ -1,301 +0,0 @@
from datetime import datetime, timedelta, timezone
from io import BytesIO
from fastapi import UploadFile
from google.auth.transport.requests import Request as GoogleRequest
from google.oauth2.credentials import Credentials
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from logger import get_logger
from modules.brain.repository.brains_vectors import BrainsVectors
from modules.knowledge.repository.storage import Storage
from modules.sync.dto.inputs import (
SyncFileInput,
SyncFileUpdateInput,
SyncsActiveUpdateInput,
)
from modules.sync.repository.sync_files import SyncFiles
from modules.sync.service.sync_service import SyncService, SyncUserService
from modules.sync.utils.list_files import get_google_drive_files
from modules.sync.utils.upload import upload_file
from modules.upload.service.upload_file import check_file_exists
from pydantic import BaseModel, ConfigDict
logger = get_logger(__name__)
class GoogleSyncUtils(BaseModel):
model_config = ConfigDict(arbitrary_types_allowed=True)
sync_user_service: SyncUserService
sync_active_service: SyncService
sync_files_repo: SyncFiles
storage: Storage
async def _upload_files(
self,
credentials: dict,
files: list,
current_user: str,
brain_id: str,
sync_active_id: int,
):
"""
Download files from Google Drive and upload them to the brain's storage.
Args:
credentials (dict): The credentials for accessing Google Drive.
files (list): The list of file metadata to download.
Returns:
dict: A dictionary containing the status of the download or an error message.
"""
logger.info("Downloading Google Drive files with metadata: %s", files)
creds = Credentials.from_authorized_user_info(credentials)
if creds.expired and creds.refresh_token:
creds.refresh(GoogleRequest())
logger.info("Google Drive credentials refreshed")
# NOTE: the refreshed credentials are not persisted back to the database here
try:
service = build("drive", "v3", credentials=creds)
downloaded_files = []
for file in files:
file_id = file["id"]
file_name = file["name"]
mime_type = file["mime_type"]
modified_time = file["last_modified"]
# Convert Google Docs files to appropriate formats before downloading
if mime_type == "application/vnd.google-apps.document":
logger.debug(
"Converting Google Docs file with file_id: %s to DOCX.", file_id
)
request = service.files().export_media(
fileId=file_id,
mimeType="application/vnd.openxmlformats-officedocument.wordprocessingml.document",
)
file_name += ".docx"
elif mime_type == "application/vnd.google-apps.spreadsheet":
logger.debug(
"Converting Google Sheets file with file_id: %s to XLSX.",
file_id,
)
request = service.files().export_media(
fileId=file_id,
mimeType="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
)
file_name += ".xlsx"
elif mime_type == "application/vnd.google-apps.presentation":
logger.debug(
"Converting Google Slides file with file_id: %s to PPTX.",
file_id,
)
request = service.files().export_media(
fileId=file_id,
mimeType="application/vnd.openxmlformats-officedocument.presentationml.presentation",
)
file_name += ".pptx"
# Download common document formats (pdf, txt, md, csv, docx, xlsx, pptx, doc) as-is
elif file_name.split(".")[-1] in [
"pdf",
"txt",
"md",
"csv",
"docx",
"xlsx",
"pptx",
"doc",
]:
request = service.files().get_media(fileId=file_id)
else:
logger.warning(
"Skipping unsupported file type: %s for file_id: %s",
mime_type,
file_id,
)
continue
file_data = request.execute()
# Check if the file already exists in the storage
if check_file_exists(brain_id, file_name):
logger.info("🔥 File already exists in the storage: %s", file_name)
self.storage.remove_file(brain_id + "/" + file_name)
BrainsVectors().delete_file_from_brain(brain_id, file_name)
to_upload_file = UploadFile(
file=BytesIO(file_data),
filename=file_name,
)
await upload_file(to_upload_file, brain_id, current_user)
# Check if the file already exists in the database
existing_files = self.sync_files_repo.get_sync_files(sync_active_id)
existing_file = next(
(f for f in existing_files if f.path == file_name), None
)
if existing_file:
# Update the existing file record
self.sync_files_repo.update_sync_file(
existing_file.id,
SyncFileUpdateInput(
last_modified=modified_time,
),
)
else:
# Create a new file record
self.sync_files_repo.create_sync_file(
SyncFileInput(
path=file_name,
syncs_active_id=sync_active_id,
last_modified=modified_time,
brain_id=brain_id,
)
)
downloaded_files.append(file_name)
return {"downloaded_files": downloaded_files}
except HttpError as error:
logger.error(
"An error occurred while downloading Google Drive files: %s", error
)
return {"error": f"An error occurred: {error}"}
async def sync(self, sync_active_id: int, user_id: str):
"""
Check if the Google sync has not been synced and download the folders and files based on the settings.
Args:
sync_active_id (int): The ID of the active sync.
user_id (str): The user ID associated with the active sync.
"""
# Retrieve the active sync details
sync_active = self.sync_active_service.get_details_sync_active(sync_active_id)
if not sync_active:
logger.warning(
"No active sync found for sync_active_id: %s", sync_active_id
)
return None
# Check if the sync is due
last_synced = sync_active.get("last_synced")
sync_interval_minutes = sync_active.get("sync_interval_minutes", 0)
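# With no last_synced timestamp (or an interval of 0), the sync proceeds immediately.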
if last_synced:
last_synced_time = datetime.fromisoformat(last_synced).astimezone(
timezone.utc
)
current_time = datetime.now().astimezone()
# Debug logging to check the values
logger.debug("Last synced time (UTC): %s", last_synced_time)
logger.debug("Current time (local timezone): %s", current_time)
# Convert current_time to UTC for comparison
current_time_utc = current_time.astimezone(timezone.utc)
logger.debug("Current time (UTC): %s", current_time_utc)
time_difference = current_time_utc - last_synced_time
if time_difference < timedelta(minutes=sync_interval_minutes):
logger.info(
"Google sync is not due for sync_active_id: %s", sync_active_id
)
return None
# Retrieve the sync user details
sync_user = self.sync_user_service.get_syncs_user(
user_id=user_id, sync_user_id=sync_active["syncs_user_id"]
)
if not sync_user:
logger.warning(
"No sync user found for sync_active_id: %s, user_id: %s",
sync_active_id,
user_id,
)
return None
sync_user = sync_user[0]
if sync_user["provider"].lower() != "google":
logger.warning(
"Sync provider is not Google for sync_active_id: %s", sync_active_id
)
return None
# Download the folders and files from Google Drive
logger.info(
"Downloading folders and files from Google Drive for sync_active_id: %s",
sync_active_id,
)
# Get the folder id from the settings from sync_active
settings = sync_active.get("settings", {})
folders = settings.get("folders", [])
files = get_google_drive_files(
sync_user["credentials"], folder_id=folders[0] if folders else None
)
if "error" in files:
logger.error(
"Failed to download files from Google Drive for sync_active_id: %s",
sync_active_id,
)
return None
# Filter files that have been modified since the last sync
last_synced_time = datetime.fromisoformat(last_synced) if last_synced else None
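# NOTE: assumes last_modified and last_synced are comparable ISO 8601 strings; unlike the Azure path, no explicit UTC normalization happens here.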
files_to_download = [
file
for file in files.get("files", [])
if not file["is_folder"]
and (
not last_synced_time
or datetime.fromisoformat(file["last_modified"]) > last_synced_time
)
]
downloaded_files = await self._upload_files(
sync_user["credentials"],
files_to_download,
user_id,
sync_active["brain_id"],
sync_active_id,
)
if "error" in downloaded_files:
logger.error(
"Failed to download files from Google Drive for sync_active_id: %s",
sync_active_id,
)
return None
# Update the last_synced timestamp
self.sync_active_service.update_sync_active(
sync_active_id,
SyncsActiveUpdateInput(last_synced=datetime.now().astimezone().isoformat()),
)
logger.info(
"Google Drive sync completed for sync_active_id: %s", sync_active_id
)
return downloaded_files
import asyncio
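# Local debug entry point with hardcoded sync and user IDs; only runs when the module is executed directly.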
async def main():
sync_user_service = SyncUserService()
sync_active_service = SyncService()
sync_files_repo = SyncFiles()
storage = Storage()
google_sync_utils = GoogleSyncUtils(
sync_user_service=sync_user_service,
sync_active_service=sync_active_service,
sync_files_repo=sync_files_repo,
storage=storage,
)
await google_sync_utils.sync(2, "39418e3b-0258-4452-af60-7acfcc1263ff")
if __name__ == "__main__":
asyncio.run(main())

View File

@ -1,138 +0,0 @@
import os
import msal
import requests
from fastapi import HTTPException
from google.auth.transport.requests import Request as GoogleRequest
from google.oauth2.credentials import Credentials
from googleapiclient.discovery import build
from logger import get_logger
from googleapiclient.errors import HttpError
logger = get_logger(__name__)
def get_google_drive_files(credentials: dict, folder_id: str = None):
"""
Retrieve files from Google Drive.
Args:
credentials (dict): The credentials for accessing Google Drive.
folder_id (str, optional): The folder ID to filter files. Defaults to None.
Returns:
dict: A dictionary containing the list of files or an error message.
"""
logger.info("Retrieving Google Drive files with folder_id: %s", folder_id)
creds = Credentials.from_authorized_user_info(credentials)
if creds.expired and creds.refresh_token:
creds.refresh(GoogleRequest())
logger.info("Google Drive credentials refreshed")
# NOTE: the refreshed credentials are not persisted back to the database here
try:
service = build("drive", "v3", credentials=creds)
query = f"'{folder_id}' in parents" if folder_id else None
results = (
service.files()
.list(
q=query,
pageSize=10,
fields="nextPageToken, files(id, name, mimeType, modifiedTime)",
)
.execute()
)
items = results.get("files", [])
if not items:
logger.info("No files found in Google Drive")
return {"files": "No files found."}
files = [
{
"name": item["name"],
"id": item["id"],
"is_folder": item["mimeType"] == "application/vnd.google-apps.folder",
"last_modified": item["modifiedTime"],
"mime_type": item["mimeType"],
}
for item in items
]
logger.info("Google Drive files retrieved successfully: %s", files)
return {"files": files}
except HttpError as error:
logger.error("An error occurred while retrieving Google Drive files: %s", error)
return {"error": f"An error occurred: {error}"}
CLIENT_ID = os.getenv("SHAREPOINT_CLIENT_ID")
AUTHORITY = "https://login.microsoftonline.com/common"
BACKEND_URL = os.getenv("BACKEND_URL", "http://localhost:5050")
REDIRECT_URI = f"{BACKEND_URL}/sync/azure/oauth2callback"
SCOPE = [
"https://graph.microsoft.com/Files.Read",
"https://graph.microsoft.com/User.Read",
"https://graph.microsoft.com/Sites.Read.All",
]
def get_azure_token_data(credentials):
if "access_token" not in credentials:
raise HTTPException(status_code=401, detail="Invalid token data")
return credentials
def refresh_azure_token(credentials):
if "refresh_token" not in credentials:
raise HTTPException(status_code=401, detail="No refresh token available")
client = msal.PublicClientApplication(CLIENT_ID, authority=AUTHORITY)
result = client.acquire_token_by_refresh_token(
credentials["refresh_token"], scopes=SCOPE
)
if "access_token" not in result:
raise HTTPException(status_code=400, detail="Failed to refresh token")
return result
def get_azure_headers(token_data):
return {
"Authorization": f"Bearer {token_data['access_token']}",
"Accept": "application/json",
}
def list_azure_files(credentials, folder_id=None):
token_data = get_azure_token_data(credentials)
headers = get_azure_headers(token_data)
endpoint = f"https://graph.microsoft.com/v1.0/me/drive/root/children"
if folder_id:
endpoint = (
f"https://graph.microsoft.com/v1.0/me/drive/items/{folder_id}/children"
)
response = requests.get(endpoint, headers=headers)
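# Retry once with a refreshed token if the access token has expired.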
if response.status_code == 401:
token_data = refresh_azure_token(credentials)
headers = get_azure_headers(token_data)
response = requests.get(endpoint, headers=headers)
if response.status_code != 200:
return {"error": response.text}
items = response.json().get("value", [])
if not items:
logger.info("No files found in Azure Drive")
return {"files": "No files found."}
files = [
{
"name": item["name"],
"id": item["id"],
"is_folder": "folder" in item,
"last_modified": item["lastModifiedDateTime"],
"mime_type": item.get("file", {}).get("mimeType", "folder"),
}
for item in items
]
logger.info("Azure Drive files retrieved successfully: %s", files)
return {"files": files}

View File

@ -1,303 +0,0 @@
import os
from datetime import datetime, timedelta, timezone
from io import BytesIO
import msal
import requests
from fastapi import HTTPException, UploadFile
from logger import get_logger
from modules.brain.repository.brains_vectors import BrainsVectors
from modules.knowledge.repository.storage import Storage
from modules.sync.dto.inputs import (
SyncFileInput,
SyncFileUpdateInput,
SyncsActiveUpdateInput,
)
from modules.sync.repository.sync_files import SyncFiles
from modules.sync.service.sync_service import SyncService, SyncUserService
from modules.sync.utils.list_files import list_azure_files
from modules.sync.utils.upload import upload_file
from modules.upload.service.upload_file import check_file_exists
from pydantic import BaseModel, ConfigDict
logger = get_logger(__name__)
CLIENT_ID = os.getenv("SHAREPOINT_CLIENT_ID")
AUTHORITY = "https://login.microsoftonline.com/common"
BACKEND_URL = os.getenv("BACKEND_URL", "http://localhost:5050")
REDIRECT_URI = f"{BACKEND_URL}/sync/azure/oauth2callback"
SCOPE = [
"https://graph.microsoft.com/Files.Read",
"https://graph.microsoft.com/User.Read",
"https://graph.microsoft.com/Sites.Read.All",
]
class AzureSyncUtils(BaseModel):
model_config = ConfigDict(arbitrary_types_allowed=True)
sync_user_service: SyncUserService
sync_active_service: SyncService
sync_files_repo: SyncFiles
storage: Storage
def get_headers(self, token_data):
return {
"Authorization": f"Bearer {token_data['access_token']}",
"Accept": "application/json",
}
def refresh_token(self, refresh_token):
client = msal.PublicClientApplication(CLIENT_ID, authority=AUTHORITY)
result = client.acquire_token_by_refresh_token(refresh_token, scopes=SCOPE)
if "access_token" not in result:
raise HTTPException(status_code=400, detail="Failed to refresh token")
return result
async def _upload_files(
self,
token_data: dict,
files: list,
current_user: str,
brain_id: str,
sync_active_id: int,
):
"""
Download files from Azure OneDrive/SharePoint and upload them to the brain's storage.
Args:
token_data (dict): The token data for accessing Azure.
files (list): The list of file metadata to download.
Returns:
dict: A dictionary containing the status of the download or an error message.
"""
logger.info("Downloading Azure files with metadata: %s", files)
headers = self.get_headers(token_data)
try:
downloaded_files = []
for file in files:
file_id = file["id"]
file_name = file["name"]
modified_time = file["last_modified"]
download_endpoint = (
f"https://graph.microsoft.com/v1.0/me/drive/items/{file_id}/content"
)
logger.info("Downloading file: %s", file_name)
download_response = requests.get(
download_endpoint, headers=headers, stream=True
)
if download_response.status_code == 401:
token_data = self.refresh_token(token_data["refresh_token"])
headers = self.get_headers(token_data)
download_response = requests.get(
download_endpoint, headers=headers, stream=True
)
if download_response.status_code != 200:
logger.error("Failed to download file: %s", file_name)
continue
file_data = BytesIO(download_response.content)
# Check if the file already exists in the storage
if check_file_exists(brain_id, file_name):
logger.info("🔥 File already exists in the storage: %s", file_name)
self.storage.remove_file(brain_id + "/" + file_name)
BrainsVectors().delete_file_from_brain(brain_id, file_name)
# Check if the file extension is compatible
if file_name.split(".")[-1] not in [
"pdf",
"txt",
"md",
"csv",
"docx",
"xlsx",
"pptx",
"doc",
]:
logger.info("File is not compatible: %s", file_name)
continue
to_upload_file = UploadFile(
file=file_data,
filename=file_name,
)
await upload_file(to_upload_file, brain_id, current_user)
# Check if the file already exists in the database
existing_files = self.sync_files_repo.get_sync_files(sync_active_id)
existing_file = next(
(f for f in existing_files if f.path == file_name), None
)
if existing_file:
# Update the existing file record
self.sync_files_repo.update_sync_file(
existing_file.id,
SyncFileUpdateInput(
last_modified=modified_time,
),
)
else:
# Create a new file record
self.sync_files_repo.create_sync_file(
SyncFileInput(
path=file_name,
syncs_active_id=sync_active_id,
last_modified=modified_time,
brain_id=brain_id,
)
)
downloaded_files.append(file_name)
return {"downloaded_files": downloaded_files}
except Exception as error:
logger.error("An error occurred while downloading Azure files: %s", error)
return {"error": f"An error occurred: {error}"}
async def sync(self, sync_active_id: int, user_id: str):
"""
Check whether the Azure sync is due and, if so, download the folders and files selected in the sync settings.
Args:
sync_active_id (int): The ID of the active sync.
user_id (str): The user ID associated with the active sync.
"""
# Retrieve the active sync details
sync_active = self.sync_active_service.get_details_sync_active(sync_active_id)
if not sync_active:
logger.warning(
"No active sync found for sync_active_id: %s", sync_active_id
)
return None
# Check if the sync is due
last_synced = sync_active.get("last_synced")
sync_interval_minutes = sync_active.get("sync_interval_minutes", 0)
if last_synced:
last_synced_time = datetime.fromisoformat(last_synced).astimezone(
timezone.utc
)
current_time = datetime.now().astimezone()
# Debug logging to check the values
logger.debug("Last synced time (UTC): %s", last_synced_time)
logger.debug("Current time (local timezone): %s", current_time)
# Convert current_time to UTC for comparison
current_time_utc = current_time.astimezone(timezone.utc)
logger.debug("Current time (UTC): %s", current_time_utc)
time_difference = current_time_utc - last_synced_time
if time_difference < timedelta(minutes=sync_interval_minutes):
logger.info(
"Azure sync is not due for sync_active_id: %s", sync_active_id
)
return None
# Retrieve the sync user details
sync_user = self.sync_user_service.get_syncs_user(
user_id=user_id, sync_user_id=sync_active["syncs_user_id"]
)
if not sync_user:
logger.warning(
"No sync user found for sync_active_id: %s, user_id: %s",
sync_active_id,
user_id,
)
return None
sync_user = sync_user[0]
if sync_user["provider"].lower() != "azure":
logger.warning(
"Sync provider is not Azure for sync_active_id: %s", sync_active_id
)
return None
# Download the folders and files from Azure
logger.info(
"Downloading folders and files from Azure for sync_active_id: %s",
sync_active_id,
)
# Get the folder id from the settings from sync_active
settings = sync_active.get("settings", {})
folders = settings.get("folders", [])
files = list_azure_files(
sync_user["credentials"], folder_id=folders[0] if folders else None
)
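# Note: only the first selected folder is listed; any remaining entries in `folders` are ignored here.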
if "error" in files:
logger.error(
"Failed to download files from Azure for sync_active_id: %s",
sync_active_id,
)
return None
# Filter files that have been modified since the last sync
last_synced_time = (
datetime.fromisoformat(last_synced).astimezone(timezone.utc)
if last_synced
else None
)
logger.info("Files retrieved from Azure: %s", files.get("files", []))
files_to_download = [
file
for file in files.get("files", [])
if not file["is_folder"]
and (
not last_synced_time
or datetime.strptime(
file["last_modified"], "%Y-%m-%dT%H:%M:%SZ"
).replace(tzinfo=timezone.utc)
> last_synced_time
)
]
downloaded_files = await self._upload_files(
sync_user["credentials"],
files_to_download,
user_id,
sync_active["brain_id"],
sync_active_id,
)
if "error" in downloaded_files:
logger.error(
"Failed to download files from Azure for sync_active_id: %s",
sync_active_id,
)
return None
# Update the last_synced timestamp
self.sync_active_service.update_sync_active(
sync_active_id,
SyncsActiveUpdateInput(last_synced=datetime.now().astimezone().isoformat()),
)
logger.info("Azure sync completed for sync_active_id: %s", sync_active_id)
return downloaded_files
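# Ad-hoc local test harness below (hardcoded sync id and user id, for manual debugging only)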
import asyncio
async def main():
sync_user_service = SyncUserService()
sync_active_service = SyncService()
sync_files_repo = SyncFiles()
storage = Storage()
azure_sync_utils = AzureSyncUtils(
sync_user_service=sync_user_service,
sync_active_service=sync_active_service,
sync_files_repo=sync_files_repo,
storage=storage,
)
await azure_sync_utils.sync(3, "39418e3b-0258-4452-af60-7acfcc1263ff")
if __name__ == "__main__":
asyncio.run(main())


@ -1,98 +0,0 @@
import os
from uuid import UUID
from celery_worker import process_file_and_notify
from fastapi import HTTPException, UploadFile
from modules.brain.entity.brain_entity import RoleEnum
from modules.brain.service.brain_authorization_service import (
validate_brain_authorization,
)
from modules.knowledge.dto.inputs import CreateKnowledgeProperties
from modules.knowledge.service.knowledge_service import KnowledgeService
from modules.notification.dto.inputs import (
CreateNotification,
NotificationUpdatableProperties,
)
from modules.notification.entity.notification import NotificationsStatusEnum
from modules.notification.service.notification_service import NotificationService
from modules.upload.service.upload_file import upload_file_storage
from modules.user.service.user_usage import UserUsage
from packages.files.file import convert_bytes, get_file_size
from packages.utils.telemetry import maybe_send_telemetry
knowledge_service = KnowledgeService()
notification_service = NotificationService()
async def upload_file(
upload_file: UploadFile,
brain_id: UUID,
current_user: str,
):
validate_brain_authorization(
brain_id, current_user, [RoleEnum.Editor, RoleEnum.Owner]
)
user_daily_usage = UserUsage(
id=current_user,
)
upload_notification = notification_service.add_notification(
CreateNotification(
user_id=current_user,
status=NotificationsStatusEnum.INFO,
title=f"Processing File {upload_file.filename}",
)
)
user_settings = user_daily_usage.get_user_settings()
remaining_free_space = user_settings.get("max_brain_size", 1000000000)
maybe_send_telemetry("upload_file", {"file_name": upload_file.filename})
file_size = get_file_size(upload_file)
if remaining_free_space - file_size < 0:
message = f"Brain will exceed maximum capacity. Maximum file size allowed is {convert_bytes(remaining_free_space)}."
raise HTTPException(status_code=403, detail=message)
file_content = await upload_file.read()
filename_with_brain_id = str(brain_id) + "/" + str(upload_file.filename)
try:
file_in_storage = upload_file_storage(file_content, filename_with_brain_id)
except Exception as e:
print(e)
notification_service.update_notification_by_id(
upload_notification.id if upload_notification else None,
NotificationUpdatableProperties(
status=NotificationsStatusEnum.ERROR,
description=f"There was an error uploading the file: {e}",
),
)
if "The resource already exists" in str(e):
raise HTTPException(
status_code=403,
detail=f"File {upload_file.filename} already exists in storage.",
)
else:
raise HTTPException(
status_code=500, detail=f"Failed to upload file to storage. {e}"
)
knowledge_to_add = CreateKnowledgeProperties(
brain_id=brain_id,
file_name=upload_file.filename,
extension=os.path.splitext(
upload_file.filename # pyright: ignore reportPrivateUsage=none
)[-1].lower(),
)
added_knowledge = knowledge_service.add_knowledge(knowledge_to_add)
process_file_and_notify.delay(
file_name=filename_with_brain_id,
file_original_name=upload_file.filename,
brain_id=brain_id,
notification_id=upload_notification.id,
)
return {"message": "File processing has started."}


@ -4,12 +4,10 @@ from multiprocessing import get_logger
from langchain.pydantic_v1 import Field
from langchain.schema import Document
from logger import get_logger
from models import get_supabase_client
from supabase.client import Client
logger = get_logger(__name__)
logger = get_logger()
# Mapping of file extensions to MIME types
mime_types = {
@ -38,30 +36,6 @@ mime_types = {
}
def check_file_exists(brain_id: str, file_identifier: str) -> bool:
supabase_client: Client = get_supabase_client()
try:
# Check if the file exists
logger.info(f"Checking if file {file_identifier} exists.")
# This needs to be converted into a file_identifier that is safe for a URL
response = supabase_client.storage.from_("quivr").list(brain_id)
# Check if the file_identifier is in the response
file_exists = any(file["name"] == file_identifier for file in response)
if file_exists:
logger.info(f"File {file_identifier} exists.")
return True
else:
logger.info(f"File {file_identifier} does not exist.")
return False
except Exception as e:
logger.error(f"An error occurred while checking the file: {e}")
raise e
def upload_file_storage(file, file_identifier: str, upsert: str = "false"):
supabase_client: Client = get_supabase_client()
response = None


@ -1,202 +0,0 @@
import json
import os
import msal
import requests
from fastapi import Depends, FastAPI, HTTPException, Request
from fastapi.responses import JSONResponse, StreamingResponse
app = FastAPI()
CLIENT_ID = "511dce23-02f3-4724-8684-05da226df5f3"
AUTHORITY = "https://login.microsoftonline.com/common"
REDIRECT_URI = "http://localhost:8000/oauth2callback"
SCOPE = [
"https://graph.microsoft.com/Files.Read",
"https://graph.microsoft.com/User.Read",
"https://graph.microsoft.com/Sites.Read.All",
]
client = msal.PublicClientApplication(CLIENT_ID, authority=AUTHORITY)
def get_token_data():
if not os.path.exists("azure_token.json"):
raise HTTPException(status_code=401, detail="User not authenticated")
with open("azure_token.json", "r") as token_file:
token_data = json.load(token_file)
if "access_token" not in token_data:
raise HTTPException(status_code=401, detail="Invalid token data")
return token_data
def refresh_token():
if not os.path.exists("azure_token.json"):
raise HTTPException(status_code=401, detail="User not authenticated")
with open("azure_token.json", "r") as token_file:
token_data = json.load(token_file)
if "refresh_token" not in token_data:
raise HTTPException(status_code=401, detail="No refresh token available")
result = client.acquire_token_by_refresh_token(
token_data["refresh_token"], scopes=SCOPE
)
if "access_token" not in result:
raise HTTPException(status_code=400, detail="Failed to refresh token")
with open("azure_token.json", "w") as token:
json.dump(result, token)
return result
def get_headers(token_data):
return {
"Authorization": f"Bearer {token_data['access_token']}",
"Accept": "application/json",
}
@app.get("/authorize")
def authorize():
authorization_url = client.get_authorization_request_url(
scopes=SCOPE, redirect_uri=REDIRECT_URI
)
return JSONResponse(content={"authorization_url": authorization_url})
@app.get("/oauth2callback")
def oauth2callback(request: Request):
code = request.query_params.get("code")
if not code:
raise HTTPException(status_code=400, detail="Authorization code not found")
result = client.acquire_token_by_authorization_code(
code, scopes=SCOPE, redirect_uri=REDIRECT_URI
)
if "access_token" not in result:
print(f"Token acquisition failed: {result}")
raise HTTPException(status_code=400, detail="Failed to acquire token")
with open("azure_token.json", "w") as token:
json.dump(result, token)
return JSONResponse(content={"message": "Authentication successful"})
@app.get("/list_sites")
def list_sites(token_data: dict = Depends(get_token_data)):
headers = get_headers(token_data)
endpoint = "https://graph.microsoft.com/v1.0/sites?search=*"
response = requests.get(endpoint, headers=headers)
if response.status_code == 401:
token_data = refresh_token()
headers = get_headers(token_data)
response = requests.get(endpoint, headers=headers)
if response.status_code != 200:
raise HTTPException(status_code=response.status_code, detail=response.text)
sites = response.json().get("value", [])
return JSONResponse(content={"sites": sites})
def extract_files_and_folders(items, headers, page_size):
result = []
for item in items:
entry = {
"name": item.get("name"),
"id": item.get("id"),
"parentReference": item.get("parentReference"),
"lastModifiedDateTime": item.get("lastModifiedDateTime"),
"webUrl": item.get("webUrl"),
"size": item.get("size"),
"fileSystemInfo": item.get("fileSystemInfo"),
"folder": item.get("folder"),
"file": item.get("file"),
}
if "folder" in item:
folder_endpoint = f"https://graph.microsoft.com/v1.0/me/drive/items/{item['id']}/children?$top={page_size}"
children = []
while folder_endpoint:
folder_response = requests.get(folder_endpoint, headers=headers)
if folder_response.status_code == 200:
children_page = folder_response.json().get("value", [])
children.extend(children_page)
folder_endpoint = folder_response.json().get(
"@odata.nextLink", None
)
else:
break
entry["children"] = extract_files_and_folders(children, headers, page_size)
result.append(entry)
return result
def fetch_all_files(headers, page_size):
endpoint = (
f"https://graph.microsoft.com/v1.0/me/drive/root/children?$top={page_size}"
)
all_files = []
while endpoint:
response = requests.get(endpoint, headers=headers)
if response.status_code == 401:
token_data = refresh_token()
headers = get_headers(token_data)
response = requests.get(endpoint, headers=headers)
if response.status_code != 200:
raise HTTPException(status_code=response.status_code, detail=response.text)
files = response.json().get("value", [])
all_files.extend(files)
endpoint = response.json().get("@odata.nextLink", None)
return all_files
@app.get("/list_files")
def list_files(page_size: int = 1, token_data: dict = Depends(get_token_data)):
headers = get_headers(token_data)
all_files = fetch_all_files(headers, page_size)
structured_files = extract_files_and_folders(all_files, headers, page_size)
return JSONResponse(content={"files": structured_files})
@app.get("/download_file/{file_id}")
def download_file(file_id: str, token_data: dict = Depends(get_token_data)):
headers = get_headers(token_data)
metadata_endpoint = f"https://graph.microsoft.com/v1.0/me/drive/items/{file_id}"
metadata_response = requests.get(metadata_endpoint, headers=headers)
if metadata_response.status_code == 401:
token_data = refresh_token()
headers = get_headers(token_data)
metadata_response = requests.get(metadata_endpoint, headers=headers)
if metadata_response.status_code != 200:
raise HTTPException(
status_code=metadata_response.status_code, detail=metadata_response.text
)
metadata = metadata_response.json()
if "folder" in metadata:
raise HTTPException(
status_code=400, detail="The specified ID is a folder, not a file"
)
download_endpoint = (
f"https://graph.microsoft.com/v1.0/me/drive/items/{file_id}/content"
)
download_response = requests.get(download_endpoint, headers=headers, stream=True)
if download_response.status_code == 401:
token_data = refresh_token()
headers = get_headers(token_data)
download_response = requests.get(
download_endpoint, headers=headers, stream=True
)
if download_response.status_code != 200:
raise HTTPException(
status_code=download_response.status_code, detail=download_response.text
)
return StreamingResponse(
download_response.iter_content(chunk_size=1024),
headers={"Content-Disposition": f"attachment; filename={metadata.get('name')}"},
)
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8000)
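A minimal sketch of exercising this throwaway server locally, assuming it is running on port 8000 (the browser consent step must be completed before /list_files will work):
import requests
# Step 1: fetch the Microsoft login URL and open it in a browser
auth = requests.get("http://localhost:8000/authorize").json()
print(auth["authorization_url"])
# Step 2: once the redirect to /oauth2callback has written azure_token.json,
# list the drive contents (page_size controls the Graph $top parameter)
files = requests.get("http://localhost:8000/list_files", params={"page_size": 10}).json()
print(files)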


@ -1,91 +0,0 @@
import json
import os
from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import JSONResponse
from google.auth.transport.requests import Request as GoogleRequest
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import Flow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
app = FastAPI()
SCOPES = ["https://www.googleapis.com/auth/drive.metadata.readonly"]
CLIENT_SECRETS_FILE = "credentials.json"
REDIRECT_URI = "http://localhost:8000/oauth2callback"
# Disable OAuthlib's HTTPS verification when running locally.
os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"
@app.get("/authorize")
def authorize():
flow = Flow.from_client_secrets_file(
CLIENT_SECRETS_FILE, scopes=SCOPES, redirect_uri=REDIRECT_URI
)
authorization_url, state = flow.authorization_url(
access_type="offline", include_granted_scopes="true"
)
# Store the state in session to validate the callback later
with open("state.json", "w") as state_file:
json.dump({"state": state}, state_file)
return JSONResponse(content={"authorization_url": authorization_url})
@app.get("/oauth2callback")
def oauth2callback(request: Request):
state = request.query_params.get("state")
with open("state.json", "r") as state_file:
saved_state = json.load(state_file)["state"]
if state != saved_state:
raise HTTPException(status_code=400, detail="Invalid state parameter")
flow = Flow.from_client_secrets_file(
CLIENT_SECRETS_FILE, scopes=SCOPES, state=state, redirect_uri=REDIRECT_URI
)
flow.fetch_token(authorization_response=str(request.url))
creds = flow.credentials
# Save the credentials for future use
with open("token.json", "w") as token:
token.write(creds.to_json())
return JSONResponse(content={"message": "Authentication successful"})
@app.get("/list_files")
def list_files():
creds = None
if os.path.exists("token.json"):
creds = Credentials.from_authorized_user_file("token.json", SCOPES)
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(GoogleRequest())
# Persist the refreshed credentials so later requests reuse them
with open("token.json", "w") as token:
token.write(creds.to_json())
else:
raise HTTPException(status_code=401, detail="Credentials are not valid")
try:
service = build("drive", "v3", credentials=creds)
results = (
service.files()
.list(pageSize=10, fields="nextPageToken, files(id, name)")
.execute()
)
items = results.get("files", [])
if not items:
return JSONResponse(content={"files": "No files found."})
files = [{"name": item["name"], "id": item["id"]} for item in items]
return JSONResponse(content={"files": files})
except HttpError as error:
raise HTTPException(status_code=500, detail=f"An error occurred: {error}")
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8000)
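Note that /list_files above fetches only the first page of results. A minimal sketch of following nextPageToken with the same Drive v3 client, in case full listings are needed:
files, page_token = [], None
while True:
    results = (
        service.files()
        .list(pageSize=100, pageToken=page_token, fields="nextPageToken, files(id, name)")
        .execute()
    )
    files.extend(results.get("files", []))
    page_token = results.get("nextPageToken")
    if not page_token:
        break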


@ -17,24 +17,25 @@ backoff==2.2.1; python_version >= '3.7' and python_version < '4.0'
beautifulsoup4==4.12.3; python_full_version >= '3.6.0'
billiard==4.2.0; python_version >= '3.7'
black==24.4.2; python_version >= '3.8'
boto3==1.34.107; python_version >= '3.8'
botocore==1.34.107; python_version >= '3.8'
boto3==1.34.104; python_version >= '3.8'
botocore==1.34.104; python_version >= '3.8'
cachetools==5.3.3; python_version >= '3.7'
celery[redis,sqs]==5.4.0; python_version >= '3.8'
certifi==2024.2.2; python_version >= '3.6'
cffi==1.16.0; platform_python_implementation != 'PyPy'
cffi==1.16.0; python_version >= '3.8'
chardet==5.2.0; python_version >= '3.7'
charset-normalizer==3.3.2; python_full_version >= '3.7.0'
click==8.1.7; python_version >= '3.7'
click-didyoumean==0.3.1; python_full_version >= '3.6.2'
click-plugins==1.1.1
click-repl==0.3.0; python_version >= '3.6'
cohere==5.5.0; python_version >= '3.8' and python_version < '4.0'
cohere==5.4.0; python_version >= '3.8' and python_version < '4.0'
coloredlogs==15.0.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
colorlog==6.8.2; python_version >= '3.6'
contourpy==1.2.1; python_version >= '3.9'
cryptography==42.0.7; python_version >= '3.7'
cssselect==1.2.0; python_version >= '3.7'
curl-cffi==0.7.0b4; python_version >= '3.8'
cycler==0.12.1; python_version >= '3.8'
dataclasses-json==0.6.6; python_version >= '3.7' and python_version < '4.0'
datasets==2.19.1; python_full_version >= '3.8.0'
@ -52,7 +53,7 @@ dnspython==2.6.1; python_version >= '3.8'
docker==7.0.0; python_version >= '3.8'
docx2txt==0.8
duckdb==0.10.2; python_full_version >= '3.7.0'
duckduckgo-search==6.1.0; python_version >= '3.8'
duckduckgo-search==5.3.1; python_version >= '3.8'
ecdsa==0.19.0; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
effdet==0.4.1
email-validator==2.1.1; python_version >= '3.8'
@ -68,7 +69,7 @@ filelock==3.14.0; python_version >= '3.8'
filetype==1.2.0
flake8==7.0.0; python_full_version >= '3.8.1'
flake8-black==0.3.6; python_version >= '3.7'
flashrank==0.2.5; python_version >= '3.6'
flashrank==0.2.4; python_version >= '3.6'
flatbuffers==24.3.25
flower==2.0.1; python_version >= '3.7'
fonttools==4.51.0; python_version >= '3.8'
@ -78,10 +79,7 @@ fsspec[http]==2024.3.1; python_version >= '3.8'
gitdb==4.0.11; python_version >= '3.7'
gitpython==3.1.43; python_version >= '3.7'
google-api-core[grpc]==2.19.0; python_version >= '3.7'
google-api-python-client==2.129.0; python_version >= '3.7'
google-auth==2.29.0; python_version >= '3.7'
google-auth-httplib2==0.2.0
google-auth-oauthlib==1.2.0; python_version >= '3.6'
google-cloud-vision==3.7.2
googleapis-common-protos==1.63.0; python_version >= '3.7'
gotrue==2.4.2; python_version >= '3.8' and python_version < '4.0'
@ -91,7 +89,6 @@ grpcio-status==1.62.2
h11==0.14.0; python_version >= '3.7'
html5lib==1.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
httpcore==1.0.5; python_version >= '3.8'
httplib2==0.22.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
httptools==0.6.1
httpx==0.27.0; python_version >= '3.8'
httpx-sse==0.4.0; python_version >= '3.8'
@ -113,20 +110,20 @@ jsonpointer==2.4; python_version >= '2.7' and python_version not in '3.0, 3.1, 3
kiwisolver==1.4.5; python_version >= '3.7'
kombu[sqs]==5.3.7; python_version >= '3.8'
langchain==0.1.20; python_version < '4.0' and python_full_version >= '3.8.1'
langchain-cohere==0.1.5; python_version < '4.0' and python_full_version >= '3.8.1'
langchain-cohere==0.1.4; python_version < '4.0' and python_full_version >= '3.8.1'
langchain-community==0.0.38; python_version < '4.0' and python_full_version >= '3.8.1'
langchain-core==0.1.52; python_version < '4.0' and python_full_version >= '3.8.1'
langchain-openai==0.1.7; python_version < '4.0' and python_full_version >= '3.8.1'
langchain-text-splitters==0.0.2; python_version < '4.0' and python_full_version >= '3.8.1'
langchain-openai==0.1.6; python_version < '4.0' and python_full_version >= '3.8.1'
langchain-text-splitters==0.0.1; python_version < '4.0' and python_full_version >= '3.8.1'
langdetect==1.0.9
langfuse==2.32.0; python_version < '4.0' and python_full_version >= '3.8.1'
langgraph==0.0.49; python_version < '4.0' and python_full_version >= '3.9.0'
langsmith==0.1.59; python_version < '4.0' and python_full_version >= '3.8.1'
langfuse==2.30.0; python_version < '4.0' and python_full_version >= '3.8.1'
langgraph==0.0.48; python_version < '4.0' and python_full_version >= '3.9.0'
langsmith==0.1.57; python_version < '4.0' and python_full_version >= '3.8.1'
layoutparser[layoutmodels,tesseract]==0.3.4; python_version >= '3.6'
litellm==1.37.13; python_version not in '2.7, 3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7' and python_version >= '3.8'
litellm==1.37.5; python_version not in '2.7, 3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7' and python_version >= '3.8'
llama-cpp-python==0.2.67; python_version >= '3.8'
llama-index==0.10.37; python_version < '4.0' and python_full_version >= '3.8.1'
llama-index-agent-openai==0.2.5; python_version < '4.0' and python_full_version >= '3.8.1'
llama-index==0.10.36; python_version < '4.0' and python_full_version >= '3.8.1'
llama-index-agent-openai==0.2.4; python_version < '4.0' and python_full_version >= '3.8.1'
llama-index-cli==0.1.12; python_version < '4.0' and python_full_version >= '3.8.1'
llama-index-core==0.10.36; python_version < '4.0' and python_full_version >= '3.8.1'
llama-index-embeddings-openai==0.1.9; python_version < '4.0' and python_full_version >= '3.8.1'
@ -138,7 +135,7 @@ llama-index-program-openai==0.1.6; python_version < '4.0' and python_full_versio
llama-index-question-gen-openai==0.1.3; python_version < '4.0' and python_full_version >= '3.8.1'
llama-index-readers-file==0.1.22; python_version < '4.0' and python_full_version >= '3.8.1'
llama-index-readers-llama-parse==0.1.4; python_version < '4.0' and python_full_version >= '3.8.1'
llama-parse==0.4.3; python_version < '4.0' and python_full_version >= '3.8.1'
llama-parse==0.4.2; python_version < '4.0' and python_full_version >= '3.8.1'
llamaindex-py-client==0.1.19; python_version >= '3.8' and python_version < '4'
lxml[html_clean]==5.2.2; python_version >= '3.6'
lxml-html-clean==0.1.1
@ -146,12 +143,11 @@ markdown==3.6
markdown-it-py==3.0.0; python_version >= '3.8'
markupsafe==2.1.5; python_version >= '3.7'
marshmallow==3.21.2; python_version >= '3.8'
matplotlib==3.9.0; python_version >= '3.9'
matplotlib==3.8.4; python_version >= '3.9'
mccabe==0.7.0; python_version >= '3.6'
mdurl==0.1.2; python_version >= '3.7'
monotonic==1.6
mpmath==1.3.0
msal==1.28.0; python_version >= '3.7'
msg-parser==1.2.0
multidict==6.0.5; python_version >= '3.7'
multiprocess==0.70.16; python_version >= '3.8'
@ -162,19 +158,18 @@ newspaper3k==0.2.8
nltk==3.8.1; python_version >= '3.7'
nodeenv==1.8.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'
numpy==1.26.4; python_version >= '3.9'
oauthlib==3.2.2; python_version >= '3.6'
olefile==0.47; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
omegaconf==2.3.0; python_version >= '3.6'
onnx==1.16.0
onnxruntime==1.17.3
openai==1.30.1; python_full_version >= '3.7.1'
openai==1.29.0; python_full_version >= '3.7.1'
opencv-python==4.9.0.80; python_version >= '3.6'
openpyxl==3.1.2
ordered-set==4.1.0; python_version >= '3.7'
orjson==3.10.3; python_version >= '3.8'
packaging==23.2; python_version >= '3.7'
pandas==1.5.3; python_version >= '3.8'
pandasai==2.0.42; python_version not in '2.7, 3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8' and python_version >= '3.9'
pandasai==2.0.37; python_version not in '2.7, 3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8' and python_version >= '3.9'
pathspec==0.12.1; python_version >= '3.8'
pdf2image==1.17.0
pdfminer.six==20231228
@ -182,7 +177,7 @@ pdfplumber==0.11.0; python_version >= '3.8'
pikepdf==8.15.1
pillow==10.3.0; python_version >= '3.8'
pillow-heif==0.16.0
platformdirs==4.2.2; python_version >= '3.8'
platformdirs==4.2.1; python_version >= '3.8'
playwright==1.43.0; python_version >= '3.8'
pluggy==1.5.0; python_version >= '3.8'
portalocker==2.8.2; python_version >= '3.8'
@ -196,7 +191,7 @@ psutil==5.9.8; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2,
psycopg2==2.9.9; python_version >= '3.7'
psycopg2-binary==2.9.9; python_version >= '3.7'
py==1.11.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
pyarrow==16.1.0; python_version >= '3.8'
pyarrow==16.0.0; python_version >= '3.8'
pyarrow-hotfix==0.6; python_version >= '3.5'
pyasn1==0.6.0; python_version >= '3.8'
pyasn1-modules==0.4.0; python_version >= '3.8'
@ -211,13 +206,11 @@ pyee==11.1.0; python_version >= '3.8'
pyflakes==3.2.0; python_version >= '3.8'
pygments==2.18.0; python_version >= '3.8'
pyinstrument==4.6.2; python_version >= '3.7'
pyjwt[crypto]==2.8.0; python_version >= '3.7'
pypandoc==1.13; python_version >= '3.6'
pyparsing==3.1.2; python_version >= '3.1'
pyparsing==3.1.2; python_full_version >= '3.6.8'
pypdf==4.2.0; python_version >= '3.6'
pypdfium2==4.30.0; python_version >= '3.6'
pyreqwest-impersonate==0.4.5; python_version >= '3.8'
pyright==1.1.363; python_version >= '3.7'
pyright==1.1.362; python_version >= '3.7'
pysbd==0.3.4; python_version >= '3'
pytesseract==0.3.10; python_version >= '3.7'
pytest==8.2.0; python_version >= '3.8'
@ -239,18 +232,17 @@ ragas==0.1.7
rapidfuzz==3.9.0; python_version >= '3.8'
realtime==1.0.4; python_version >= '3.8' and python_version < '4.0'
redis==5.0.4; python_version >= '3.7'
regex==2024.5.15; python_version >= '3.8'
regex==2024.5.10; python_version >= '3.8'
requests==2.31.0; python_version >= '3.7'
requests-file==2.0.0
requests-oauthlib==2.0.0; python_version >= '3.4'
resend==1.0.2; python_version >= '3.7'
resend==1.0.1; python_version >= '3.7'
retry==0.9.2
rich==13.7.1; python_full_version >= '3.7.0'
rsa==4.9; python_version >= '3.6' and python_version < '4'
s3transfer==0.10.1; python_version >= '3.8'
safetensors==0.4.3; python_version >= '3.7'
scipy==1.13.0; python_version >= '3.9'
sentry-sdk[fastapi]==2.2.0; python_version >= '3.6'
sentry-sdk[fastapi]==2.1.1; python_version >= '3.6'
setuptools==69.5.1; python_version >= '3.8'
sgmllib3k==1.0.0
shellingham==1.5.4; python_version >= '3.7'
@ -270,7 +262,7 @@ tabulate==0.9.0; python_version >= '3.7'
tavily-python==0.3.3; python_version >= '3.6'
tenacity==8.3.0; python_version >= '3.8'
tiktoken==0.7.0; python_version >= '3.8'
timm==1.0.3; python_version >= '3.8'
timm==0.9.16; python_version >= '3.8'
tinysegmenter==0.3
tldextract==5.1.2; python_version >= '3.8'
tokenizers==0.19.1; python_version >= '3.7'
@ -284,13 +276,12 @@ types-requests==2.31.0.20240406; python_version >= '3.8'
typing-extensions==4.11.0; python_version >= '3.8'
typing-inspect==0.9.0
tzdata==2024.1; python_version >= '2'
ujson==5.10.0; python_version >= '3.8'
ujson==5.9.0; python_version >= '3.8'
unidecode==1.3.8; python_version >= '3.5'
unstructured[all-docs]==0.13.7; python_version < '3.12' and python_full_version >= '3.9.0'
unstructured-client==0.22.0; python_version >= '3.8'
unstructured-inference==0.7.31
unstructured.pytesseract==0.3.12
uritemplate==4.1.1; python_version >= '3.6'
urllib3==2.2.1; python_version >= '3.8'
uvicorn[standard]==0.29.0; python_version >= '3.8'
uvloop==0.19.0
@ -305,4 +296,4 @@ xlrd==2.0.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3
xlsxwriter==3.2.0; python_version >= '3.6'
xxhash==3.4.1; python_version >= '3.7'
yarl==1.9.4; python_version >= '3.7'
zipp==3.18.2; python_version >= '3.8'
zipp==3.18.1; python_version >= '3.8'


@ -1,130 +0,0 @@
create table "public"."syncs_active" (
"id" bigint generated by default as identity not null,
"name" text not null,
"syncs_user_id" bigint not null,
"user_id" uuid not null default gen_random_uuid(),
"settings" jsonb,
"last_synced" timestamp with time zone not null default (now() AT TIME ZONE 'utc'::text),
"sync_interval_minutes" integer default 360
);
alter table "public"."syncs_active" enable row level security;
create table "public"."syncs_user" (
"id" bigint generated by default as identity not null,
"name" text not null,
"provider" text not null,
"state" jsonb,
"credentials" jsonb,
"user_id" uuid default gen_random_uuid()
);
alter table "public"."syncs_user" enable row level security;
CREATE UNIQUE INDEX syncs_active_pkey ON public.syncs_active USING btree (id);
CREATE UNIQUE INDEX syncs_user_pkey ON public.syncs_user USING btree (id);
alter table "public"."syncs_active" add constraint "syncs_active_pkey" PRIMARY KEY using index "syncs_active_pkey";
alter table "public"."syncs_user" add constraint "syncs_user_pkey" PRIMARY KEY using index "syncs_user_pkey";
alter table "public"."syncs_active" add constraint "public_syncs_active_syncs_user_id_fkey" FOREIGN KEY (syncs_user_id) REFERENCES syncs_user(id) ON UPDATE CASCADE ON DELETE CASCADE not valid;
alter table "public"."syncs_active" validate constraint "public_syncs_active_syncs_user_id_fkey";
alter table "public"."syncs_active" add constraint "public_syncs_active_user_id_fkey" FOREIGN KEY (user_id) REFERENCES users(id) ON UPDATE CASCADE ON DELETE CASCADE not valid;
alter table "public"."syncs_active" validate constraint "public_syncs_active_user_id_fkey";
alter table "public"."syncs_user" add constraint "public_syncs_user_user_id_fkey" FOREIGN KEY (user_id) REFERENCES users(id) ON UPDATE CASCADE ON DELETE CASCADE not valid;
alter table "public"."syncs_user" validate constraint "public_syncs_user_user_id_fkey";
grant delete on table "public"."syncs_active" to "anon";
grant insert on table "public"."syncs_active" to "anon";
grant references on table "public"."syncs_active" to "anon";
grant select on table "public"."syncs_active" to "anon";
grant trigger on table "public"."syncs_active" to "anon";
grant truncate on table "public"."syncs_active" to "anon";
grant update on table "public"."syncs_active" to "anon";
grant delete on table "public"."syncs_active" to "authenticated";
grant insert on table "public"."syncs_active" to "authenticated";
grant references on table "public"."syncs_active" to "authenticated";
grant select on table "public"."syncs_active" to "authenticated";
grant trigger on table "public"."syncs_active" to "authenticated";
grant truncate on table "public"."syncs_active" to "authenticated";
grant update on table "public"."syncs_active" to "authenticated";
grant delete on table "public"."syncs_active" to "service_role";
grant insert on table "public"."syncs_active" to "service_role";
grant references on table "public"."syncs_active" to "service_role";
grant select on table "public"."syncs_active" to "service_role";
grant trigger on table "public"."syncs_active" to "service_role";
grant truncate on table "public"."syncs_active" to "service_role";
grant update on table "public"."syncs_active" to "service_role";
grant delete on table "public"."syncs_user" to "anon";
grant insert on table "public"."syncs_user" to "anon";
grant references on table "public"."syncs_user" to "anon";
grant select on table "public"."syncs_user" to "anon";
grant trigger on table "public"."syncs_user" to "anon";
grant truncate on table "public"."syncs_user" to "anon";
grant update on table "public"."syncs_user" to "anon";
grant delete on table "public"."syncs_user" to "authenticated";
grant insert on table "public"."syncs_user" to "authenticated";
grant references on table "public"."syncs_user" to "authenticated";
grant select on table "public"."syncs_user" to "authenticated";
grant trigger on table "public"."syncs_user" to "authenticated";
grant truncate on table "public"."syncs_user" to "authenticated";
grant update on table "public"."syncs_user" to "authenticated";
grant delete on table "public"."syncs_user" to "service_role";
grant insert on table "public"."syncs_user" to "service_role";
grant references on table "public"."syncs_user" to "service_role";
grant select on table "public"."syncs_user" to "service_role";
grant trigger on table "public"."syncs_user" to "service_role";
grant truncate on table "public"."syncs_user" to "service_role";
grant update on table "public"."syncs_user" to "service_role";
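For reference, a minimal sketch of how rows in these two tables relate, using the backend's Supabase client (the inserted values are illustrative placeholders):
from models import get_supabase_client
supabase = get_supabase_client()
# A syncs_user row stores the provider connection...
sync_user = (
    supabase.table("syncs_user")
    .insert({"name": "Azure account", "provider": "Azure", "user_id": "<user-uuid>"})
    .execute()
    .data[0]
)
# ...and a syncs_active row references it through syncs_user_id
supabase.table("syncs_active").insert(
    {
        "name": "SharePoint sync",
        "syncs_user_id": sync_user["id"],
        "user_id": sync_user["user_id"],
        "settings": {"folders": []},
        "sync_interval_minutes": 360,
    }
).execute()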


@ -1,86 +0,0 @@
create table "public"."syncs_files" (
"id" bigint generated by default as identity not null,
"syncs_active_id" bigint not null,
"last_modified" timestamp with time zone not null default (now() AT TIME ZONE 'utc'::text),
"brain_id" uuid default gen_random_uuid(),
"path" text not null
);
alter table "public"."syncs_files" enable row level security;
alter table "public"."syncs_active" add column "brain_id" uuid;
CREATE UNIQUE INDEX sync_files_pkey ON public.syncs_files USING btree (id);
alter table "public"."syncs_files" add constraint "sync_files_pkey" PRIMARY KEY using index "sync_files_pkey";
alter table "public"."syncs_active" add constraint "public_syncs_active_brain_id_fkey" FOREIGN KEY (brain_id) REFERENCES brains(brain_id) ON UPDATE CASCADE ON DELETE CASCADE not valid;
alter table "public"."syncs_active" validate constraint "public_syncs_active_brain_id_fkey";
alter table "public"."syncs_files" add constraint "public_sync_files_brain_id_fkey" FOREIGN KEY (brain_id) REFERENCES brains(brain_id) ON UPDATE CASCADE ON DELETE CASCADE not valid;
alter table "public"."syncs_files" validate constraint "public_sync_files_brain_id_fkey";
alter table "public"."syncs_files" add constraint "public_sync_files_sync_active_id_fkey" FOREIGN KEY (syncs_active_id) REFERENCES syncs_active(id) ON UPDATE CASCADE ON DELETE CASCADE not valid;
alter table "public"."syncs_files" validate constraint "public_sync_files_sync_active_id_fkey";
grant delete on table "public"."syncs_files" to "anon";
grant insert on table "public"."syncs_files" to "anon";
grant references on table "public"."syncs_files" to "anon";
grant select on table "public"."syncs_files" to "anon";
grant trigger on table "public"."syncs_files" to "anon";
grant truncate on table "public"."syncs_files" to "anon";
grant update on table "public"."syncs_files" to "anon";
grant delete on table "public"."syncs_files" to "authenticated";
grant insert on table "public"."syncs_files" to "authenticated";
grant references on table "public"."syncs_files" to "authenticated";
grant select on table "public"."syncs_files" to "authenticated";
grant trigger on table "public"."syncs_files" to "authenticated";
grant truncate on table "public"."syncs_files" to "authenticated";
grant update on table "public"."syncs_files" to "authenticated";
grant delete on table "public"."syncs_files" to "service_role";
grant insert on table "public"."syncs_files" to "service_role";
grant references on table "public"."syncs_files" to "service_role";
grant select on table "public"."syncs_files" to "service_role";
grant trigger on table "public"."syncs_files" to "service_role";
grant truncate on table "public"."syncs_files" to "service_role";
grant update on table "public"."syncs_files" to "service_role";
create policy "syncs_active"
on "public"."syncs_active"
as permissive
for all
to service_role;
create policy "syncs_user"
on "public"."syncs_user"
as permissive
for all
to service_role;