feat: remove dependencies on Pydantic v1 (#3526)

# Description

By moving to LangChain >= 0.3, we can remove every dependency on Pydantic V1, avoiding the conflicts that arise from mixing Pydantic V1 with Pydantic V2.
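
In practice the change is mostly a matter of swapping the v1 shim imports for plain Pydantic v2 ones; a minimal sketch of the pattern applied throughout the codebase (class and field names are taken from the diff below):

```python
# Before (LangChain 0.2 era): the v1 shims were required to interoperate with LangChain
# from pydantic.v1 import SecretStr
# from langchain_core.pydantic_v1 import BaseModel, Field

# After (LangChain >= 0.3): plain Pydantic v2 everywhere
from pydantic import BaseModel, Field, SecretStr

class cited_answer(BaseModel):
    """Answer the user question based only on the given sources, and cite the sources used."""
    answer: str = Field(..., description="The answer, based only on the given sources.")
    citations: list[int] = Field(..., description="The integer IDs of the sources which justify the answer.")
```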



## Checklist before requesting a review

Please delete options that are not relevant.

- [ ] My code follows the style guidelines of this project
- [ ] I have performed a self-review of my code
- [ ] I have commented hard-to-understand areas
- [ ] I have ideally added tests that prove my fix is effective or that
my feature works
- [ ] New and existing unit tests pass locally with my changes
- [ ] Any dependent changes have been merged



@ -5,16 +5,16 @@ description = "Quivr core RAG package"
authors = [{ name = "Stan Girard", email = "stan@quivr.app" }]
dependencies = [
"pydantic>=2.8.2",
"langchain-core>=0.2.38",
"langchain>=0.2.14,<0.3.0",
"langgraph>=0.2.38",
"langchain-core>=0.3,<0.4",
"langchain>=0.3,<0.4",
"langgraph>=0.2.38,<0.3",
"httpx>=0.27.0",
"rich>=13.7.1",
"tiktoken>=0.7.0",
"aiofiles>=23.1.0",
"langchain-openai>=0.1.0",
"langchain-cohere>=0.1.0",
"langchain-community>=0.2.12",
"langchain-community>=0.3,<0.4",
"langchain-anthropic>=0.1.23",
"types-pyyaml>=6.0.12.20240808",
"transformers[sentencepiece]>=4.44.2",
@ -22,6 +22,7 @@ dependencies = [
"rapidfuzz>=3.10.1",
"markupsafe>=2.1.5",
"megaparse-sdk>=0.1.9",
"langchain-mistralai>=0.2.3",
]
readme = "README.md"
requires-python = ">= 3.11"
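
With these constraints in place, a quick way to confirm that the resolved stack is fully on Pydantic v2 is to check that LangChain's core message classes now subclass the v2 `BaseModel` directly; a hedged sanity check, not part of this diff:

```python
from pydantic import BaseModel
from langchain_core.messages import AIMessage

# In langchain-core >= 0.3 the message classes are plain Pydantic v2 models,
# so no pydantic.v1 shim is pulled in anywhere in the dependency tree.
assert issubclass(AIMessage, BaseModel)
print(AIMessage(content="hello").model_dump()["content"])  # v2 API: model_dump()
```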


@ -5,9 +5,10 @@ from urllib.parse import parse_qs, urlparse
import tiktoken
from langchain_anthropic import ChatAnthropic
from langchain_mistralai import ChatMistralAI
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_openai import AzureChatOpenAI, ChatOpenAI
from pydantic.v1 import SecretStr
from pydantic import SecretStr
from quivr_core.brain.info import LLMInfo
from quivr_core.rag.entities.config import DefaultModelSuppliers, LLMEndpointConfig
@ -54,7 +55,7 @@ class LLMEndpoint:
@classmethod
def from_config(cls, config: LLMEndpointConfig = LLMEndpointConfig()):
_llm: Union[AzureChatOpenAI, ChatOpenAI, ChatAnthropic]
_llm: Union[AzureChatOpenAI, ChatOpenAI, ChatAnthropic, ChatMistralAI]
try:
if config.supplier == DefaultModelSuppliers.AZURE:
# Parse the URL
@ -92,6 +93,15 @@ class LLMEndpoint:
max_tokens=config.max_output_tokens,
temperature=config.temperature,
)
elif config.supplier == DefaultModelSuppliers.MISTRAL:
_llm = ChatMistralAI(
model=config.model,
api_key=SecretStr(config.llm_api_key)
if config.llm_api_key
else None,
base_url=config.llm_base_url,
temperature=config.temperature,
)
else:
_llm = ChatOpenAI(
model=config.model,
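
For reference, a hedged sketch of how the new Mistral branch might be exercised from the public API; field names follow the ones visible in this hunk, but the exact `LLMEndpointConfig` defaults and model name are assumptions:

```python
from quivr_core.llm import LLMEndpoint
from quivr_core.rag.entities.config import DefaultModelSuppliers, LLMEndpointConfig

config = LLMEndpointConfig(
    supplier=DefaultModelSuppliers.MISTRAL,
    model="mistral-large-latest",  # illustrative model name
    llm_api_key="msk-...",         # wrapped into a Pydantic v2 SecretStr by from_config
    temperature=0.3,
)
llm_endpoint = LLMEndpoint.from_config(config)
```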


@ -4,7 +4,7 @@ from langchain_community.tools import TavilySearchResults
from langchain_community.utilities.tavily_search import TavilySearchAPIWrapper
from quivr_core.llm_tools.entity import ToolsCategory
import os
from pydantic.v1 import SecretStr as SecretStrV1 # Ensure correct import
from pydantic import SecretStr # Ensure correct import
from quivr_core.llm_tools.entity import ToolWrapper, ToolRegistry
from langchain_core.documents import Document
@ -23,7 +23,7 @@ def create_tavily_tool(config: Dict[str, Any]) -> ToolWrapper:
)
tavily_api_wrapper = TavilySearchAPIWrapper(
tavily_api_key=SecretStrV1(api_key),
tavily_api_key=SecretStr(api_key),
)
tool = TavilySearchResults(
api_wrapper=tavily_api_wrapper,
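
The only substantive change here is which `SecretStr` wraps the key: `TavilySearchAPIWrapper` from langchain-community 0.3 expects Pydantic v2's `SecretStr`, whose behaviour is sketched below (the key value is illustrative):

```python
from pydantic import SecretStr

key = SecretStr("tvly-abc123")   # illustrative key
print(key)                       # ********** -- masked in logs and reprs
print(key.get_secret_value())    # tvly-abc123 -- explicit unwrap when the request is made
```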


@ -5,35 +5,33 @@ from uuid import UUID
from langchain_core.documents import Document
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.pydantic_v1 import BaseModel as BaseModelV1
from langchain_core.pydantic_v1 import Field as FieldV1
from pydantic import BaseModel, Field
from typing_extensions import TypedDict
class cited_answer(BaseModelV1):
class cited_answer(BaseModel):
"""Answer the user question based only on the given sources, and cite the sources used."""
answer: str = FieldV1(
answer: str = Field(
...,
description="The answer to the user question, which is based only on the given sources.",
)
citations: list[int] = FieldV1(
citations: list[int] = Field(
...,
description="The integer IDs of the SPECIFIC sources which justify the answer.",
)
followup_questions: list[str] = FieldV1(
followup_questions: list[str] = Field(
...,
description="Generate up to 3 follow-up questions that could be asked based on the answer given or context provided.",
)
class ChatMessage(BaseModelV1):
class ChatMessage(BaseModel):
chat_id: UUID
message_id: UUID
brain_id: UUID | None
msg: AIMessage | HumanMessage
msg: HumanMessage | AIMessage
message_time: datetime
metadata: dict[str, Any]
@ -108,7 +106,6 @@ class QuivrKnowledge(BaseModel):
metadata: Optional[Dict[str, str]] = None
# NOTE: for compatibility issues with langchain <-> PydanticV1
class SearchResult(BaseModelV1):
class SearchResult(BaseModel):
chunk: Document
distance: float
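
Since `cited_answer` is now a plain Pydantic v2 model, it can be bound to a LangChain 0.3 chat model through the standard `with_structured_output` mechanism; the sketch below is an assumption about usage, not a description of quivr-core's retrieval graph, and the model name is illustrative:

```python
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model="gpt-4o-mini")                      # illustrative model
structured_llm = llm.with_structured_output(cited_answer)  # Pydantic v2 schema accepted directly
result = structured_llm.invoke("Which source supports the claim, and what follow-ups make sense?")
print(result.answer, result.citations, result.followup_questions)
```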


@ -21,7 +21,7 @@ aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anthropic==0.36.1
anthropic==0.40.0
# via langchain-anthropic
anyio==4.6.2.post1
# via anthropic
@ -89,12 +89,16 @@ httpcore==1.0.6
httpx==0.27.2
# via anthropic
# via cohere
# via langchain-mistralai
# via langgraph-sdk
# via langsmith
# via megaparse-sdk
# via openai
# via quivr-core
httpx-sse==0.4.0
# via cohere
# via langchain-community
# via langchain-mistralai
# via langgraph-sdk
huggingface-hub==0.25.2
# via tokenizers
@ -125,32 +129,35 @@ jupyter-client==8.6.3
jupyter-core==5.7.2
# via ipykernel
# via jupyter-client
langchain==0.2.16
langchain==0.3.9
# via langchain-community
# via quivr-core
langchain-anthropic==0.1.23
langchain-anthropic==0.3.0
# via quivr-core
langchain-cohere==0.2.4
langchain-cohere==0.3.3
# via quivr-core
langchain-community==0.2.17
langchain-community==0.3.9
# via langchain-experimental
# via quivr-core
langchain-core==0.2.41
langchain-core==0.3.21
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-community
# via langchain-experimental
# via langchain-mistralai
# via langchain-openai
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via quivr-core
langchain-experimental==0.0.65
langchain-experimental==0.3.3
# via langchain-cohere
langchain-openai==0.1.25
langchain-mistralai==0.2.3
# via quivr-core
langchain-text-splitters==0.2.4
langchain-openai==0.2.11
# via quivr-core
langchain-text-splitters==0.3.2
# via langchain
langgraph==0.2.38
# via quivr-core
@ -162,8 +169,12 @@ langsmith==0.1.135
# via langchain
# via langchain-community
# via langchain-core
loguru==0.7.2
# via megaparse-sdk
markdown-it-py==3.0.0
# via rich
markupsafe==3.0.2
# via quivr-core
marshmallow==3.22.0
# via dataclasses-json
matplotlib-inline==0.1.7
@ -173,6 +184,8 @@ mccabe==0.7.0
# via flake8
mdurl==0.1.2
# via markdown-it-py
megaparse-sdk==0.1.10
# via quivr-core
msgpack==1.1.0
# via langgraph-checkpoint
multidict==6.1.0
@ -183,6 +196,8 @@ mypy-extensions==1.0.0
# via black
# via mypy
# via typing-inspect
nats-py==2.9.0
# via megaparse-sdk
nest-asyncio==1.6.0
# via ipykernel
nodeenv==1.9.1
@ -193,7 +208,7 @@ numpy==1.26.4
# via langchain-community
# via pandas
# via transformers
openai==1.52.0
openai==1.56.2
# via langchain-openai
orjson==3.10.7
# via langgraph-sdk
@ -230,8 +245,9 @@ propcache==0.2.0
# via yarl
protobuf==5.28.2
# via transformers
psutil==6.0.0
psutil==6.1.0
# via ipykernel
# via megaparse-sdk
ptyprocess==0.7.0
# via pexpect
pure-eval==0.2.3
@ -240,17 +256,25 @@ py-cpuinfo==9.0.0
# via pytest-benchmark
pycodestyle==2.12.1
# via flake8
pycryptodome==3.21.0
# via megaparse-sdk
pydantic==2.9.2
# via anthropic
# via cohere
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-core
# via langchain-mistralai
# via langsmith
# via openai
# via pydantic-settings
# via quivr-core
pydantic-core==2.23.4
# via cohere
# via pydantic
pydantic-settings==2.6.1
# via langchain-community
pyflakes==3.2.0
# via flake8
pygments==2.18.0
@ -266,6 +290,9 @@ pytest-xdist==3.6.1
python-dateutil==2.8.2
# via jupyter-client
# via pandas
python-dotenv==1.0.1
# via megaparse-sdk
# via pydantic-settings
pytz==2024.2
# via pandas
pyyaml==6.0.2
@ -324,8 +351,8 @@ tiktoken==0.8.0
# via langchain-openai
# via quivr-core
tokenizers==0.20.1
# via anthropic
# via cohere
# via langchain-mistralai
# via transformers
tornado==6.4.1
# via ipykernel


@ -21,7 +21,7 @@ aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anthropic==0.36.1
anthropic==0.40.0
# via langchain-anthropic
anyio==4.6.2.post1
# via anthropic
@ -63,12 +63,16 @@ httpcore==1.0.6
httpx==0.27.2
# via anthropic
# via cohere
# via langchain-mistralai
# via langgraph-sdk
# via langsmith
# via megaparse-sdk
# via openai
# via quivr-core
httpx-sse==0.4.0
# via cohere
# via langchain-community
# via langchain-mistralai
# via langgraph-sdk
huggingface-hub==0.25.2
# via tokenizers
@ -85,32 +89,35 @@ jsonpatch==1.33
# via langchain-core
jsonpointer==3.0.0
# via jsonpatch
langchain==0.2.16
langchain==0.3.9
# via langchain-community
# via quivr-core
langchain-anthropic==0.1.23
langchain-anthropic==0.3.0
# via quivr-core
langchain-cohere==0.2.4
langchain-cohere==0.3.3
# via quivr-core
langchain-community==0.2.17
langchain-community==0.3.9
# via langchain-experimental
# via quivr-core
langchain-core==0.2.41
langchain-core==0.3.21
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-community
# via langchain-experimental
# via langchain-mistralai
# via langchain-openai
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via quivr-core
langchain-experimental==0.0.65
langchain-experimental==0.3.3
# via langchain-cohere
langchain-openai==0.1.25
langchain-mistralai==0.2.3
# via quivr-core
langchain-text-splitters==0.2.4
langchain-openai==0.2.11
# via quivr-core
langchain-text-splitters==0.3.2
# via langchain
langgraph==0.2.38
# via quivr-core
@ -122,12 +129,18 @@ langsmith==0.1.135
# via langchain
# via langchain-community
# via langchain-core
loguru==0.7.2
# via megaparse-sdk
markdown-it-py==3.0.0
# via rich
markupsafe==3.0.2
# via quivr-core
marshmallow==3.22.0
# via dataclasses-json
mdurl==0.1.2
# via markdown-it-py
megaparse-sdk==0.1.10
# via quivr-core
msgpack==1.1.0
# via langgraph-checkpoint
multidict==6.1.0
@ -135,13 +148,15 @@ multidict==6.1.0
# via yarl
mypy-extensions==1.0.0
# via typing-inspect
nats-py==2.9.0
# via megaparse-sdk
numpy==1.26.4
# via faiss-cpu
# via langchain
# via langchain-community
# via pandas
# via transformers
openai==1.52.0
openai==1.56.2
# via langchain-openai
orjson==3.10.7
# via langgraph-sdk
@ -160,21 +175,34 @@ propcache==0.2.0
# via yarl
protobuf==5.28.2
# via transformers
psutil==6.1.0
# via megaparse-sdk
pycryptodome==3.21.0
# via megaparse-sdk
pydantic==2.9.2
# via anthropic
# via cohere
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-core
# via langchain-mistralai
# via langsmith
# via openai
# via pydantic-settings
# via quivr-core
pydantic-core==2.23.4
# via cohere
# via pydantic
pydantic-settings==2.6.1
# via langchain-community
pygments==2.18.0
# via rich
python-dateutil==2.8.2
# via pandas
python-dotenv==1.0.1
# via megaparse-sdk
# via pydantic-settings
pytz==2024.2
# via pandas
pyyaml==6.0.2
@ -225,8 +253,8 @@ tiktoken==0.8.0
# via langchain-openai
# via quivr-core
tokenizers==0.20.1
# via anthropic
# via cohere
# via langchain-mistralai
# via transformers
tqdm==4.66.5
# via huggingface-hub


@ -2,7 +2,7 @@ import os
import pytest
from langchain_core.language_models import FakeListChatModel
from pydantic.v1.error_wrappers import ValidationError
from pydantic import ValidationError
from quivr_core.rag.entities.config import LLMEndpointConfig
from quivr_core.llm import LLMEndpoint
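
With the import switched to Pydantic v2, the tests can assert on the standard `ValidationError`; a hedged sketch of that kind of assertion (the `temperature` field name is taken from the `LLMEndpointConfig` usage elsewhere in this diff):

```python
import pytest

def test_invalid_temperature_raises():
    # Pydantic v2 rejects the non-numeric string at model construction time.
    with pytest.raises(ValidationError):
        LLMEndpointConfig(temperature="not-a-number")  # type: ignore[arg-type]
```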


@ -6,6 +6,6 @@ At each interaction with `Brain.ask_streaming` both your message and the LLM's r
You can also get some cool info about the brain by printing its details with the `print_info()` method, which shows things like how many chats are stored, the current chat history, and more. This makes it easy to keep track of what's going on in your conversations and manage the context being sent to the LLM!
::: quivr_core.chat
::: quivr_core.rag.entities.chat
options:
heading_level: 2
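
A hedged usage sketch matching the description above; it assumes a previously built `Brain` instance and that the streamed chunks expose an `answer` attribute:

```python
import asyncio

async def chat_once(brain, question: str) -> None:
    # Each interaction appends both the user message and the LLM's reply to the chat history.
    async for chunk in brain.ask_streaming(question):
        print(chunk.answer, end="", flush=True)  # `answer` attribute assumed
    print()
    brain.print_info()  # number of stored chats, current chat history, and more

# asyncio.run(chat_once(brain, "Summarise my notes"))  # with an existing Brain instance
```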


@ -1,22 +1,19 @@
# Configuration
## Retrieval Configuration
::: quivr_core.config.RetrievalConfig
::: quivr_core.rag.entities.config.RetrievalConfig
## Workflow Configuration
::: quivr_core.config.WorkflowConfig
::: quivr_core.rag.entities.config.WorkflowConfig
## LLM Configuration
::: quivr_core.config.LLMEndpointConfig
::: quivr_core.rag.entities.config.LLMEndpointConfig
## Reranker Configuration
::: quivr_core.config.RerankerConfig
::: quivr_core.rag.entities.config.RerankerConfig
## Supported LLM Model Suppliers
::: quivr_core.config.DefaultModelSuppliers
::: quivr_core.rag.entities.config.DefaultModelSuppliers
## Supported Rerankers
::: quivr_core.config.DefaultRerankers
::: quivr_core.rag.entities.config.DefaultRerankers
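
The documented classes are unchanged; only their module path moves, so imports in user code follow suit:

```python
# New import path for the configuration entities documented above.
from quivr_core.rag.entities.config import (
    DefaultModelSuppliers,
    DefaultRerankers,
    LLMEndpointConfig,
    RerankerConfig,
    RetrievalConfig,
    WorkflowConfig,
)
```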


@ -6,7 +6,7 @@ authors = [
{ name = "Stan Girard", email = "stan@quivr.app" }
]
dependencies = [
"quivr-core>=0.0.18",
"quivr-core @ file:///${PROJECT_ROOT}/../core",
"mkdocs>=1.6.1",
"mkdocstrings[python]>=0.26.0",
"mkdocs-jupyter>=0.24.8",
@ -42,5 +42,3 @@ reportMissingImports = true
[tool.hatch.metadata]
allow-direct-references = true


@ -20,11 +20,14 @@ aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anthropic==0.36.1
anthropic==0.40.0
# via langchain-anthropic
anyio==4.6.2.post1
# via anthropic
# via httpx
# via openai
appnope==0.1.4
# via ipykernel
asttokens==2.4.1
# via stack-data
attrs==24.2.0
@ -46,6 +49,8 @@ charset-normalizer==3.3.2
click==8.1.7
# via mkdocs
# via mkdocstrings
cohere==5.13.2
# via langchain-cohere
colorama==0.4.6
# via griffe
# via mkdocs-material
@ -62,8 +67,13 @@ defusedxml==0.7.1
# via nbconvert
distro==1.9.0
# via anthropic
# via openai
executing==2.1.0
# via stack-data
faiss-cpu==1.9.0.post1
# via quivr-core
fastavro==1.9.7
# via cohere
fastjsonschema==2.20.0
# via nbformat
filelock==3.16.1
@ -76,8 +86,6 @@ fsspec==2024.9.0
# via huggingface-hub
ghp-import==2.1.0
# via mkdocs
greenlet==3.1.1
# via sqlalchemy
griffe==1.2.0
# via mkdocstrings-python
h11==0.14.0
@ -86,10 +94,17 @@ httpcore==1.0.6
# via httpx
httpx==0.27.2
# via anthropic
# via cohere
# via langchain-mistralai
# via langgraph-sdk
# via langsmith
# via megaparse-sdk
# via openai
# via quivr-core
httpx-sse==0.4.0
# via cohere
# via langchain-community
# via langchain-mistralai
# via langgraph-sdk
huggingface-hub==0.25.2
# via tokenizers
@ -112,6 +127,7 @@ jinja2==3.1.4
# via nbconvert
jiter==0.6.1
# via anthropic
# via openai
jsonpatch==1.33
# via langchain-core
jsonpointer==3.0.0
@ -133,22 +149,35 @@ jupyterlab-pygments==0.3.0
# via nbconvert
jupytext==1.16.4
# via mkdocs-jupyter
langchain==0.2.16
langchain==0.3.9
# via langchain-community
# via quivr-core
langchain-anthropic==0.1.23
langchain-anthropic==0.3.0
# via quivr-core
langchain-community==0.2.17
langchain-cohere==0.3.3
# via quivr-core
langchain-core==0.2.41
langchain-community==0.3.9
# via langchain-experimental
# via quivr-core
langchain-core==0.3.21
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-community
# via langchain-experimental
# via langchain-mistralai
# via langchain-openai
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via quivr-core
langchain-text-splitters==0.2.4
langchain-experimental==0.3.3
# via langchain-cohere
langchain-mistralai==0.2.3
# via quivr-core
langchain-openai==0.2.11
# via quivr-core
langchain-text-splitters==0.3.2
# via langchain
langgraph==0.2.38
# via quivr-core
@ -160,6 +189,8 @@ langsmith==0.1.135
# via langchain
# via langchain-community
# via langchain-core
loguru==0.7.2
# via megaparse-sdk
markdown==3.7
# via mkdocs
# via mkdocs-autorefs
@ -176,6 +207,7 @@ markupsafe==2.1.5
# via mkdocs-autorefs
# via mkdocstrings
# via nbconvert
# via quivr-core
marshmallow==3.22.0
# via dataclasses-json
matplotlib-inline==0.1.7
@ -185,6 +217,8 @@ mdit-py-plugins==0.4.1
# via jupytext
mdurl==0.1.2
# via markdown-it-py
megaparse-sdk==0.1.10
# via quivr-core
mergedeep==1.3.4
# via mkdocs
# via mkdocs-get-deps
@ -220,6 +254,8 @@ multidict==6.1.0
# via yarl
mypy-extensions==1.0.0
# via typing-inspect
nats-py==2.9.0
# via megaparse-sdk
nbclient==0.10.0
# via nbconvert
nbconvert==7.16.4
@ -231,13 +267,18 @@ nbformat==5.10.4
nest-asyncio==1.6.0
# via ipykernel
numpy==1.26.4
# via faiss-cpu
# via langchain
# via langchain-community
# via pandas
# via transformers
openai==1.56.2
# via langchain-openai
orjson==3.10.7
# via langgraph-sdk
# via langsmith
packaging==24.1
# via faiss-cpu
# via huggingface-hub
# via ipykernel
# via jupytext
@ -248,8 +289,12 @@ packaging==24.1
# via transformers
paginate==0.5.7
# via mkdocs-material
pandas==2.2.3
# via langchain-cohere
pandocfilters==1.5.1
# via nbconvert
parameterized==0.9.0
# via cohere
parso==0.8.4
# via jedi
pathspec==0.12.1
@ -266,20 +311,32 @@ propcache==0.2.0
# via yarl
protobuf==5.28.2
# via transformers
psutil==6.0.0
psutil==6.1.0
# via ipykernel
# via megaparse-sdk
ptyprocess==0.7.0
# via pexpect
pure-eval==0.2.3
# via stack-data
pycryptodome==3.21.0
# via megaparse-sdk
pydantic==2.9.2
# via anthropic
# via cohere
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-core
# via langchain-mistralai
# via langsmith
# via openai
# via pydantic-settings
# via quivr-core
pydantic-core==2.23.4
# via cohere
# via pydantic
pydantic-settings==2.6.1
# via langchain-community
pygments==2.18.0
# via ipython
# via mkdocs-jupyter
@ -292,6 +349,12 @@ pymdown-extensions==10.9
python-dateutil==2.9.0.post0
# via ghp-import
# via jupyter-client
# via pandas
python-dotenv==1.0.1
# via megaparse-sdk
# via pydantic-settings
pytz==2024.2
# via pandas
pyyaml==6.0.2
# via huggingface-hub
# via jupytext
@ -308,7 +371,9 @@ pyyaml-env-tag==0.1
pyzmq==26.2.0
# via ipykernel
# via jupyter-client
quivr-core==0.0.18
quivr-core @ file:///${PROJECT_ROOT}/../core
rapidfuzz==3.10.1
# via quivr-core
referencing==0.35.1
# via jsonschema
# via jsonschema-specifications
@ -317,6 +382,7 @@ regex==2024.7.24
# via tiktoken
# via transformers
requests==2.32.3
# via cohere
# via huggingface-hub
# via langchain
# via langchain-community
@ -344,6 +410,7 @@ sniffio==1.3.1
# via anthropic
# via anyio
# via httpx
# via openai
soupsieve==2.6
# via beautifulsoup4
sqlalchemy==2.0.36
@ -351,22 +418,27 @@ sqlalchemy==2.0.36
# via langchain-community
stack-data==0.6.3
# via ipython
tabulate==0.9.0
# via langchain-cohere
tenacity==8.5.0
# via langchain
# via langchain-community
# via langchain-core
tiktoken==0.8.0
# via langchain-openai
# via quivr-core
tinycss2==1.3.0
# via nbconvert
tokenizers==0.20.1
# via anthropic
# via cohere
# via langchain-mistralai
# via transformers
tornado==6.4.1
# via ipykernel
# via jupyter-client
tqdm==4.66.5
# via huggingface-hub
# via openai
# via transformers
traitlets==5.14.3
# via comm
@ -382,19 +454,26 @@ transformers==4.45.2
# via quivr-core
types-pyyaml==6.0.12.20240917
# via quivr-core
types-requests==2.32.0.20241016
# via cohere
typing-extensions==4.12.2
# via anthropic
# via cohere
# via huggingface-hub
# via ipython
# via langchain-core
# via openai
# via pydantic
# via pydantic-core
# via sqlalchemy
# via typing-inspect
typing-inspect==0.9.0
# via dataclasses-json
tzdata==2024.2
# via pandas
urllib3==2.2.2
# via requests
# via types-requests
watchdog==5.0.0
# via mkdocs
wcwidth==0.2.13


@ -20,11 +20,14 @@ aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anthropic==0.36.1
anthropic==0.40.0
# via langchain-anthropic
anyio==4.6.2.post1
# via anthropic
# via httpx
# via openai
appnope==0.1.4
# via ipykernel
asttokens==2.4.1
# via stack-data
attrs==24.2.0
@ -46,6 +49,8 @@ charset-normalizer==3.3.2
click==8.1.7
# via mkdocs
# via mkdocstrings
cohere==5.13.2
# via langchain-cohere
colorama==0.4.6
# via griffe
# via mkdocs-material
@ -62,8 +67,13 @@ defusedxml==0.7.1
# via nbconvert
distro==1.9.0
# via anthropic
# via openai
executing==2.1.0
# via stack-data
faiss-cpu==1.9.0.post1
# via quivr-core
fastavro==1.9.7
# via cohere
fastjsonschema==2.20.0
# via nbformat
filelock==3.16.1
@ -76,8 +86,6 @@ fsspec==2024.9.0
# via huggingface-hub
ghp-import==2.1.0
# via mkdocs
greenlet==3.1.1
# via sqlalchemy
griffe==1.2.0
# via mkdocstrings-python
h11==0.14.0
@ -86,10 +94,17 @@ httpcore==1.0.6
# via httpx
httpx==0.27.2
# via anthropic
# via cohere
# via langchain-mistralai
# via langgraph-sdk
# via langsmith
# via megaparse-sdk
# via openai
# via quivr-core
httpx-sse==0.4.0
# via cohere
# via langchain-community
# via langchain-mistralai
# via langgraph-sdk
huggingface-hub==0.25.2
# via tokenizers
@ -112,6 +127,7 @@ jinja2==3.1.4
# via nbconvert
jiter==0.6.1
# via anthropic
# via openai
jsonpatch==1.33
# via langchain-core
jsonpointer==3.0.0
@ -133,22 +149,35 @@ jupyterlab-pygments==0.3.0
# via nbconvert
jupytext==1.16.4
# via mkdocs-jupyter
langchain==0.2.16
langchain==0.3.9
# via langchain-community
# via quivr-core
langchain-anthropic==0.1.23
langchain-anthropic==0.3.0
# via quivr-core
langchain-community==0.2.17
langchain-cohere==0.3.3
# via quivr-core
langchain-core==0.2.41
langchain-community==0.3.9
# via langchain-experimental
# via quivr-core
langchain-core==0.3.21
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-community
# via langchain-experimental
# via langchain-mistralai
# via langchain-openai
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via quivr-core
langchain-text-splitters==0.2.4
langchain-experimental==0.3.3
# via langchain-cohere
langchain-mistralai==0.2.3
# via quivr-core
langchain-openai==0.2.11
# via quivr-core
langchain-text-splitters==0.3.2
# via langchain
langgraph==0.2.38
# via quivr-core
@ -160,6 +189,8 @@ langsmith==0.1.135
# via langchain
# via langchain-community
# via langchain-core
loguru==0.7.2
# via megaparse-sdk
markdown==3.7
# via mkdocs
# via mkdocs-autorefs
@ -176,6 +207,7 @@ markupsafe==2.1.5
# via mkdocs-autorefs
# via mkdocstrings
# via nbconvert
# via quivr-core
marshmallow==3.22.0
# via dataclasses-json
matplotlib-inline==0.1.7
@ -185,6 +217,8 @@ mdit-py-plugins==0.4.1
# via jupytext
mdurl==0.1.2
# via markdown-it-py
megaparse-sdk==0.1.10
# via quivr-core
mergedeep==1.3.4
# via mkdocs
# via mkdocs-get-deps
@ -220,6 +254,8 @@ multidict==6.1.0
# via yarl
mypy-extensions==1.0.0
# via typing-inspect
nats-py==2.9.0
# via megaparse-sdk
nbclient==0.10.0
# via nbconvert
nbconvert==7.16.4
@ -231,13 +267,18 @@ nbformat==5.10.4
nest-asyncio==1.6.0
# via ipykernel
numpy==1.26.4
# via faiss-cpu
# via langchain
# via langchain-community
# via pandas
# via transformers
openai==1.56.2
# via langchain-openai
orjson==3.10.7
# via langgraph-sdk
# via langsmith
packaging==24.1
# via faiss-cpu
# via huggingface-hub
# via ipykernel
# via jupytext
@ -248,8 +289,12 @@ packaging==24.1
# via transformers
paginate==0.5.7
# via mkdocs-material
pandas==2.2.3
# via langchain-cohere
pandocfilters==1.5.1
# via nbconvert
parameterized==0.9.0
# via cohere
parso==0.8.4
# via jedi
pathspec==0.12.1
@ -266,20 +311,32 @@ propcache==0.2.0
# via yarl
protobuf==5.28.2
# via transformers
psutil==6.0.0
psutil==6.1.0
# via ipykernel
# via megaparse-sdk
ptyprocess==0.7.0
# via pexpect
pure-eval==0.2.3
# via stack-data
pycryptodome==3.21.0
# via megaparse-sdk
pydantic==2.9.2
# via anthropic
# via cohere
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-core
# via langchain-mistralai
# via langsmith
# via openai
# via pydantic-settings
# via quivr-core
pydantic-core==2.23.4
# via cohere
# via pydantic
pydantic-settings==2.6.1
# via langchain-community
pygments==2.18.0
# via ipython
# via mkdocs-jupyter
@ -292,6 +349,12 @@ pymdown-extensions==10.9
python-dateutil==2.9.0.post0
# via ghp-import
# via jupyter-client
# via pandas
python-dotenv==1.0.1
# via megaparse-sdk
# via pydantic-settings
pytz==2024.2
# via pandas
pyyaml==6.0.2
# via huggingface-hub
# via jupytext
@ -308,7 +371,9 @@ pyyaml-env-tag==0.1
pyzmq==26.2.0
# via ipykernel
# via jupyter-client
quivr-core==0.0.18
quivr-core @ file:///${PROJECT_ROOT}/../core
rapidfuzz==3.10.1
# via quivr-core
referencing==0.35.1
# via jsonschema
# via jsonschema-specifications
@ -317,6 +382,7 @@ regex==2024.7.24
# via tiktoken
# via transformers
requests==2.32.3
# via cohere
# via huggingface-hub
# via langchain
# via langchain-community
@ -344,6 +410,7 @@ sniffio==1.3.1
# via anthropic
# via anyio
# via httpx
# via openai
soupsieve==2.6
# via beautifulsoup4
sqlalchemy==2.0.36
@ -351,22 +418,27 @@ sqlalchemy==2.0.36
# via langchain-community
stack-data==0.6.3
# via ipython
tabulate==0.9.0
# via langchain-cohere
tenacity==8.5.0
# via langchain
# via langchain-community
# via langchain-core
tiktoken==0.8.0
# via langchain-openai
# via quivr-core
tinycss2==1.3.0
# via nbconvert
tokenizers==0.20.1
# via anthropic
# via cohere
# via langchain-mistralai
# via transformers
tornado==6.4.1
# via ipykernel
# via jupyter-client
tqdm==4.66.5
# via huggingface-hub
# via openai
# via transformers
traitlets==5.14.3
# via comm
@ -382,19 +454,26 @@ transformers==4.45.2
# via quivr-core
types-pyyaml==6.0.12.20240917
# via quivr-core
types-requests==2.32.0.20241016
# via cohere
typing-extensions==4.12.2
# via anthropic
# via cohere
# via huggingface-hub
# via ipython
# via langchain-core
# via openai
# via pydantic
# via pydantic-core
# via sqlalchemy
# via typing-inspect
typing-inspect==0.9.0
# via dataclasses-json
tzdata==2024.2
# via pandas
urllib3==2.2.2
# via requests
# via types-requests
watchdog==5.0.0
# via mkdocs
wcwidth==0.2.13


@ -17,8 +17,6 @@ aiohappyeyeballs==2.4.3
aiohttp==3.10.10
# via langchain
# via langchain-community
# via llama-index-core
# via llama-index-legacy
aiosignal==1.3.1
# via aiohttp
alembic==1.13.3
@ -27,10 +25,8 @@ aniso8601==9.0.1
# via graphene
annotated-types==0.7.0
# via pydantic
anthropic==0.36.1
anthropic==0.40.0
# via langchain-anthropic
antlr4-python3-runtime==4.9.3
# via omegaconf
anyio==4.6.2.post1
# via anthropic
# via asyncer
@ -45,12 +41,6 @@ attrs==23.2.0
# via jsonschema
# via referencing
# via sagemaker
backoff==2.2.1
# via megaparse
# via unstructured
beautifulsoup4==4.12.3
# via llama-index-readers-file
# via unstructured
bidict==0.23.1
# via python-socketio
blinker==1.8.2
@ -70,36 +60,23 @@ certifi==2024.8.30
# via httpcore
# via httpx
# via requests
cffi==1.17.1
# via cryptography
chainlit==1.3.2
chardet==5.2.0
# via unstructured
charset-normalizer==3.4.0
# via pdfminer-six
# via requests
chevron==0.14.0
# via literalai
click==8.1.7
# via chainlit
# via flask
# via llama-parse
# via mlflow-skinny
# via nltk
# via python-oxmsg
# via uvicorn
cloudpickle==2.2.1
# via mlflow-skinny
# via sagemaker
cohere==5.11.0
# via langchain-cohere
coloredlogs==15.0.1
# via onnxruntime
contourpy==1.3.0
# via matplotlib
cryptography==43.0.3
# via pdfminer-six
# via unstructured-client
cycler==0.12.1
# via matplotlib
databricks-sdk==0.34.0
@ -107,59 +84,35 @@ databricks-sdk==0.34.0
dataclasses-json==0.6.7
# via chainlit
# via langchain-community
# via llama-index-core
# via llama-index-legacy
# via unstructured
defusedxml==0.7.1
# via langchain-anthropic
deprecated==1.2.14
# via llama-index-core
# via llama-index-legacy
# via opentelemetry-api
# via opentelemetry-exporter-otlp-proto-grpc
# via opentelemetry-exporter-otlp-proto-http
# via opentelemetry-semantic-conventions
# via pikepdf
dill==0.3.9
# via multiprocess
# via pathos
dirtyjson==1.0.8
# via llama-index-core
# via llama-index-legacy
distro==1.9.0
# via anthropic
# via openai
docker==7.1.0
# via mlflow
# via sagemaker
effdet==0.4.1
# via unstructured
emoji==2.14.0
# via unstructured
et-xmlfile==2.0.0
# via openpyxl
eval-type-backport==0.2.0
# via unstructured-client
faiss-cpu==1.9.0
# via quivr-core
fastapi==0.115.5
# via chainlit
# via megaparse
fastavro==1.9.7
# via cohere
filelock==3.16.1
# via huggingface-hub
# via torch
# via transformers
# via triton
filetype==1.2.0
# via chainlit
# via llama-index-core
# via unstructured
flask==3.0.3
# via mlflow
flatbuffers==24.3.25
# via onnxruntime
fonttools==4.54.1
# via matplotlib
frozenlist==1.4.1
@ -167,26 +120,15 @@ frozenlist==1.4.1
# via aiosignal
fsspec==2024.9.0
# via huggingface-hub
# via llama-index-core
# via llama-index-legacy
# via torch
gitdb==4.0.11
# via gitpython
gitpython==3.1.43
# via mlflow-skinny
google-api-core==2.23.0
# via google-cloud-vision
google-auth==2.35.0
# via databricks-sdk
# via google-api-core
# via google-cloud-vision
google-cloud-vision==3.8.1
# via unstructured
google-pasta==0.2.0
# via sagemaker
googleapis-common-protos==1.65.0
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
# via opentelemetry-exporter-otlp-proto-http
graphene==3.3
@ -196,15 +138,8 @@ graphql-core==3.2.5
# via graphql-relay
graphql-relay==3.2.0
# via graphene
greenlet==3.1.1
# via playwright
# via sqlalchemy
grpcio==1.67.0
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
grpcio-status==1.62.3
# via google-api-core
gunicorn==23.0.0
# via mlflow
h11==0.14.0
@ -217,25 +152,20 @@ httpx==0.27.2
# via anthropic
# via chainlit
# via cohere
# via langchain-mistralai
# via langgraph-sdk
# via langsmith
# via literalai
# via llama-cloud
# via llama-index-core
# via llama-index-legacy
# via megaparse-sdk
# via openai
# via quivr-core
# via unstructured-client
httpx-sse==0.4.0
# via cohere
# via langgraph-sdk
# via langchain-community
# via langchain-mistralai
huggingface-hub==0.25.2
# via timm
# via tokenizers
# via transformers
# via unstructured-inference
humanfriendly==10.0
# via coloredlogs
idna==3.10
# via anyio
# via httpx
@ -246,14 +176,11 @@ importlib-metadata==6.11.0
# via opentelemetry-api
# via sagemaker
# via sagemaker-core
iopath==0.1.10
# via layoutparser
itsdangerous==2.2.0
# via flask
jinja2==3.1.4
# via flask
# via mlflow
# via torch
jiter==0.6.1
# via anthropic
# via openai
@ -261,12 +188,9 @@ jmespath==1.0.1
# via boto3
# via botocore
joblib==1.4.2
# via nltk
# via scikit-learn
jsonpatch==1.33
# via langchain-core
jsonpath-python==1.0.6
# via unstructured-client
jsonpointer==3.0.0
# via jsonpatch
jsonschema==4.23.0
@ -276,116 +200,56 @@ jsonschema-specifications==2024.10.1
# via jsonschema
kiwisolver==1.4.7
# via matplotlib
langchain==0.2.16
langchain==0.3.9
# via langchain-community
# via megaparse
# via quivr-core
langchain-anthropic==0.1.23
# via megaparse
langchain-anthropic==0.3.0
# via quivr-core
langchain-cohere==0.2.4
langchain-cohere==0.3.3
# via quivr-core
langchain-community==0.2.17
langchain-community==0.3.9
# via langchain-experimental
# via megaparse
# via quivr-core
langchain-core==0.2.41
langchain-core==0.3.21
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-community
# via langchain-experimental
# via langchain-mistralai
# via langchain-openai
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via megaparse
# via quivr-core
langchain-experimental==0.0.65
langchain-experimental==0.3.3
# via langchain-cohere
langchain-openai==0.1.25
# via megaparse
langchain-mistralai==0.2.3
# via quivr-core
langchain-text-splitters==0.2.4
langchain-openai==0.2.11
# via quivr-core
langchain-text-splitters==0.3.2
# via langchain
langdetect==1.0.9
# via unstructured
langgraph==0.2.38
langgraph==0.2.56
# via quivr-core
langgraph-checkpoint==2.0.1
langgraph-checkpoint==2.0.8
# via langgraph
langgraph-sdk==0.1.33
langgraph-sdk==0.1.43
# via langgraph
langsmith==0.1.135
# via langchain
# via langchain-community
# via langchain-core
layoutparser==0.3.4
# via unstructured-inference
lazify==0.4.0
# via chainlit
literalai==0.0.623
# via chainlit
llama-cloud==0.1.5
# via llama-index-indices-managed-llama-cloud
llama-index==0.11.23
# via megaparse
llama-index-agent-openai==0.3.4
# via llama-index
# via llama-index-program-openai
llama-index-cli==0.3.1
# via llama-index
llama-index-core==0.11.23
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-embeddings-openai
# via llama-index-indices-managed-llama-cloud
# via llama-index-llms-openai
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
# via llama-index-readers-file
# via llama-index-readers-llama-parse
# via llama-parse
llama-index-embeddings-openai==0.2.5
# via llama-index
# via llama-index-cli
llama-index-indices-managed-llama-cloud==0.6.0
# via llama-index
llama-index-legacy==0.9.48.post4
# via llama-index
llama-index-llms-openai==0.2.16
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
llama-index-multi-modal-llms-openai==0.2.3
# via llama-index
llama-index-program-openai==0.2.0
# via llama-index
# via llama-index-question-gen-openai
llama-index-question-gen-openai==0.2.0
# via llama-index
llama-index-readers-file==0.3.0
# via llama-index
llama-index-readers-llama-parse==0.3.0
# via llama-index
llama-parse==0.5.14
# via llama-index-readers-llama-parse
# via megaparse
lxml==5.3.0
# via pikepdf
# via python-docx
# via python-pptx
# via unstructured
loguru==0.7.2
# via megaparse-sdk
mako==1.3.5
# via alembic
markdown==3.7
# via mlflow
# via unstructured
markdown-it-py==3.0.0
# via rich
markupsafe==3.0.1
@ -397,11 +261,9 @@ marshmallow==3.22.0
# via dataclasses-json
matplotlib==3.9.2
# via mlflow
# via pycocotools
# via unstructured-inference
mdurl==0.1.2
# via markdown-it-py
megaparse==0.0.43
megaparse-sdk==0.1.10
# via quivr-core
mlflow==2.17.0
# via sagemaker-mlflow
@ -409,8 +271,6 @@ mlflow-skinny==2.17.0
# via mlflow
mock==4.0.3
# via sagemaker-core
mpmath==1.3.0
# via sympy
msgpack==1.1.0
# via langgraph-checkpoint
multidict==6.1.0
@ -420,94 +280,26 @@ multiprocess==0.70.17
# via pathos
mypy-extensions==1.0.0
# via typing-inspect
nats-py==2.9.0
# via megaparse-sdk
nest-asyncio==1.6.0
# via chainlit
# via llama-index-core
# via llama-index-legacy
# via unstructured-client
networkx==3.4.2
# via llama-index-core
# via llama-index-legacy
# via torch
# via unstructured
nltk==3.9.1
# via llama-index
# via llama-index-core
# via llama-index-legacy
# via unstructured
numpy==1.26.4
# via chainlit
# via contourpy
# via faiss-cpu
# via langchain
# via langchain-community
# via layoutparser
# via llama-index-core
# via llama-index-legacy
# via matplotlib
# via megaparse
# via mlflow
# via onnx
# via onnxruntime
# via opencv-python
# via pandas
# via pyarrow
# via pycocotools
# via sagemaker
# via scikit-learn
# via scipy
# via torchvision
# via transformers
# via unstructured
nvidia-cublas-cu12==12.4.5.8
# via nvidia-cudnn-cu12
# via nvidia-cusolver-cu12
# via torch
nvidia-cuda-cupti-cu12==12.4.127
# via torch
nvidia-cuda-nvrtc-cu12==12.4.127
# via torch
nvidia-cuda-runtime-cu12==12.4.127
# via torch
nvidia-cudnn-cu12==9.1.0.70
# via torch
nvidia-cufft-cu12==11.2.1.3
# via torch
nvidia-curand-cu12==10.3.5.147
# via torch
nvidia-cusolver-cu12==11.6.1.9
# via torch
nvidia-cusparse-cu12==12.3.1.170
# via nvidia-cusolver-cu12
# via torch
nvidia-nccl-cu12==2.21.5
# via torch
nvidia-nvjitlink-cu12==12.4.127
# via nvidia-cusolver-cu12
# via nvidia-cusparse-cu12
# via torch
nvidia-nvtx-cu12==12.4.127
# via torch
olefile==0.47
# via python-oxmsg
omegaconf==2.3.0
# via effdet
onnx==1.17.0
# via unstructured
# via unstructured-inference
onnxruntime==1.20.0
# via unstructured-inference
openai==1.51.2
openai==1.56.2
# via langchain-openai
# via llama-index-agent-openai
# via llama-index-embeddings-openai
# via llama-index-legacy
# via llama-index-llms-openai
opencv-python==4.10.0.84
# via layoutparser
# via unstructured-inference
openpyxl==3.1.5
# via unstructured
opentelemetry-api==1.27.0
# via mlflow-skinny
# via opentelemetry-exporter-otlp-proto-grpc
@ -551,81 +343,36 @@ packaging==23.2
# via marshmallow
# via matplotlib
# via mlflow-skinny
# via onnxruntime
# via pikepdf
# via pytesseract
# via sagemaker
# via transformers
# via unstructured-pytesseract
pandas==2.2.3
# via langchain-cohere
# via layoutparser
# via llama-index-legacy
# via llama-index-readers-file
# via mlflow
# via sagemaker
# via unstructured
parameterized==0.9.0
# via cohere
pathos==0.3.3
# via sagemaker
pdf2image==1.17.0
# via layoutparser
# via unstructured
pdfminer-six==20231228
# via pdfplumber
# via unstructured
pdfplumber==0.11.4
# via layoutparser
# via megaparse
pikepdf==9.4.2
# via unstructured
pillow==11.0.0
# via layoutparser
# via llama-index-core
# via matplotlib
# via pdf2image
# via pdfplumber
# via pikepdf
# via pillow-heif
# via pytesseract
# via python-pptx
# via torchvision
# via unstructured-pytesseract
pillow-heif==0.20.0
# via unstructured
platformdirs==4.3.6
# via sagemaker
# via sagemaker-core
playwright==1.48.0
# via megaparse
portalocker==3.0.0
# via iopath
pox==0.3.5
# via pathos
ppft==1.7.6.9
# via pathos
propcache==0.2.0
# via yarl
proto-plus==1.25.0
# via google-api-core
# via google-cloud-vision
protobuf==4.25.5
# via google-api-core
# via google-cloud-vision
# via googleapis-common-protos
# via grpcio-status
# via mlflow-skinny
# via onnx
# via onnxruntime
# via opentelemetry-proto
# via proto-plus
# via sagemaker
# via transformers
psutil==6.1.0
# via megaparse
# via megaparse-sdk
# via sagemaker
# via unstructured
pyarrow==17.0.0
# via mlflow
pyasn1==0.6.1
@ -633,77 +380,47 @@ pyasn1==0.6.1
# via rsa
pyasn1-modules==0.4.1
# via google-auth
pycocotools==2.0.8
# via effdet
pycparser==2.22
# via cffi
pycryptodome==3.21.0
# via megaparse
# via megaparse-sdk
pydantic==2.9.2
# via anthropic
# via chainlit
# via cohere
# via fastapi
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-core
# via langchain-mistralai
# via langsmith
# via literalai
# via llama-cloud
# via llama-index-core
# via openai
# via pydantic-settings
# via quivr-core
# via sagemaker-core
# via unstructured-client
pydantic-core==2.23.4
# via cohere
# via pydantic
pydantic-settings==2.6.1
# via megaparse
pyee==12.0.0
# via playwright
# via langchain-community
pygments==2.18.0
# via rich
pyjwt==2.9.0
# via chainlit
pypandoc==1.14
# via unstructured
pyparsing==3.2.0
# via matplotlib
pypdf==5.1.0
# via llama-index-readers-file
# via megaparse
# via unstructured
# via unstructured-client
pypdfium2==4.30.0
# via pdfplumber
pytesseract==0.3.13
# via unstructured
python-dateutil==2.8.2
# via botocore
# via matplotlib
# via pandas
# via unstructured-client
python-docx==1.1.2
# via unstructured
python-dotenv==1.0.1
# via chainlit
# via megaparse
# via megaparse-sdk
# via pydantic-settings
python-engineio==4.10.1
# via python-socketio
python-iso639==2024.10.22
# via unstructured
python-magic==0.4.27
# via megaparse
# via unstructured
python-multipart==0.0.9
# via chainlit
# via unstructured-inference
python-oxmsg==0.0.1
# via unstructured
python-pptx==0.6.23
# via unstructured
python-socketio==5.11.4
# via chainlit
pytz==2024.2
@ -713,50 +430,35 @@ pyyaml==6.0.2
# via langchain
# via langchain-community
# via langchain-core
# via layoutparser
# via llama-index-core
# via mlflow-skinny
# via omegaconf
# via sagemaker
# via sagemaker-core
# via timm
# via transformers
quivr-core @ file:///${PROJECT_ROOT}/../../core
rapidfuzz==3.10.1
# via quivr-core
# via unstructured
# via unstructured-inference
ratelimit==2.2.1
# via megaparse
referencing==0.35.1
# via jsonschema
# via jsonschema-specifications
regex==2024.9.11
# via nltk
# via tiktoken
# via transformers
requests==2.32.3
# via cohere
# via databricks-sdk
# via docker
# via google-api-core
# via huggingface-hub
# via langchain
# via langchain-community
# via langsmith
# via llama-index-core
# via llama-index-legacy
# via megaparse
# via mlflow-skinny
# via opentelemetry-exporter-otlp-proto-http
# via requests-toolbelt
# via sagemaker
# via tiktoken
# via transformers
# via unstructured
requests-toolbelt==1.0.0
# via langsmith
# via unstructured-client
rich==13.9.2
# via quivr-core
# via sagemaker-core
@ -768,7 +470,6 @@ rsa==4.9
s3transfer==0.10.3
# via boto3
safetensors==0.4.5
# via timm
# via transformers
sagemaker==2.232.2
# via cohere
@ -781,7 +482,6 @@ schema==0.7.7
scikit-learn==1.5.2
# via mlflow
scipy==1.14.1
# via layoutparser
# via mlflow
# via scikit-learn
sentencepiece==0.2.0
@ -792,7 +492,6 @@ simple-websocket==1.1.0
# via python-engineio
six==1.16.0
# via google-pasta
# via langdetect
# via python-dateutil
smdebug-rulesconfig==1.0.1
# via sagemaker
@ -803,76 +502,44 @@ sniffio==1.3.1
# via anyio
# via httpx
# via openai
soupsieve==2.6
# via beautifulsoup4
sqlalchemy==2.0.36
# via alembic
# via langchain
# via langchain-community
# via llama-index-core
# via llama-index-legacy
# via mlflow
sqlparse==0.5.1
# via mlflow-skinny
starlette==0.41.2
# via chainlit
# via fastapi
striprtf==0.0.26
# via llama-index-readers-file
sympy==1.13.1
# via onnxruntime
# via torch
syncer==2.0.3
# via chainlit
tabulate==0.9.0
# via langchain-cohere
# via unstructured
tblib==3.0.0
# via sagemaker
tenacity==8.5.0
# via langchain
# via langchain-community
# via langchain-core
# via llama-index-core
# via llama-index-legacy
threadpoolctl==3.5.0
# via scikit-learn
tiktoken==0.8.0
# via langchain-openai
# via llama-index-core
# via llama-index-legacy
# via quivr-core
timm==1.0.11
# via effdet
# via unstructured-inference
tokenizers==0.20.1
# via anthropic
# via cohere
# via langchain-mistralai
# via transformers
tomli==2.0.2
# via chainlit
torch==2.5.1
# via effdet
# via timm
# via torchvision
# via unstructured-inference
torchvision==0.20.1
# via effdet
# via timm
tqdm==4.66.5
# via huggingface-hub
# via iopath
# via llama-index-core
# via nltk
# via openai
# via sagemaker
# via transformers
# via unstructured
transformers==4.45.2
# via quivr-core
# via unstructured-inference
triton==3.1.0
# via torch
types-pyyaml==6.0.12.20240917
# via quivr-core
types-requests==2.32.0.20241016
@ -883,36 +550,17 @@ typing-extensions==4.12.2
# via cohere
# via fastapi
# via huggingface-hub
# via iopath
# via langchain-core
# via llama-index-core
# via llama-index-legacy
# via openai
# via opentelemetry-sdk
# via pydantic
# via pydantic-core
# via pyee
# via python-docx
# via python-oxmsg
# via sqlalchemy
# via torch
# via typing-inspect
# via unstructured
typing-inspect==0.9.0
# via dataclasses-json
# via llama-index-core
# via llama-index-legacy
# via unstructured-client
tzdata==2024.2
# via pandas
unstructured==0.15.0
# via megaparse
unstructured-client==0.27.0
# via unstructured
unstructured-inference==0.7.36
# via unstructured
unstructured-pytesseract==0.3.13
# via unstructured
uptrace==1.27.0
# via chainlit
urllib3==2.2.3
@ -923,24 +571,15 @@ urllib3==2.2.3
# via types-requests
uvicorn==0.25.0
# via chainlit
# via megaparse
uvloop==0.21.0
# via megaparse
watchfiles==0.20.0
# via chainlit
werkzeug==3.0.4
# via flask
wrapt==1.16.0
# via deprecated
# via llama-index-core
# via opentelemetry-instrumentation
# via unstructured
wsproto==1.2.0
# via simple-websocket
xlrd==2.0.1
# via unstructured
xlsxwriter==3.2.0
# via python-pptx
yarl==1.15.4
# via aiohttp
zipp==3.20.2


@ -17,8 +17,6 @@ aiohappyeyeballs==2.4.3
aiohttp==3.10.10
# via langchain
# via langchain-community
# via llama-index-core
# via llama-index-legacy
aiosignal==1.3.1
# via aiohttp
alembic==1.13.3
@ -27,10 +25,8 @@ aniso8601==9.0.1
# via graphene
annotated-types==0.7.0
# via pydantic
anthropic==0.36.1
anthropic==0.40.0
# via langchain-anthropic
antlr4-python3-runtime==4.9.3
# via omegaconf
anyio==4.6.2.post1
# via anthropic
# via asyncer
@ -45,12 +41,6 @@ attrs==23.2.0
# via jsonschema
# via referencing
# via sagemaker
backoff==2.2.1
# via megaparse
# via unstructured
beautifulsoup4==4.12.3
# via llama-index-readers-file
# via unstructured
bidict==0.23.1
# via python-socketio
blinker==1.8.2
@ -70,36 +60,23 @@ certifi==2024.8.30
# via httpcore
# via httpx
# via requests
cffi==1.17.1
# via cryptography
chainlit==1.3.2
chardet==5.2.0
# via unstructured
charset-normalizer==3.4.0
# via pdfminer-six
# via requests
chevron==0.14.0
# via literalai
click==8.1.7
# via chainlit
# via flask
# via llama-parse
# via mlflow-skinny
# via nltk
# via python-oxmsg
# via uvicorn
cloudpickle==2.2.1
# via mlflow-skinny
# via sagemaker
cohere==5.11.0
# via langchain-cohere
coloredlogs==15.0.1
# via onnxruntime
contourpy==1.3.0
# via matplotlib
cryptography==43.0.3
# via pdfminer-six
# via unstructured-client
cycler==0.12.1
# via matplotlib
databricks-sdk==0.34.0
@ -107,59 +84,35 @@ databricks-sdk==0.34.0
dataclasses-json==0.6.7
# via chainlit
# via langchain-community
# via llama-index-core
# via llama-index-legacy
# via unstructured
defusedxml==0.7.1
# via langchain-anthropic
deprecated==1.2.14
# via llama-index-core
# via llama-index-legacy
# via opentelemetry-api
# via opentelemetry-exporter-otlp-proto-grpc
# via opentelemetry-exporter-otlp-proto-http
# via opentelemetry-semantic-conventions
# via pikepdf
dill==0.3.9
# via multiprocess
# via pathos
dirtyjson==1.0.8
# via llama-index-core
# via llama-index-legacy
distro==1.9.0
# via anthropic
# via openai
docker==7.1.0
# via mlflow
# via sagemaker
effdet==0.4.1
# via unstructured
emoji==2.14.0
# via unstructured
et-xmlfile==2.0.0
# via openpyxl
eval-type-backport==0.2.0
# via unstructured-client
faiss-cpu==1.9.0
# via quivr-core
fastapi==0.115.5
# via chainlit
# via megaparse
fastavro==1.9.7
# via cohere
filelock==3.16.1
# via huggingface-hub
# via torch
# via transformers
# via triton
filetype==1.2.0
# via chainlit
# via llama-index-core
# via unstructured
flask==3.0.3
# via mlflow
flatbuffers==24.3.25
# via onnxruntime
fonttools==4.54.1
# via matplotlib
frozenlist==1.4.1
@ -167,26 +120,15 @@ frozenlist==1.4.1
# via aiosignal
fsspec==2024.9.0
# via huggingface-hub
# via llama-index-core
# via llama-index-legacy
# via torch
gitdb==4.0.11
# via gitpython
gitpython==3.1.43
# via mlflow-skinny
google-api-core==2.23.0
# via google-cloud-vision
google-auth==2.35.0
# via databricks-sdk
# via google-api-core
# via google-cloud-vision
google-cloud-vision==3.8.1
# via unstructured
google-pasta==0.2.0
# via sagemaker
googleapis-common-protos==1.65.0
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
# via opentelemetry-exporter-otlp-proto-http
graphene==3.3
@ -196,15 +138,8 @@ graphql-core==3.2.5
# via graphql-relay
graphql-relay==3.2.0
# via graphene
greenlet==3.1.1
# via playwright
# via sqlalchemy
grpcio==1.67.0
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
grpcio-status==1.62.3
# via google-api-core
gunicorn==23.0.0
# via mlflow
h11==0.14.0
@ -217,25 +152,20 @@ httpx==0.27.2
# via anthropic
# via chainlit
# via cohere
# via langchain-mistralai
# via langgraph-sdk
# via langsmith
# via literalai
# via llama-cloud
# via llama-index-core
# via llama-index-legacy
# via megaparse-sdk
# via openai
# via quivr-core
# via unstructured-client
httpx-sse==0.4.0
# via cohere
# via langgraph-sdk
# via langchain-community
# via langchain-mistralai
huggingface-hub==0.25.2
# via timm
# via tokenizers
# via transformers
# via unstructured-inference
humanfriendly==10.0
# via coloredlogs
idna==3.10
# via anyio
# via httpx
@ -246,14 +176,11 @@ importlib-metadata==6.11.0
# via opentelemetry-api
# via sagemaker
# via sagemaker-core
iopath==0.1.10
# via layoutparser
itsdangerous==2.2.0
# via flask
jinja2==3.1.4
# via flask
# via mlflow
# via torch
jiter==0.6.1
# via anthropic
# via openai
@ -261,12 +188,9 @@ jmespath==1.0.1
# via boto3
# via botocore
joblib==1.4.2
# via nltk
# via scikit-learn
jsonpatch==1.33
# via langchain-core
jsonpath-python==1.0.6
# via unstructured-client
jsonpointer==3.0.0
# via jsonpatch
jsonschema==4.23.0
@ -276,116 +200,56 @@ jsonschema-specifications==2024.10.1
# via jsonschema
kiwisolver==1.4.7
# via matplotlib
langchain==0.2.16
langchain==0.3.9
# via langchain-community
# via megaparse
# via quivr-core
langchain-anthropic==0.1.23
# via megaparse
langchain-anthropic==0.3.0
# via quivr-core
langchain-cohere==0.2.4
langchain-cohere==0.3.3
# via quivr-core
langchain-community==0.2.17
langchain-community==0.3.9
# via langchain-experimental
# via megaparse
# via quivr-core
langchain-core==0.2.41
langchain-core==0.3.21
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-community
# via langchain-experimental
# via langchain-mistralai
# via langchain-openai
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via megaparse
# via quivr-core
langchain-experimental==0.0.65
langchain-experimental==0.3.3
# via langchain-cohere
langchain-openai==0.1.25
# via megaparse
langchain-mistralai==0.2.3
# via quivr-core
langchain-text-splitters==0.2.4
langchain-openai==0.2.11
# via quivr-core
langchain-text-splitters==0.3.2
# via langchain
langdetect==1.0.9
# via unstructured
langgraph==0.2.38
langgraph==0.2.56
# via quivr-core
langgraph-checkpoint==2.0.1
langgraph-checkpoint==2.0.8
# via langgraph
langgraph-sdk==0.1.33
langgraph-sdk==0.1.43
# via langgraph
langsmith==0.1.135
# via langchain
# via langchain-community
# via langchain-core
layoutparser==0.3.4
# via unstructured-inference
lazify==0.4.0
# via chainlit
literalai==0.0.623
# via chainlit
llama-cloud==0.1.5
# via llama-index-indices-managed-llama-cloud
llama-index==0.11.23
# via megaparse
llama-index-agent-openai==0.3.4
# via llama-index
# via llama-index-program-openai
llama-index-cli==0.3.1
# via llama-index
llama-index-core==0.11.23
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-embeddings-openai
# via llama-index-indices-managed-llama-cloud
# via llama-index-llms-openai
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
# via llama-index-readers-file
# via llama-index-readers-llama-parse
# via llama-parse
llama-index-embeddings-openai==0.2.5
# via llama-index
# via llama-index-cli
llama-index-indices-managed-llama-cloud==0.6.0
# via llama-index
llama-index-legacy==0.9.48.post4
# via llama-index
llama-index-llms-openai==0.2.16
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
llama-index-multi-modal-llms-openai==0.2.3
# via llama-index
llama-index-program-openai==0.2.0
# via llama-index
# via llama-index-question-gen-openai
llama-index-question-gen-openai==0.2.0
# via llama-index
llama-index-readers-file==0.3.0
# via llama-index
llama-index-readers-llama-parse==0.3.0
# via llama-index
llama-parse==0.5.14
# via llama-index-readers-llama-parse
# via megaparse
lxml==5.3.0
# via pikepdf
# via python-docx
# via python-pptx
# via unstructured
loguru==0.7.2
# via megaparse-sdk
mako==1.3.5
# via alembic
markdown==3.7
# via mlflow
# via unstructured
markdown-it-py==3.0.0
# via rich
markupsafe==3.0.1
@ -397,11 +261,9 @@ marshmallow==3.22.0
# via dataclasses-json
matplotlib==3.9.2
# via mlflow
# via pycocotools
# via unstructured-inference
mdurl==0.1.2
# via markdown-it-py
megaparse==0.0.43
megaparse-sdk==0.1.10
# via quivr-core
mlflow==2.17.0
# via sagemaker-mlflow
@ -409,8 +271,6 @@ mlflow-skinny==2.17.0
# via mlflow
mock==4.0.3
# via sagemaker-core
mpmath==1.3.0
# via sympy
msgpack==1.1.0
# via langgraph-checkpoint
multidict==6.1.0
@ -420,94 +280,26 @@ multiprocess==0.70.17
# via pathos
mypy-extensions==1.0.0
# via typing-inspect
nats-py==2.9.0
# via megaparse-sdk
nest-asyncio==1.6.0
# via chainlit
# via llama-index-core
# via llama-index-legacy
# via unstructured-client
networkx==3.4.2
# via llama-index-core
# via llama-index-legacy
# via torch
# via unstructured
nltk==3.9.1
# via llama-index
# via llama-index-core
# via llama-index-legacy
# via unstructured
numpy==1.26.4
# via chainlit
# via contourpy
# via faiss-cpu
# via langchain
# via langchain-community
# via layoutparser
# via llama-index-core
# via llama-index-legacy
# via matplotlib
# via megaparse
# via mlflow
# via onnx
# via onnxruntime
# via opencv-python
# via pandas
# via pyarrow
# via pycocotools
# via sagemaker
# via scikit-learn
# via scipy
# via torchvision
# via transformers
# via unstructured
nvidia-cublas-cu12==12.4.5.8
# via nvidia-cudnn-cu12
# via nvidia-cusolver-cu12
# via torch
nvidia-cuda-cupti-cu12==12.4.127
# via torch
nvidia-cuda-nvrtc-cu12==12.4.127
# via torch
nvidia-cuda-runtime-cu12==12.4.127
# via torch
nvidia-cudnn-cu12==9.1.0.70
# via torch
nvidia-cufft-cu12==11.2.1.3
# via torch
nvidia-curand-cu12==10.3.5.147
# via torch
nvidia-cusolver-cu12==11.6.1.9
# via torch
nvidia-cusparse-cu12==12.3.1.170
# via nvidia-cusolver-cu12
# via torch
nvidia-nccl-cu12==2.21.5
# via torch
nvidia-nvjitlink-cu12==12.4.127
# via nvidia-cusolver-cu12
# via nvidia-cusparse-cu12
# via torch
nvidia-nvtx-cu12==12.4.127
# via torch
olefile==0.47
# via python-oxmsg
omegaconf==2.3.0
# via effdet
onnx==1.17.0
# via unstructured
# via unstructured-inference
onnxruntime==1.20.0
# via unstructured-inference
openai==1.51.2
openai==1.56.2
# via langchain-openai
# via llama-index-agent-openai
# via llama-index-embeddings-openai
# via llama-index-legacy
# via llama-index-llms-openai
opencv-python==4.10.0.84
# via layoutparser
# via unstructured-inference
openpyxl==3.1.5
# via unstructured
opentelemetry-api==1.27.0
# via mlflow-skinny
# via opentelemetry-exporter-otlp-proto-grpc
@ -551,81 +343,36 @@ packaging==23.2
# via marshmallow
# via matplotlib
# via mlflow-skinny
# via onnxruntime
# via pikepdf
# via pytesseract
# via sagemaker
# via transformers
# via unstructured-pytesseract
pandas==2.2.3
# via langchain-cohere
# via layoutparser
# via llama-index-legacy
# via llama-index-readers-file
# via mlflow
# via sagemaker
# via unstructured
parameterized==0.9.0
# via cohere
pathos==0.3.3
# via sagemaker
pdf2image==1.17.0
# via layoutparser
# via unstructured
pdfminer-six==20231228
# via pdfplumber
# via unstructured
pdfplumber==0.11.4
# via layoutparser
# via megaparse
pikepdf==9.4.2
# via unstructured
pillow==11.0.0
# via layoutparser
# via llama-index-core
# via matplotlib
# via pdf2image
# via pdfplumber
# via pikepdf
# via pillow-heif
# via pytesseract
# via python-pptx
# via torchvision
# via unstructured-pytesseract
pillow-heif==0.20.0
# via unstructured
platformdirs==4.3.6
# via sagemaker
# via sagemaker-core
playwright==1.48.0
# via megaparse
portalocker==3.0.0
# via iopath
pox==0.3.5
# via pathos
ppft==1.7.6.9
# via pathos
propcache==0.2.0
# via yarl
proto-plus==1.25.0
# via google-api-core
# via google-cloud-vision
protobuf==4.25.5
# via google-api-core
# via google-cloud-vision
# via googleapis-common-protos
# via grpcio-status
# via mlflow-skinny
# via onnx
# via onnxruntime
# via opentelemetry-proto
# via proto-plus
# via sagemaker
# via transformers
psutil==6.1.0
# via megaparse
# via megaparse-sdk
# via sagemaker
# via unstructured
pyarrow==17.0.0
# via mlflow
pyasn1==0.6.1
@ -633,77 +380,47 @@ pyasn1==0.6.1
# via rsa
pyasn1-modules==0.4.1
# via google-auth
pycocotools==2.0.8
# via effdet
pycparser==2.22
# via cffi
pycryptodome==3.21.0
# via megaparse
# via megaparse-sdk
pydantic==2.9.2
# via anthropic
# via chainlit
# via cohere
# via fastapi
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-core
# via langchain-mistralai
# via langsmith
# via literalai
# via llama-cloud
# via llama-index-core
# via openai
# via pydantic-settings
# via quivr-core
# via sagemaker-core
# via unstructured-client
pydantic-core==2.23.4
# via cohere
# via pydantic
pydantic-settings==2.6.1
# via megaparse
pyee==12.0.0
# via playwright
# via langchain-community
pygments==2.18.0
# via rich
pyjwt==2.9.0
# via chainlit
pypandoc==1.14
# via unstructured
pyparsing==3.2.0
# via matplotlib
pypdf==5.1.0
# via llama-index-readers-file
# via megaparse
# via unstructured
# via unstructured-client
pypdfium2==4.30.0
# via pdfplumber
pytesseract==0.3.13
# via unstructured
python-dateutil==2.8.2
# via botocore
# via matplotlib
# via pandas
# via unstructured-client
python-docx==1.1.2
# via unstructured
python-dotenv==1.0.1
# via chainlit
# via megaparse
# via megaparse-sdk
# via pydantic-settings
python-engineio==4.10.1
# via python-socketio
python-iso639==2024.10.22
# via unstructured
python-magic==0.4.27
# via megaparse
# via unstructured
python-multipart==0.0.9
# via chainlit
# via unstructured-inference
python-oxmsg==0.0.1
# via unstructured
python-pptx==0.6.23
# via unstructured
python-socketio==5.11.4
# via chainlit
pytz==2024.2
@ -713,50 +430,35 @@ pyyaml==6.0.2
# via langchain
# via langchain-community
# via langchain-core
# via layoutparser
# via llama-index-core
# via mlflow-skinny
# via omegaconf
# via sagemaker
# via sagemaker-core
# via timm
# via transformers
quivr-core @ file:///${PROJECT_ROOT}/../../core
rapidfuzz==3.10.1
# via quivr-core
# via unstructured
# via unstructured-inference
ratelimit==2.2.1
# via megaparse
referencing==0.35.1
# via jsonschema
# via jsonschema-specifications
regex==2024.9.11
# via nltk
# via tiktoken
# via transformers
requests==2.32.3
# via cohere
# via databricks-sdk
# via docker
# via google-api-core
# via huggingface-hub
# via langchain
# via langchain-community
# via langsmith
# via llama-index-core
# via llama-index-legacy
# via megaparse
# via mlflow-skinny
# via opentelemetry-exporter-otlp-proto-http
# via requests-toolbelt
# via sagemaker
# via tiktoken
# via transformers
# via unstructured
requests-toolbelt==1.0.0
# via langsmith
# via unstructured-client
rich==13.9.2
# via quivr-core
# via sagemaker-core
@ -768,7 +470,6 @@ rsa==4.9
s3transfer==0.10.3
# via boto3
safetensors==0.4.5
# via timm
# via transformers
sagemaker==2.232.2
# via cohere
@ -781,7 +482,6 @@ schema==0.7.7
scikit-learn==1.5.2
# via mlflow
scipy==1.14.1
# via layoutparser
# via mlflow
# via scikit-learn
sentencepiece==0.2.0
@ -792,7 +492,6 @@ simple-websocket==1.1.0
# via python-engineio
six==1.16.0
# via google-pasta
# via langdetect
# via python-dateutil
smdebug-rulesconfig==1.0.1
# via sagemaker
@ -803,76 +502,44 @@ sniffio==1.3.1
# via anyio
# via httpx
# via openai
soupsieve==2.6
# via beautifulsoup4
sqlalchemy==2.0.36
# via alembic
# via langchain
# via langchain-community
# via llama-index-core
# via llama-index-legacy
# via mlflow
sqlparse==0.5.1
# via mlflow-skinny
starlette==0.41.2
# via chainlit
# via fastapi
striprtf==0.0.26
# via llama-index-readers-file
sympy==1.13.1
# via onnxruntime
# via torch
syncer==2.0.3
# via chainlit
tabulate==0.9.0
# via langchain-cohere
# via unstructured
tblib==3.0.0
# via sagemaker
tenacity==8.5.0
# via langchain
# via langchain-community
# via langchain-core
# via llama-index-core
# via llama-index-legacy
threadpoolctl==3.5.0
# via scikit-learn
tiktoken==0.8.0
# via langchain-openai
# via llama-index-core
# via llama-index-legacy
# via quivr-core
timm==1.0.11
# via effdet
# via unstructured-inference
tokenizers==0.20.1
# via anthropic
# via cohere
# via langchain-mistralai
# via transformers
tomli==2.0.2
# via chainlit
torch==2.5.1
# via effdet
# via timm
# via torchvision
# via unstructured-inference
torchvision==0.20.1
# via effdet
# via timm
tqdm==4.66.5
# via huggingface-hub
# via iopath
# via llama-index-core
# via nltk
# via openai
# via sagemaker
# via transformers
# via unstructured
transformers==4.45.2
# via quivr-core
# via unstructured-inference
triton==3.1.0
# via torch
types-pyyaml==6.0.12.20240917
# via quivr-core
types-requests==2.32.0.20241016
@ -883,36 +550,17 @@ typing-extensions==4.12.2
# via cohere
# via fastapi
# via huggingface-hub
# via iopath
# via langchain-core
# via llama-index-core
# via llama-index-legacy
# via openai
# via opentelemetry-sdk
# via pydantic
# via pydantic-core
# via pyee
# via python-docx
# via python-oxmsg
# via sqlalchemy
# via torch
# via typing-inspect
# via unstructured
typing-inspect==0.9.0
# via dataclasses-json
# via llama-index-core
# via llama-index-legacy
# via unstructured-client
tzdata==2024.2
# via pandas
unstructured==0.15.0
# via megaparse
unstructured-client==0.27.0
# via unstructured
unstructured-inference==0.7.36
# via unstructured
unstructured-pytesseract==0.3.13
# via unstructured
uptrace==1.27.0
# via chainlit
urllib3==2.2.3
@ -923,24 +571,15 @@ urllib3==2.2.3
# via types-requests
uvicorn==0.25.0
# via chainlit
# via megaparse
uvloop==0.21.0
# via megaparse
watchfiles==0.20.0
# via chainlit
werkzeug==3.0.4
# via flask
wrapt==1.16.0
# via deprecated
# via llama-index-core
# via opentelemetry-instrumentation
# via unstructured
wsproto==1.2.0
# via simple-websocket
xlrd==2.0.1
# via unstructured
xlsxwriter==3.2.0
# via python-pptx
yarl==1.15.4
# via aiohttp
zipp==3.20.2

@ -17,8 +17,6 @@ aiohappyeyeballs==2.4.3
aiohttp==3.10.10
# via langchain
# via langchain-community
# via llama-index-core
# via llama-index-legacy
aiosignal==1.3.1
# via aiohttp
alembic==1.13.3
@ -27,10 +25,8 @@ aniso8601==9.0.1
# via graphene
annotated-types==0.7.0
# via pydantic
anthropic==0.36.1
anthropic==0.40.0
# via langchain-anthropic
antlr4-python3-runtime==4.9.3
# via omegaconf
anyio==4.6.2.post1
# via anthropic
# via asyncer
@ -45,12 +41,6 @@ attrs==23.2.0
# via jsonschema
# via referencing
# via sagemaker
backoff==2.2.1
# via megaparse
# via unstructured
beautifulsoup4==4.12.3
# via llama-index-readers-file
# via unstructured
bidict==0.23.1
# via python-socketio
blinker==1.8.2
@ -70,36 +60,23 @@ certifi==2024.8.30
# via httpcore
# via httpx
# via requests
cffi==1.17.1
# via cryptography
chainlit==1.3.2
chardet==5.2.0
# via unstructured
charset-normalizer==3.4.0
# via pdfminer-six
# via requests
chevron==0.14.0
# via literalai
click==8.1.7
# via chainlit
# via flask
# via llama-parse
# via mlflow-skinny
# via nltk
# via python-oxmsg
# via uvicorn
cloudpickle==2.2.1
# via mlflow-skinny
# via sagemaker
cohere==5.11.0
# via langchain-cohere
coloredlogs==15.0.1
# via onnxruntime
contourpy==1.3.0
# via matplotlib
cryptography==43.0.3
# via pdfminer-six
# via unstructured-client
cycler==0.12.1
# via matplotlib
databricks-sdk==0.34.0
@ -107,59 +84,35 @@ databricks-sdk==0.34.0
dataclasses-json==0.6.7
# via chainlit
# via langchain-community
# via llama-index-core
# via llama-index-legacy
# via unstructured
defusedxml==0.7.1
# via langchain-anthropic
deprecated==1.2.14
# via llama-index-core
# via llama-index-legacy
# via opentelemetry-api
# via opentelemetry-exporter-otlp-proto-grpc
# via opentelemetry-exporter-otlp-proto-http
# via opentelemetry-semantic-conventions
# via pikepdf
dill==0.3.9
# via multiprocess
# via pathos
dirtyjson==1.0.8
# via llama-index-core
# via llama-index-legacy
distro==1.9.0
# via anthropic
# via openai
docker==7.1.0
# via mlflow
# via sagemaker
effdet==0.4.1
# via unstructured
emoji==2.14.0
# via unstructured
et-xmlfile==2.0.0
# via openpyxl
eval-type-backport==0.2.0
# via unstructured-client
faiss-cpu==1.9.0
# via quivr-core
fastapi==0.115.5
# via chainlit
# via megaparse
fastavro==1.9.7
# via cohere
filelock==3.16.1
# via huggingface-hub
# via torch
# via transformers
# via triton
filetype==1.2.0
# via chainlit
# via llama-index-core
# via unstructured
flask==3.0.3
# via mlflow
flatbuffers==24.3.25
# via onnxruntime
fonttools==4.54.1
# via matplotlib
frozenlist==1.4.1
@ -167,26 +120,15 @@ frozenlist==1.4.1
# via aiosignal
fsspec==2024.9.0
# via huggingface-hub
# via llama-index-core
# via llama-index-legacy
# via torch
gitdb==4.0.11
# via gitpython
gitpython==3.1.43
# via mlflow-skinny
google-api-core==2.23.0
# via google-cloud-vision
google-auth==2.35.0
# via databricks-sdk
# via google-api-core
# via google-cloud-vision
google-cloud-vision==3.8.1
# via unstructured
google-pasta==0.2.0
# via sagemaker
googleapis-common-protos==1.65.0
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
# via opentelemetry-exporter-otlp-proto-http
graphene==3.3
@ -196,15 +138,8 @@ graphql-core==3.2.5
# via graphql-relay
graphql-relay==3.2.0
# via graphene
greenlet==3.1.1
# via playwright
# via sqlalchemy
grpcio==1.67.0
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
grpcio-status==1.62.3
# via google-api-core
gunicorn==23.0.0
# via mlflow
h11==0.14.0
@ -217,25 +152,21 @@ httpx==0.27.2
# via anthropic
# via chainlit
# via cohere
# via langchain-mistralai
# via langgraph-sdk
# via langsmith
# via literalai
# via llama-cloud
# via llama-index-core
# via llama-index-legacy
# via megaparse-sdk
# via openai
# via quivr-core
# via unstructured-client
httpx-sse==0.4.0
# via cohere
# via langchain-community
# via langchain-mistralai
# via langgraph-sdk
huggingface-hub==0.25.2
# via timm
# via tokenizers
# via transformers
# via unstructured-inference
humanfriendly==10.0
# via coloredlogs
idna==3.10
# via anyio
# via httpx
@ -246,14 +177,11 @@ importlib-metadata==6.11.0
# via opentelemetry-api
# via sagemaker
# via sagemaker-core
iopath==0.1.10
# via layoutparser
itsdangerous==2.2.0
# via flask
jinja2==3.1.4
# via flask
# via mlflow
# via torch
jiter==0.6.1
# via anthropic
# via openai
@ -261,12 +189,9 @@ jmespath==1.0.1
# via boto3
# via botocore
joblib==1.4.2
# via nltk
# via scikit-learn
jsonpatch==1.33
# via langchain-core
jsonpath-python==1.0.6
# via unstructured-client
jsonpointer==3.0.0
# via jsonpatch
jsonschema==4.23.0
@ -276,40 +201,36 @@ jsonschema-specifications==2024.10.1
# via jsonschema
kiwisolver==1.4.7
# via matplotlib
langchain==0.2.16
langchain==0.3.9
# via langchain-community
# via megaparse
# via quivr-core
langchain-anthropic==0.1.23
# via megaparse
langchain-anthropic==0.3.0
# via quivr-core
langchain-cohere==0.2.4
langchain-cohere==0.3.3
# via quivr-core
langchain-community==0.2.17
langchain-community==0.3.9
# via langchain-experimental
# via megaparse
# via quivr-core
langchain-core==0.2.41
langchain-core==0.3.21
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-community
# via langchain-experimental
# via langchain-mistralai
# via langchain-openai
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via megaparse
# via quivr-core
langchain-experimental==0.0.65
langchain-experimental==0.3.3
# via langchain-cohere
langchain-openai==0.1.25
# via megaparse
langchain-mistralai==0.2.3
# via quivr-core
langchain-text-splitters==0.2.4
langchain-openai==0.2.11
# via quivr-core
langchain-text-splitters==0.3.2
# via langchain
langdetect==1.0.9
# via unstructured
langgraph==0.2.38
# via quivr-core
langgraph-checkpoint==2.0.1
@ -320,72 +241,16 @@ langsmith==0.1.135
# via langchain
# via langchain-community
# via langchain-core
layoutparser==0.3.4
# via unstructured-inference
lazify==0.4.0
# via chainlit
literalai==0.0.623
# via chainlit
llama-cloud==0.1.5
# via llama-index-indices-managed-llama-cloud
llama-index==0.11.23
# via megaparse
llama-index-agent-openai==0.3.4
# via llama-index
# via llama-index-program-openai
llama-index-cli==0.3.1
# via llama-index
llama-index-core==0.11.23
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-embeddings-openai
# via llama-index-indices-managed-llama-cloud
# via llama-index-llms-openai
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
# via llama-index-readers-file
# via llama-index-readers-llama-parse
# via llama-parse
llama-index-embeddings-openai==0.2.5
# via llama-index
# via llama-index-cli
llama-index-indices-managed-llama-cloud==0.6.0
# via llama-index
llama-index-legacy==0.9.48.post4
# via llama-index
llama-index-llms-openai==0.2.16
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
llama-index-multi-modal-llms-openai==0.2.3
# via llama-index
llama-index-program-openai==0.2.0
# via llama-index
# via llama-index-question-gen-openai
llama-index-question-gen-openai==0.2.0
# via llama-index
llama-index-readers-file==0.3.0
# via llama-index
llama-index-readers-llama-parse==0.3.0
# via llama-index
llama-parse==0.5.14
# via llama-index-readers-llama-parse
# via megaparse
lxml==5.3.0
# via pikepdf
# via python-docx
# via python-pptx
# via unstructured
loguru==0.7.2
# via megaparse-sdk
mako==1.3.5
# via alembic
markdown==3.7
# via mlflow
# via unstructured
markdown-it-py==3.0.0
# via rich
markupsafe==3.0.1
@ -397,11 +262,9 @@ marshmallow==3.22.0
# via dataclasses-json
matplotlib==3.9.2
# via mlflow
# via pycocotools
# via unstructured-inference
mdurl==0.1.2
# via markdown-it-py
megaparse==0.0.43
megaparse-sdk==0.1.10
# via quivr-core
mlflow==2.17.0
# via sagemaker-mlflow
@ -409,8 +272,6 @@ mlflow-skinny==2.17.0
# via mlflow
mock==4.0.3
# via sagemaker-core
mpmath==1.3.0
# via sympy
msgpack==1.1.0
# via langgraph-checkpoint
multidict==6.1.0
@ -420,94 +281,26 @@ multiprocess==0.70.17
# via pathos
mypy-extensions==1.0.0
# via typing-inspect
nats-py==2.9.0
# via megaparse-sdk
nest-asyncio==1.6.0
# via chainlit
# via llama-index-core
# via llama-index-legacy
# via unstructured-client
networkx==3.4.2
# via llama-index-core
# via llama-index-legacy
# via torch
# via unstructured
nltk==3.9.1
# via llama-index
# via llama-index-core
# via llama-index-legacy
# via unstructured
numpy==1.26.4
# via chainlit
# via contourpy
# via faiss-cpu
# via langchain
# via langchain-community
# via layoutparser
# via llama-index-core
# via llama-index-legacy
# via matplotlib
# via megaparse
# via mlflow
# via onnx
# via onnxruntime
# via opencv-python
# via pandas
# via pyarrow
# via pycocotools
# via sagemaker
# via scikit-learn
# via scipy
# via torchvision
# via transformers
# via unstructured
nvidia-cublas-cu12==12.4.5.8
# via nvidia-cudnn-cu12
# via nvidia-cusolver-cu12
# via torch
nvidia-cuda-cupti-cu12==12.4.127
# via torch
nvidia-cuda-nvrtc-cu12==12.4.127
# via torch
nvidia-cuda-runtime-cu12==12.4.127
# via torch
nvidia-cudnn-cu12==9.1.0.70
# via torch
nvidia-cufft-cu12==11.2.1.3
# via torch
nvidia-curand-cu12==10.3.5.147
# via torch
nvidia-cusolver-cu12==11.6.1.9
# via torch
nvidia-cusparse-cu12==12.3.1.170
# via nvidia-cusolver-cu12
# via torch
nvidia-nccl-cu12==2.21.5
# via torch
nvidia-nvjitlink-cu12==12.4.127
# via nvidia-cusolver-cu12
# via nvidia-cusparse-cu12
# via torch
nvidia-nvtx-cu12==12.4.127
# via torch
olefile==0.47
# via python-oxmsg
omegaconf==2.3.0
# via effdet
onnx==1.17.0
# via unstructured
# via unstructured-inference
onnxruntime==1.20.0
# via unstructured-inference
openai==1.54.5
# via langchain-openai
# via llama-index-agent-openai
# via llama-index-embeddings-openai
# via llama-index-legacy
# via llama-index-llms-openai
opencv-python==4.10.0.84
# via layoutparser
# via unstructured-inference
openpyxl==3.1.5
# via unstructured
opentelemetry-api==1.27.0
# via mlflow-skinny
# via opentelemetry-exporter-otlp-proto-grpc
@ -551,81 +344,36 @@ packaging==23.2
# via marshmallow
# via matplotlib
# via mlflow-skinny
# via onnxruntime
# via pikepdf
# via pytesseract
# via sagemaker
# via transformers
# via unstructured-pytesseract
pandas==2.2.3
# via langchain-cohere
# via layoutparser
# via llama-index-legacy
# via llama-index-readers-file
# via mlflow
# via sagemaker
# via unstructured
parameterized==0.9.0
# via cohere
pathos==0.3.3
# via sagemaker
pdf2image==1.17.0
# via layoutparser
# via unstructured
pdfminer-six==20231228
# via pdfplumber
# via unstructured
pdfplumber==0.11.4
# via layoutparser
# via megaparse
pikepdf==9.4.2
# via unstructured
pillow==11.0.0
# via layoutparser
# via llama-index-core
# via matplotlib
# via pdf2image
# via pdfplumber
# via pikepdf
# via pillow-heif
# via pytesseract
# via python-pptx
# via torchvision
# via unstructured-pytesseract
pillow-heif==0.20.0
# via unstructured
platformdirs==4.3.6
# via sagemaker
# via sagemaker-core
playwright==1.48.0
# via megaparse
portalocker==3.0.0
# via iopath
pox==0.3.5
# via pathos
ppft==1.7.6.9
# via pathos
propcache==0.2.0
# via yarl
proto-plus==1.25.0
# via google-api-core
# via google-cloud-vision
protobuf==4.25.5
# via google-api-core
# via google-cloud-vision
# via googleapis-common-protos
# via grpcio-status
# via mlflow-skinny
# via onnx
# via onnxruntime
# via opentelemetry-proto
# via proto-plus
# via sagemaker
# via transformers
psutil==6.1.0
# via megaparse
# via megaparse-sdk
# via sagemaker
# via unstructured
pyarrow==17.0.0
# via mlflow
pyasn1==0.6.1
@ -633,77 +381,47 @@ pyasn1==0.6.1
# via rsa
pyasn1-modules==0.4.1
# via google-auth
pycocotools==2.0.8
# via effdet
pycparser==2.22
# via cffi
pycryptodome==3.21.0
# via megaparse
# via megaparse-sdk
pydantic==2.9.2
# via anthropic
# via chainlit
# via cohere
# via fastapi
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-core
# via langchain-mistralai
# via langsmith
# via literalai
# via llama-cloud
# via llama-index-core
# via openai
# via pydantic-settings
# via quivr-core
# via sagemaker-core
# via unstructured-client
pydantic-core==2.23.4
# via cohere
# via pydantic
pydantic-settings==2.6.1
# via megaparse
pyee==12.0.0
# via playwright
# via langchain-community
pygments==2.18.0
# via rich
pyjwt==2.9.0
# via chainlit
pypandoc==1.14
# via unstructured
pyparsing==3.2.0
# via matplotlib
pypdf==5.1.0
# via llama-index-readers-file
# via megaparse
# via unstructured
# via unstructured-client
pypdfium2==4.30.0
# via pdfplumber
pytesseract==0.3.13
# via unstructured
python-dateutil==2.8.2
# via botocore
# via matplotlib
# via pandas
# via unstructured-client
python-docx==1.1.2
# via unstructured
python-dotenv==1.0.1
# via chainlit
# via megaparse
# via megaparse-sdk
# via pydantic-settings
python-engineio==4.10.1
# via python-socketio
python-iso639==2024.10.22
# via unstructured
python-magic==0.4.27
# via megaparse
# via unstructured
python-multipart==0.0.9
# via chainlit
# via unstructured-inference
python-oxmsg==0.0.1
# via unstructured
python-pptx==0.6.23
# via unstructured
python-socketio==5.11.4
# via chainlit
pytz==2024.2
@ -713,50 +431,35 @@ pyyaml==6.0.2
# via langchain
# via langchain-community
# via langchain-core
# via layoutparser
# via llama-index-core
# via mlflow-skinny
# via omegaconf
# via sagemaker
# via sagemaker-core
# via timm
# via transformers
quivr-core @ file:///${PROJECT_ROOT}/../../core
rapidfuzz==3.10.1
# via quivr-core
# via unstructured
# via unstructured-inference
ratelimit==2.2.1
# via megaparse
referencing==0.35.1
# via jsonschema
# via jsonschema-specifications
regex==2024.9.11
# via nltk
# via tiktoken
# via transformers
requests==2.32.3
# via cohere
# via databricks-sdk
# via docker
# via google-api-core
# via huggingface-hub
# via langchain
# via langchain-community
# via langsmith
# via llama-index-core
# via llama-index-legacy
# via megaparse
# via mlflow-skinny
# via opentelemetry-exporter-otlp-proto-http
# via requests-toolbelt
# via sagemaker
# via tiktoken
# via transformers
# via unstructured
requests-toolbelt==1.0.0
# via langsmith
# via unstructured-client
rich==13.9.2
# via quivr-core
# via sagemaker-core
@ -768,7 +471,6 @@ rsa==4.9
s3transfer==0.10.3
# via boto3
safetensors==0.4.5
# via timm
# via transformers
sagemaker==2.232.2
# via cohere
@ -781,7 +483,6 @@ schema==0.7.7
scikit-learn==1.5.2
# via mlflow
scipy==1.14.1
# via layoutparser
# via mlflow
# via scikit-learn
sentencepiece==0.2.0
@ -792,7 +493,6 @@ simple-websocket==1.1.0
# via python-engineio
six==1.16.0
# via google-pasta
# via langdetect
# via python-dateutil
smdebug-rulesconfig==1.0.1
# via sagemaker
@ -803,76 +503,44 @@ sniffio==1.3.1
# via anyio
# via httpx
# via openai
soupsieve==2.6
# via beautifulsoup4
sqlalchemy==2.0.36
# via alembic
# via langchain
# via langchain-community
# via llama-index-core
# via llama-index-legacy
# via mlflow
sqlparse==0.5.1
# via mlflow-skinny
starlette==0.41.2
# via chainlit
# via fastapi
striprtf==0.0.26
# via llama-index-readers-file
sympy==1.13.1
# via onnxruntime
# via torch
syncer==2.0.3
# via chainlit
tabulate==0.9.0
# via langchain-cohere
# via unstructured
tblib==3.0.0
# via sagemaker
tenacity==8.5.0
# via langchain
# via langchain-community
# via langchain-core
# via llama-index-core
# via llama-index-legacy
threadpoolctl==3.5.0
# via scikit-learn
tiktoken==0.8.0
# via langchain-openai
# via llama-index-core
# via llama-index-legacy
# via quivr-core
timm==1.0.11
# via effdet
# via unstructured-inference
tokenizers==0.20.1
# via anthropic
# via cohere
# via langchain-mistralai
# via transformers
tomli==2.0.2
# via chainlit
torch==2.5.1
# via effdet
# via timm
# via torchvision
# via unstructured-inference
torchvision==0.20.1
# via effdet
# via timm
tqdm==4.66.5
# via huggingface-hub
# via iopath
# via llama-index-core
# via nltk
# via openai
# via sagemaker
# via transformers
# via unstructured
transformers==4.45.2
# via quivr-core
# via unstructured-inference
triton==3.1.0
# via torch
types-pyyaml==6.0.12.20240917
# via quivr-core
types-requests==2.32.0.20241016
@ -883,36 +551,17 @@ typing-extensions==4.12.2
# via cohere
# via fastapi
# via huggingface-hub
# via iopath
# via langchain-core
# via llama-index-core
# via llama-index-legacy
# via openai
# via opentelemetry-sdk
# via pydantic
# via pydantic-core
# via pyee
# via python-docx
# via python-oxmsg
# via sqlalchemy
# via torch
# via typing-inspect
# via unstructured
typing-inspect==0.9.0
# via dataclasses-json
# via llama-index-core
# via llama-index-legacy
# via unstructured-client
tzdata==2024.2
# via pandas
unstructured==0.15.0
# via megaparse
unstructured-client==0.27.0
# via unstructured
unstructured-inference==0.7.36
# via unstructured
unstructured-pytesseract==0.3.13
# via unstructured
uptrace==1.27.0
# via chainlit
urllib3==2.2.3
@ -923,24 +572,15 @@ urllib3==2.2.3
# via types-requests
uvicorn==0.25.0
# via chainlit
# via megaparse
uvloop==0.21.0
# via megaparse
watchfiles==0.20.0
# via chainlit
werkzeug==3.0.4
# via flask
wrapt==1.16.0
# via deprecated
# via llama-index-core
# via opentelemetry-instrumentation
# via unstructured
wsproto==1.2.0
# via simple-websocket
xlrd==2.0.1
# via unstructured
xlsxwriter==3.2.0
# via python-pptx
yarl==1.15.4
# via aiohttp
zipp==3.20.2

@ -17,8 +17,6 @@ aiohappyeyeballs==2.4.3
aiohttp==3.10.10
# via langchain
# via langchain-community
# via llama-index-core
# via llama-index-legacy
aiosignal==1.3.1
# via aiohttp
alembic==1.13.3
@ -27,10 +25,8 @@ aniso8601==9.0.1
# via graphene
annotated-types==0.7.0
# via pydantic
anthropic==0.36.1
anthropic==0.40.0
# via langchain-anthropic
antlr4-python3-runtime==4.9.3
# via omegaconf
anyio==4.6.2.post1
# via anthropic
# via asyncer
@ -45,12 +41,6 @@ attrs==23.2.0
# via jsonschema
# via referencing
# via sagemaker
backoff==2.2.1
# via megaparse
# via unstructured
beautifulsoup4==4.12.3
# via llama-index-readers-file
# via unstructured
bidict==0.23.1
# via python-socketio
blinker==1.8.2
@ -70,36 +60,23 @@ certifi==2024.8.30
# via httpcore
# via httpx
# via requests
cffi==1.17.1
# via cryptography
chainlit==1.3.2
chardet==5.2.0
# via unstructured
charset-normalizer==3.4.0
# via pdfminer-six
# via requests
chevron==0.14.0
# via literalai
click==8.1.7
# via chainlit
# via flask
# via llama-parse
# via mlflow-skinny
# via nltk
# via python-oxmsg
# via uvicorn
cloudpickle==2.2.1
# via mlflow-skinny
# via sagemaker
cohere==5.11.0
# via langchain-cohere
coloredlogs==15.0.1
# via onnxruntime
contourpy==1.3.0
# via matplotlib
cryptography==43.0.3
# via pdfminer-six
# via unstructured-client
cycler==0.12.1
# via matplotlib
databricks-sdk==0.34.0
@ -107,59 +84,35 @@ databricks-sdk==0.34.0
dataclasses-json==0.6.7
# via chainlit
# via langchain-community
# via llama-index-core
# via llama-index-legacy
# via unstructured
defusedxml==0.7.1
# via langchain-anthropic
deprecated==1.2.14
# via llama-index-core
# via llama-index-legacy
# via opentelemetry-api
# via opentelemetry-exporter-otlp-proto-grpc
# via opentelemetry-exporter-otlp-proto-http
# via opentelemetry-semantic-conventions
# via pikepdf
dill==0.3.9
# via multiprocess
# via pathos
dirtyjson==1.0.8
# via llama-index-core
# via llama-index-legacy
distro==1.9.0
# via anthropic
# via openai
docker==7.1.0
# via mlflow
# via sagemaker
effdet==0.4.1
# via unstructured
emoji==2.14.0
# via unstructured
et-xmlfile==2.0.0
# via openpyxl
eval-type-backport==0.2.0
# via unstructured-client
faiss-cpu==1.9.0
# via quivr-core
fastapi==0.115.5
# via chainlit
# via megaparse
fastavro==1.9.7
# via cohere
filelock==3.16.1
# via huggingface-hub
# via torch
# via transformers
# via triton
filetype==1.2.0
# via chainlit
# via llama-index-core
# via unstructured
flask==3.0.3
# via mlflow
flatbuffers==24.3.25
# via onnxruntime
fonttools==4.54.1
# via matplotlib
frozenlist==1.4.1
@ -167,26 +120,15 @@ frozenlist==1.4.1
# via aiosignal
fsspec==2024.9.0
# via huggingface-hub
# via llama-index-core
# via llama-index-legacy
# via torch
gitdb==4.0.11
# via gitpython
gitpython==3.1.43
# via mlflow-skinny
google-api-core==2.23.0
# via google-cloud-vision
google-auth==2.35.0
# via databricks-sdk
# via google-api-core
# via google-cloud-vision
google-cloud-vision==3.8.1
# via unstructured
google-pasta==0.2.0
# via sagemaker
googleapis-common-protos==1.65.0
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
# via opentelemetry-exporter-otlp-proto-http
graphene==3.3
@ -196,15 +138,8 @@ graphql-core==3.2.5
# via graphql-relay
graphql-relay==3.2.0
# via graphene
greenlet==3.1.1
# via playwright
# via sqlalchemy
grpcio==1.67.0
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
grpcio-status==1.62.3
# via google-api-core
gunicorn==23.0.0
# via mlflow
h11==0.14.0
@ -217,25 +152,21 @@ httpx==0.27.2
# via anthropic
# via chainlit
# via cohere
# via langchain-mistralai
# via langgraph-sdk
# via langsmith
# via literalai
# via llama-cloud
# via llama-index-core
# via llama-index-legacy
# via megaparse-sdk
# via openai
# via quivr-core
# via unstructured-client
httpx-sse==0.4.0
# via cohere
# via langchain-community
# via langchain-mistralai
# via langgraph-sdk
huggingface-hub==0.25.2
# via timm
# via tokenizers
# via transformers
# via unstructured-inference
humanfriendly==10.0
# via coloredlogs
idna==3.10
# via anyio
# via httpx
@ -246,14 +177,11 @@ importlib-metadata==6.11.0
# via opentelemetry-api
# via sagemaker
# via sagemaker-core
iopath==0.1.10
# via layoutparser
itsdangerous==2.2.0
# via flask
jinja2==3.1.4
# via flask
# via mlflow
# via torch
jiter==0.6.1
# via anthropic
# via openai
@ -261,12 +189,9 @@ jmespath==1.0.1
# via boto3
# via botocore
joblib==1.4.2
# via nltk
# via scikit-learn
jsonpatch==1.33
# via langchain-core
jsonpath-python==1.0.6
# via unstructured-client
jsonpointer==3.0.0
# via jsonpatch
jsonschema==4.23.0
@ -276,40 +201,36 @@ jsonschema-specifications==2024.10.1
# via jsonschema
kiwisolver==1.4.7
# via matplotlib
langchain==0.2.16
langchain==0.3.9
# via langchain-community
# via megaparse
# via quivr-core
langchain-anthropic==0.1.23
# via megaparse
langchain-anthropic==0.3.0
# via quivr-core
langchain-cohere==0.2.4
langchain-cohere==0.3.3
# via quivr-core
langchain-community==0.2.17
langchain-community==0.3.9
# via langchain-experimental
# via megaparse
# via quivr-core
langchain-core==0.2.41
langchain-core==0.3.21
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-community
# via langchain-experimental
# via langchain-mistralai
# via langchain-openai
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via megaparse
# via quivr-core
langchain-experimental==0.0.65
langchain-experimental==0.3.3
# via langchain-cohere
langchain-openai==0.1.25
# via megaparse
langchain-mistralai==0.2.3
# via quivr-core
langchain-text-splitters==0.2.4
langchain-openai==0.2.11
# via quivr-core
langchain-text-splitters==0.3.2
# via langchain
langdetect==1.0.9
# via unstructured
langgraph==0.2.38
# via quivr-core
langgraph-checkpoint==2.0.1
@ -320,72 +241,16 @@ langsmith==0.1.135
# via langchain
# via langchain-community
# via langchain-core
layoutparser==0.3.4
# via unstructured-inference
lazify==0.4.0
# via chainlit
literalai==0.0.623
# via chainlit
llama-cloud==0.1.5
# via llama-index-indices-managed-llama-cloud
llama-index==0.11.23
# via megaparse
llama-index-agent-openai==0.3.4
# via llama-index
# via llama-index-program-openai
llama-index-cli==0.3.1
# via llama-index
llama-index-core==0.11.23
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-embeddings-openai
# via llama-index-indices-managed-llama-cloud
# via llama-index-llms-openai
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
# via llama-index-readers-file
# via llama-index-readers-llama-parse
# via llama-parse
llama-index-embeddings-openai==0.2.5
# via llama-index
# via llama-index-cli
llama-index-indices-managed-llama-cloud==0.6.0
# via llama-index
llama-index-legacy==0.9.48.post4
# via llama-index
llama-index-llms-openai==0.2.16
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
llama-index-multi-modal-llms-openai==0.2.3
# via llama-index
llama-index-program-openai==0.2.0
# via llama-index
# via llama-index-question-gen-openai
llama-index-question-gen-openai==0.2.0
# via llama-index
llama-index-readers-file==0.3.0
# via llama-index
llama-index-readers-llama-parse==0.3.0
# via llama-index
llama-parse==0.5.14
# via llama-index-readers-llama-parse
# via megaparse
lxml==5.3.0
# via pikepdf
# via python-docx
# via python-pptx
# via unstructured
loguru==0.7.2
# via megaparse-sdk
mako==1.3.5
# via alembic
markdown==3.7
# via mlflow
# via unstructured
markdown-it-py==3.0.0
# via rich
markupsafe==3.0.1
@ -397,11 +262,9 @@ marshmallow==3.22.0
# via dataclasses-json
matplotlib==3.9.2
# via mlflow
# via pycocotools
# via unstructured-inference
mdurl==0.1.2
# via markdown-it-py
megaparse==0.0.43
megaparse-sdk==0.1.10
# via quivr-core
mlflow==2.17.0
# via sagemaker-mlflow
@ -409,8 +272,6 @@ mlflow-skinny==2.17.0
# via mlflow
mock==4.0.3
# via sagemaker-core
mpmath==1.3.0
# via sympy
msgpack==1.1.0
# via langgraph-checkpoint
multidict==6.1.0
@ -420,94 +281,26 @@ multiprocess==0.70.17
# via pathos
mypy-extensions==1.0.0
# via typing-inspect
nats-py==2.9.0
# via megaparse-sdk
nest-asyncio==1.6.0
# via chainlit
# via llama-index-core
# via llama-index-legacy
# via unstructured-client
networkx==3.4.2
# via llama-index-core
# via llama-index-legacy
# via torch
# via unstructured
nltk==3.9.1
# via llama-index
# via llama-index-core
# via llama-index-legacy
# via unstructured
numpy==1.26.4
# via chainlit
# via contourpy
# via faiss-cpu
# via langchain
# via langchain-community
# via layoutparser
# via llama-index-core
# via llama-index-legacy
# via matplotlib
# via megaparse
# via mlflow
# via onnx
# via onnxruntime
# via opencv-python
# via pandas
# via pyarrow
# via pycocotools
# via sagemaker
# via scikit-learn
# via scipy
# via torchvision
# via transformers
# via unstructured
nvidia-cublas-cu12==12.4.5.8
# via nvidia-cudnn-cu12
# via nvidia-cusolver-cu12
# via torch
nvidia-cuda-cupti-cu12==12.4.127
# via torch
nvidia-cuda-nvrtc-cu12==12.4.127
# via torch
nvidia-cuda-runtime-cu12==12.4.127
# via torch
nvidia-cudnn-cu12==9.1.0.70
# via torch
nvidia-cufft-cu12==11.2.1.3
# via torch
nvidia-curand-cu12==10.3.5.147
# via torch
nvidia-cusolver-cu12==11.6.1.9
# via torch
nvidia-cusparse-cu12==12.3.1.170
# via nvidia-cusolver-cu12
# via torch
nvidia-nccl-cu12==2.21.5
# via torch
nvidia-nvjitlink-cu12==12.4.127
# via nvidia-cusolver-cu12
# via nvidia-cusparse-cu12
# via torch
nvidia-nvtx-cu12==12.4.127
# via torch
olefile==0.47
# via python-oxmsg
omegaconf==2.3.0
# via effdet
onnx==1.17.0
# via unstructured
# via unstructured-inference
onnxruntime==1.20.0
# via unstructured-inference
openai==1.54.5
# via langchain-openai
# via llama-index-agent-openai
# via llama-index-embeddings-openai
# via llama-index-legacy
# via llama-index-llms-openai
opencv-python==4.10.0.84
# via layoutparser
# via unstructured-inference
openpyxl==3.1.5
# via unstructured
opentelemetry-api==1.27.0
# via mlflow-skinny
# via opentelemetry-exporter-otlp-proto-grpc
@ -551,81 +344,36 @@ packaging==23.2
# via marshmallow
# via matplotlib
# via mlflow-skinny
# via onnxruntime
# via pikepdf
# via pytesseract
# via sagemaker
# via transformers
# via unstructured-pytesseract
pandas==2.2.3
# via langchain-cohere
# via layoutparser
# via llama-index-legacy
# via llama-index-readers-file
# via mlflow
# via sagemaker
# via unstructured
parameterized==0.9.0
# via cohere
pathos==0.3.3
# via sagemaker
pdf2image==1.17.0
# via layoutparser
# via unstructured
pdfminer-six==20231228
# via pdfplumber
# via unstructured
pdfplumber==0.11.4
# via layoutparser
# via megaparse
pikepdf==9.4.2
# via unstructured
pillow==11.0.0
# via layoutparser
# via llama-index-core
# via matplotlib
# via pdf2image
# via pdfplumber
# via pikepdf
# via pillow-heif
# via pytesseract
# via python-pptx
# via torchvision
# via unstructured-pytesseract
pillow-heif==0.20.0
# via unstructured
platformdirs==4.3.6
# via sagemaker
# via sagemaker-core
playwright==1.48.0
# via megaparse
portalocker==3.0.0
# via iopath
pox==0.3.5
# via pathos
ppft==1.7.6.9
# via pathos
propcache==0.2.0
# via yarl
proto-plus==1.25.0
# via google-api-core
# via google-cloud-vision
protobuf==4.25.5
# via google-api-core
# via google-cloud-vision
# via googleapis-common-protos
# via grpcio-status
# via mlflow-skinny
# via onnx
# via onnxruntime
# via opentelemetry-proto
# via proto-plus
# via sagemaker
# via transformers
psutil==6.1.0
# via megaparse
# via megaparse-sdk
# via sagemaker
# via unstructured
pyarrow==17.0.0
# via mlflow
pyasn1==0.6.1
@ -633,77 +381,47 @@ pyasn1==0.6.1
# via rsa
pyasn1-modules==0.4.1
# via google-auth
pycocotools==2.0.8
# via effdet
pycparser==2.22
# via cffi
pycryptodome==3.21.0
# via megaparse
# via megaparse-sdk
pydantic==2.9.2
# via anthropic
# via chainlit
# via cohere
# via fastapi
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-core
# via langchain-mistralai
# via langsmith
# via literalai
# via llama-cloud
# via llama-index-core
# via openai
# via pydantic-settings
# via quivr-core
# via sagemaker-core
# via unstructured-client
pydantic-core==2.23.4
# via cohere
# via pydantic
pydantic-settings==2.6.1
# via megaparse
pyee==12.0.0
# via playwright
# via langchain-community
pygments==2.18.0
# via rich
pyjwt==2.9.0
# via chainlit
pypandoc==1.14
# via unstructured
pyparsing==3.2.0
# via matplotlib
pypdf==5.1.0
# via llama-index-readers-file
# via megaparse
# via unstructured
# via unstructured-client
pypdfium2==4.30.0
# via pdfplumber
pytesseract==0.3.13
# via unstructured
python-dateutil==2.8.2
# via botocore
# via matplotlib
# via pandas
# via unstructured-client
python-docx==1.1.2
# via unstructured
python-dotenv==1.0.1
# via chainlit
# via megaparse
# via megaparse-sdk
# via pydantic-settings
python-engineio==4.10.1
# via python-socketio
python-iso639==2024.10.22
# via unstructured
python-magic==0.4.27
# via megaparse
# via unstructured
python-multipart==0.0.9
# via chainlit
# via unstructured-inference
python-oxmsg==0.0.1
# via unstructured
python-pptx==0.6.23
# via unstructured
python-socketio==5.11.4
# via chainlit
pytz==2024.2
@ -713,50 +431,35 @@ pyyaml==6.0.2
# via langchain
# via langchain-community
# via langchain-core
# via layoutparser
# via llama-index-core
# via mlflow-skinny
# via omegaconf
# via sagemaker
# via sagemaker-core
# via timm
# via transformers
quivr-core @ file:///${PROJECT_ROOT}/../../core
rapidfuzz==3.10.1
# via quivr-core
# via unstructured
# via unstructured-inference
ratelimit==2.2.1
# via megaparse
referencing==0.35.1
# via jsonschema
# via jsonschema-specifications
regex==2024.9.11
# via nltk
# via tiktoken
# via transformers
requests==2.32.3
# via cohere
# via databricks-sdk
# via docker
# via google-api-core
# via huggingface-hub
# via langchain
# via langchain-community
# via langsmith
# via llama-index-core
# via llama-index-legacy
# via megaparse
# via mlflow-skinny
# via opentelemetry-exporter-otlp-proto-http
# via requests-toolbelt
# via sagemaker
# via tiktoken
# via transformers
# via unstructured
requests-toolbelt==1.0.0
# via langsmith
# via unstructured-client
rich==13.9.2
# via quivr-core
# via sagemaker-core
@ -768,7 +471,6 @@ rsa==4.9
s3transfer==0.10.3
# via boto3
safetensors==0.4.5
# via timm
# via transformers
sagemaker==2.232.2
# via cohere
@ -781,7 +483,6 @@ schema==0.7.7
scikit-learn==1.5.2
# via mlflow
scipy==1.14.1
# via layoutparser
# via mlflow
# via scikit-learn
sentencepiece==0.2.0
@ -792,7 +493,6 @@ simple-websocket==1.1.0
# via python-engineio
six==1.16.0
# via google-pasta
# via langdetect
# via python-dateutil
smdebug-rulesconfig==1.0.1
# via sagemaker
@ -803,76 +503,44 @@ sniffio==1.3.1
# via anyio
# via httpx
# via openai
soupsieve==2.6
# via beautifulsoup4
sqlalchemy==2.0.36
# via alembic
# via langchain
# via langchain-community
# via llama-index-core
# via llama-index-legacy
# via mlflow
sqlparse==0.5.1
# via mlflow-skinny
starlette==0.41.2
# via chainlit
# via fastapi
striprtf==0.0.26
# via llama-index-readers-file
sympy==1.13.1
# via onnxruntime
# via torch
syncer==2.0.3
# via chainlit
tabulate==0.9.0
# via langchain-cohere
# via unstructured
tblib==3.0.0
# via sagemaker
tenacity==8.5.0
# via langchain
# via langchain-community
# via langchain-core
# via llama-index-core
# via llama-index-legacy
threadpoolctl==3.5.0
# via scikit-learn
tiktoken==0.8.0
# via langchain-openai
# via llama-index-core
# via llama-index-legacy
# via quivr-core
timm==1.0.11
# via effdet
# via unstructured-inference
tokenizers==0.20.1
# via anthropic
# via cohere
# via langchain-mistralai
# via transformers
tomli==2.0.2
# via chainlit
torch==2.5.1
# via effdet
# via timm
# via torchvision
# via unstructured-inference
torchvision==0.20.1
# via effdet
# via timm
tqdm==4.66.5
# via huggingface-hub
# via iopath
# via llama-index-core
# via nltk
# via openai
# via sagemaker
# via transformers
# via unstructured
transformers==4.45.2
# via quivr-core
# via unstructured-inference
triton==3.1.0
# via torch
types-pyyaml==6.0.12.20240917
# via quivr-core
types-requests==2.32.0.20241016
@ -883,36 +551,17 @@ typing-extensions==4.12.2
# via cohere
# via fastapi
# via huggingface-hub
# via iopath
# via langchain-core
# via llama-index-core
# via llama-index-legacy
# via openai
# via opentelemetry-sdk
# via pydantic
# via pydantic-core
# via pyee
# via python-docx
# via python-oxmsg
# via sqlalchemy
# via torch
# via typing-inspect
# via unstructured
typing-inspect==0.9.0
# via dataclasses-json
# via llama-index-core
# via llama-index-legacy
# via unstructured-client
tzdata==2024.2
# via pandas
unstructured==0.15.0
# via megaparse
unstructured-client==0.27.0
# via unstructured
unstructured-inference==0.7.36
# via unstructured
unstructured-pytesseract==0.3.13
# via unstructured
uptrace==1.27.0
# via chainlit
urllib3==2.2.3
@ -923,24 +572,15 @@ urllib3==2.2.3
# via types-requests
uvicorn==0.25.0
# via chainlit
# via megaparse
uvloop==0.21.0
# via megaparse
watchfiles==0.20.0
# via chainlit
werkzeug==3.0.4
# via flask
wrapt==1.16.0
# via deprecated
# via llama-index-core
# via opentelemetry-instrumentation
# via unstructured
wsproto==1.2.0
# via simple-websocket
xlrd==2.0.1
# via unstructured
xlsxwriter==3.2.0
# via python-pptx
yarl==1.15.4
# via aiohttp
zipp==3.20.2

@ -6,9 +6,9 @@ authors = [
{ name = "Stan Girard", email = "stan@quivr.app" }
]
dependencies = [
"quivr-core @ file:///${PROJECT_ROOT}/../../core",
"flask[async]>=3.1.0",
"openai>=1.54.5",
"quivr-core>=0.0.24",
"flask-caching>=2.3.0",
]
readme = "README.md"
@ -20,6 +20,7 @@ build-backend = "hatchling.build"
[tool.rye]
managed = true
virtual = true
dev-dependencies = []
[tool.hatch.metadata]

@ -9,7 +9,6 @@
# generate-hashes: false
# universal: false
-e file:.
aiofiles==24.1.0
# via quivr-core
aiohappyeyeballs==2.4.3
@ -17,293 +16,134 @@ aiohappyeyeballs==2.4.3
aiohttp==3.11.6
# via langchain
# via langchain-community
# via llama-index-core
# via llama-index-legacy
aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anthropic==0.39.0
# via langchain-anthropic
antlr4-python3-runtime==4.9.3
# via omegaconf
anyio==4.6.2.post1
# via anthropic
# via httpx
# via openai
# via starlette
asgiref==3.8.1
# via flask
attrs==24.2.0
# via aiohttp
backoff==2.2.1
# via megaparse
# via unstructured
beautifulsoup4==4.12.3
# via llama-index-readers-file
# via unstructured
blinker==1.9.0
# via flask
cachelib==0.9.0
# via flask-caching
cachetools==5.5.0
# via google-auth
certifi==2024.8.30
# via httpcore
# via httpx
# via requests
cffi==1.17.1
# via cryptography
chardet==5.2.0
# via unstructured
charset-normalizer==3.4.0
# via pdfminer-six
# via requests
click==8.1.7
# via flask
# via llama-parse
# via nltk
# via python-oxmsg
# via uvicorn
cohere==5.11.4
# via langchain-cohere
coloredlogs==15.0.1
# via onnxruntime
contourpy==1.3.1
# via matplotlib
cryptography==43.0.3
# via pdfminer-six
# via unstructured-client
cycler==0.12.1
# via matplotlib
dataclasses-json==0.6.7
# via langchain-community
# via llama-index-core
# via llama-index-legacy
# via unstructured
defusedxml==0.7.1
# via langchain-anthropic
deprecated==1.2.15
# via llama-index-core
# via llama-index-legacy
# via pikepdf
dirtyjson==1.0.8
# via llama-index-core
# via llama-index-legacy
distro==1.9.0
# via anthropic
# via openai
effdet==0.4.1
# via unstructured
emoji==2.14.0
# via unstructured
et-xmlfile==2.0.0
# via openpyxl
eval-type-backport==0.2.0
# via unstructured-client
faiss-cpu==1.9.0.post1
# via quivr-core
fastapi==0.115.5
# via megaparse
fastavro==1.9.7
# via cohere
filelock==3.16.1
# via huggingface-hub
# via torch
# via transformers
# via triton
filetype==1.2.0
# via llama-index-core
# via unstructured
flask==3.1.0
# via flask-caching
# via quivr-whisper
flask-caching==2.3.0
# via quivr-whisper
flatbuffers==24.3.25
# via onnxruntime
fonttools==4.55.0
# via matplotlib
frozenlist==1.5.0
# via aiohttp
# via aiosignal
fsspec==2024.10.0
# via huggingface-hub
# via llama-index-core
# via llama-index-legacy
# via torch
google-api-core==2.23.0
# via google-cloud-vision
google-auth==2.36.0
# via google-api-core
# via google-cloud-vision
google-cloud-vision==3.8.1
# via unstructured
googleapis-common-protos==1.66.0
# via google-api-core
# via grpcio-status
greenlet==3.1.1
# via playwright
# via sqlalchemy
grpcio==1.68.0
# via google-api-core
# via grpcio-status
grpcio-status==1.68.0
# via google-api-core
h11==0.14.0
# via httpcore
# via uvicorn
httpcore==1.0.7
# via httpx
httpx==0.27.2
# via anthropic
# via cohere
# via langchain-mistralai
# via langgraph-sdk
# via langsmith
# via llama-cloud
# via llama-index-core
# via llama-index-legacy
# via megaparse-sdk
# via openai
# via quivr-core
# via unstructured-client
httpx-sse==0.4.0
# via cohere
# via langgraph-sdk
# via langchain-community
# via langchain-mistralai
huggingface-hub==0.26.2
# via timm
# via tokenizers
# via transformers
# via unstructured-inference
humanfriendly==10.0
# via coloredlogs
idna==3.10
# via anyio
# via httpx
# via requests
# via yarl
iopath==0.1.10
# via layoutparser
itsdangerous==2.2.0
# via flask
jinja2==3.1.4
# via flask
# via torch
jiter==0.7.1
# via anthropic
# via openai
joblib==1.4.2
# via nltk
jsonpatch==1.33
# via langchain-core
jsonpath-python==1.0.6
# via unstructured-client
jsonpointer==3.0.0
# via jsonpatch
kiwisolver==1.4.7
# via matplotlib
langchain==0.2.17
langchain==0.3.9
# via langchain-community
# via megaparse
# via quivr-core
langchain-anthropic==0.1.23
# via megaparse
langchain-anthropic==0.3.0
# via quivr-core
langchain-cohere==0.2.4
langchain-cohere==0.3.3
# via quivr-core
langchain-community==0.2.19
langchain-community==0.3.9
# via langchain-experimental
# via megaparse
# via quivr-core
langchain-core==0.2.43
langchain-core==0.3.21
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-community
# via langchain-experimental
# via langchain-mistralai
# via langchain-openai
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via megaparse
# via quivr-core
langchain-experimental==0.0.65
langchain-experimental==0.3.3
# via langchain-cohere
langchain-openai==0.1.25
# via megaparse
langchain-mistralai==0.2.3
# via quivr-core
langchain-text-splitters==0.2.4
langchain-openai==0.2.11
# via quivr-core
langchain-text-splitters==0.3.2
# via langchain
langdetect==1.0.9
# via unstructured
langgraph==0.2.52
langgraph==0.2.56
# via quivr-core
langgraph-checkpoint==2.0.5
langgraph-checkpoint==2.0.9
# via langgraph
langgraph-sdk==0.1.36
langgraph-sdk==0.1.46
# via langgraph
langsmith==0.1.143
# via langchain
# via langchain-community
# via langchain-core
layoutparser==0.3.4
# via unstructured-inference
llama-cloud==0.1.5
# via llama-index-indices-managed-llama-cloud
llama-index==0.12.0
# via megaparse
llama-index-agent-openai==0.4.0
# via llama-index
# via llama-index-program-openai
llama-index-cli==0.4.0
# via llama-index
llama-index-core==0.12.0
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-embeddings-openai
# via llama-index-indices-managed-llama-cloud
# via llama-index-llms-openai
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
# via llama-index-readers-file
# via llama-index-readers-llama-parse
# via llama-parse
llama-index-embeddings-openai==0.3.0
# via llama-index
# via llama-index-cli
llama-index-indices-managed-llama-cloud==0.6.2
# via llama-index
llama-index-legacy==0.9.48.post4
# via llama-index
llama-index-llms-openai==0.3.0
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
llama-index-multi-modal-llms-openai==0.3.0
# via llama-index
llama-index-program-openai==0.3.0
# via llama-index
# via llama-index-question-gen-openai
llama-index-question-gen-openai==0.3.0
# via llama-index
llama-index-readers-file==0.4.0
# via llama-index
llama-index-readers-llama-parse==0.4.0
# via llama-index
llama-parse==0.5.14
# via llama-index-readers-llama-parse
# via megaparse
lxml==5.3.0
# via pikepdf
# via python-docx
# via python-pptx
# via unstructured
markdown==3.7
# via unstructured
loguru==0.7.2
# via megaparse-sdk
markdown-it-py==3.0.0
# via rich
markupsafe==3.0.2
@ -312,15 +152,10 @@ markupsafe==3.0.2
# via werkzeug
marshmallow==3.23.1
# via dataclasses-json
matplotlib==3.9.2
# via pycocotools
# via unstructured-inference
mdurl==0.1.2
# via markdown-it-py
megaparse==0.0.43
megaparse-sdk==0.1.10
# via quivr-core
mpmath==1.3.0
# via sympy
msgpack==1.1.0
# via langgraph-checkpoint
multidict==6.1.0
@ -328,89 +163,16 @@ multidict==6.1.0
# via yarl
mypy-extensions==1.0.0
# via typing-inspect
nest-asyncio==1.6.0
# via llama-index-core
# via llama-index-legacy
# via unstructured-client
networkx==3.4.2
# via llama-index-core
# via llama-index-legacy
# via torch
# via unstructured
nltk==3.9.1
# via llama-index
# via llama-index-core
# via llama-index-legacy
# via unstructured
nats-py==2.9.0
# via megaparse-sdk
numpy==1.26.4
# via contourpy
# via faiss-cpu
# via langchain
# via langchain-community
# via layoutparser
# via llama-index-core
# via llama-index-legacy
# via matplotlib
# via megaparse
# via onnx
# via onnxruntime
# via opencv-python
# via pandas
# via pycocotools
# via scipy
# via torchvision
# via transformers
# via unstructured
nvidia-cublas-cu12==12.4.5.8
# via nvidia-cudnn-cu12
# via nvidia-cusolver-cu12
# via torch
nvidia-cuda-cupti-cu12==12.4.127
# via torch
nvidia-cuda-nvrtc-cu12==12.4.127
# via torch
nvidia-cuda-runtime-cu12==12.4.127
# via torch
nvidia-cudnn-cu12==9.1.0.70
# via torch
nvidia-cufft-cu12==11.2.1.3
# via torch
nvidia-curand-cu12==10.3.5.147
# via torch
nvidia-cusolver-cu12==11.6.1.9
# via torch
nvidia-cusparse-cu12==12.3.1.170
# via nvidia-cusolver-cu12
# via torch
nvidia-nccl-cu12==2.21.5
# via torch
nvidia-nvjitlink-cu12==12.4.127
# via nvidia-cusolver-cu12
# via nvidia-cusparse-cu12
# via torch
nvidia-nvtx-cu12==12.4.127
# via torch
olefile==0.47
# via python-oxmsg
omegaconf==2.3.0
# via effdet
onnx==1.17.0
# via unstructured
# via unstructured-inference
onnxruntime==1.20.0
# via unstructured-inference
openai==1.54.5
# via langchain-openai
# via llama-index-agent-openai
# via llama-index-embeddings-openai
# via llama-index-legacy
# via llama-index-llms-openai
# via quivr-whisper
opencv-python==4.10.0.84
# via layoutparser
# via unstructured-inference
openpyxl==3.1.5
# via unstructured
orjson==3.10.11
# via langgraph-sdk
# via langsmith
@ -419,133 +181,44 @@ packaging==24.2
# via huggingface-hub
# via langchain-core
# via marshmallow
# via matplotlib
# via onnxruntime
# via pikepdf
# via pytesseract
# via transformers
# via unstructured-pytesseract
pandas==2.2.3
# via langchain-cohere
# via layoutparser
# via llama-index-legacy
# via llama-index-readers-file
# via unstructured
parameterized==0.9.0
# via cohere
pdf2image==1.17.0
# via layoutparser
# via unstructured
pdfminer-six==20231228
# via pdfplumber
# via unstructured
pdfplumber==0.11.4
# via layoutparser
# via megaparse
pikepdf==9.4.2
# via unstructured
pillow==11.0.0
# via layoutparser
# via llama-index-core
# via matplotlib
# via pdf2image
# via pdfplumber
# via pikepdf
# via pillow-heif
# via pytesseract
# via python-pptx
# via torchvision
# via unstructured-pytesseract
pillow-heif==0.20.0
# via unstructured
playwright==1.48.0
# via megaparse
portalocker==3.0.0
# via iopath
propcache==0.2.0
# via aiohttp
# via yarl
proto-plus==1.25.0
# via google-api-core
# via google-cloud-vision
protobuf==5.28.3
# via google-api-core
# via google-cloud-vision
# via googleapis-common-protos
# via grpcio-status
# via onnx
# via onnxruntime
# via proto-plus
# via transformers
psutil==6.1.0
# via megaparse
# via unstructured
pyasn1==0.6.1
# via pyasn1-modules
# via rsa
pyasn1-modules==0.4.1
# via google-auth
pycocotools==2.0.8
# via effdet
pycparser==2.22
# via cffi
# via megaparse-sdk
pycryptodome==3.21.0
# via megaparse
# via megaparse-sdk
pydantic==2.9.2
# via anthropic
# via cohere
# via fastapi
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-core
# via langchain-mistralai
# via langsmith
# via llama-cloud
# via llama-index-core
# via openai
# via pydantic-settings
# via quivr-core
# via unstructured-client
pydantic-core==2.23.4
# via cohere
# via pydantic
pydantic-settings==2.6.1
# via megaparse
pyee==12.0.0
# via playwright
# via langchain-community
pygments==2.18.0
# via rich
pypandoc==1.14
# via unstructured
pyparsing==3.2.0
# via matplotlib
pypdf==5.1.0
# via llama-index-readers-file
# via megaparse
# via unstructured
# via unstructured-client
pypdfium2==4.30.0
# via pdfplumber
pytesseract==0.3.13
# via unstructured
python-dateutil==2.8.2
# via matplotlib
# via pandas
# via unstructured-client
python-docx==1.1.2
# via unstructured
python-dotenv==1.0.1
# via megaparse
# via megaparse-sdk
# via pydantic-settings
python-iso639==2024.10.22
# via unstructured
python-magic==0.4.27
# via megaparse
# via unstructured
python-multipart==0.0.17
# via unstructured-inference
python-oxmsg==0.0.1
# via unstructured
python-pptx==0.6.23
# via unstructured
pytz==2024.2
# via pandas
pyyaml==6.0.2
@ -553,114 +226,59 @@ pyyaml==6.0.2
# via langchain
# via langchain-community
# via langchain-core
# via layoutparser
# via llama-index-core
# via omegaconf
# via timm
# via transformers
quivr-core==0.0.24
# via quivr-whisper
quivr-core @ file:///${PROJECT_ROOT}/../../core
rapidfuzz==3.10.1
# via quivr-core
# via unstructured
# via unstructured-inference
ratelimit==2.2.1
# via megaparse
regex==2024.11.6
# via nltk
# via tiktoken
# via transformers
requests==2.32.3
# via cohere
# via google-api-core
# via huggingface-hub
# via langchain
# via langchain-community
# via langsmith
# via llama-index-core
# via llama-index-legacy
# via megaparse
# via requests-toolbelt
# via tiktoken
# via transformers
# via unstructured
requests-toolbelt==1.0.0
# via langsmith
# via unstructured-client
rich==13.9.4
# via quivr-core
rsa==4.9
# via google-auth
safetensors==0.4.5
# via timm
# via transformers
scipy==1.14.1
# via layoutparser
sentencepiece==0.2.0
# via transformers
six==1.16.0
# via langdetect
# via python-dateutil
sniffio==1.3.1
# via anthropic
# via anyio
# via httpx
# via openai
soupsieve==2.6
# via beautifulsoup4
sqlalchemy==2.0.36
# via langchain
# via langchain-community
# via llama-index-core
# via llama-index-legacy
starlette==0.41.3
# via fastapi
striprtf==0.0.26
# via llama-index-readers-file
sympy==1.13.1
# via onnxruntime
# via torch
tabulate==0.9.0
# via langchain-cohere
# via unstructured
tenacity==8.5.0
# via langchain
# via langchain-community
# via langchain-core
# via llama-index-core
# via llama-index-legacy
tiktoken==0.8.0
# via langchain-openai
# via llama-index-core
# via llama-index-legacy
# via quivr-core
timm==1.0.11
# via effdet
# via unstructured-inference
tokenizers==0.20.3
# via cohere
# via langchain-mistralai
# via transformers
torch==2.5.1
# via effdet
# via timm
# via torchvision
# via unstructured-inference
torchvision==0.20.1
# via effdet
# via timm
tqdm==4.67.0
# via huggingface-hub
# via iopath
# via llama-index-core
# via nltk
# via openai
# via transformers
# via unstructured
transformers==4.46.3
# via quivr-core
# via unstructured-inference
triton==3.1.0
# via torch
types-pyyaml==6.0.12.20240917
# via quivr-core
types-requests==2.32.0.20241016
@ -668,53 +286,21 @@ types-requests==2.32.0.20241016
typing-extensions==4.12.2
# via anthropic
# via cohere
# via fastapi
# via huggingface-hub
# via iopath
# via langchain-core
# via llama-index-core
# via llama-index-legacy
# via openai
# via pydantic
# via pydantic-core
# via pyee
# via python-docx
# via python-oxmsg
# via sqlalchemy
# via torch
# via typing-inspect
# via unstructured
typing-inspect==0.9.0
# via dataclasses-json
# via llama-index-core
# via llama-index-legacy
# via unstructured-client
tzdata==2024.2
# via pandas
unstructured==0.15.0
# via megaparse
unstructured-client==0.27.0
# via unstructured
unstructured-inference==0.7.36
# via unstructured
unstructured-pytesseract==0.3.13
# via unstructured
urllib3==2.2.3
# via requests
# via types-requests
uvicorn==0.32.0
# via megaparse
uvloop==0.21.0
# via megaparse
werkzeug==3.1.3
# via flask
wrapt==1.16.0
# via deprecated
# via llama-index-core
# via unstructured
xlrd==2.0.1
# via unstructured
xlsxwriter==3.2.0
# via python-pptx
yarl==1.17.2
# via aiohttp

@ -9,7 +9,6 @@
# generate-hashes: false
# universal: false
-e file:.
aiofiles==24.1.0
# via quivr-core
aiohappyeyeballs==2.4.3
@ -17,293 +16,134 @@ aiohappyeyeballs==2.4.3
aiohttp==3.11.6
# via langchain
# via langchain-community
# via llama-index-core
# via llama-index-legacy
aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anthropic==0.39.0
# via langchain-anthropic
antlr4-python3-runtime==4.9.3
# via omegaconf
anyio==4.6.2.post1
# via anthropic
# via httpx
# via openai
# via starlette
asgiref==3.8.1
# via flask
attrs==24.2.0
# via aiohttp
backoff==2.2.1
# via megaparse
# via unstructured
beautifulsoup4==4.12.3
# via llama-index-readers-file
# via unstructured
blinker==1.9.0
# via flask
cachelib==0.9.0
# via flask-caching
cachetools==5.5.0
# via google-auth
certifi==2024.8.30
# via httpcore
# via httpx
# via requests
cffi==1.17.1
# via cryptography
chardet==5.2.0
# via unstructured
charset-normalizer==3.4.0
# via pdfminer-six
# via requests
click==8.1.7
# via flask
# via llama-parse
# via nltk
# via python-oxmsg
# via uvicorn
cohere==5.11.4
# via langchain-cohere
coloredlogs==15.0.1
# via onnxruntime
contourpy==1.3.1
# via matplotlib
cryptography==43.0.3
# via pdfminer-six
# via unstructured-client
cycler==0.12.1
# via matplotlib
dataclasses-json==0.6.7
# via langchain-community
# via llama-index-core
# via llama-index-legacy
# via unstructured
defusedxml==0.7.1
# via langchain-anthropic
deprecated==1.2.15
# via llama-index-core
# via llama-index-legacy
# via pikepdf
dirtyjson==1.0.8
# via llama-index-core
# via llama-index-legacy
distro==1.9.0
# via anthropic
# via openai
effdet==0.4.1
# via unstructured
emoji==2.14.0
# via unstructured
et-xmlfile==2.0.0
# via openpyxl
eval-type-backport==0.2.0
# via unstructured-client
faiss-cpu==1.9.0.post1
# via quivr-core
fastapi==0.115.5
# via megaparse
fastavro==1.9.7
# via cohere
filelock==3.16.1
# via huggingface-hub
# via torch
# via transformers
# via triton
filetype==1.2.0
# via llama-index-core
# via unstructured
flask==3.1.0
# via flask-caching
# via quivr-whisper
flask-caching==2.3.0
# via quivr-whisper
flatbuffers==24.3.25
# via onnxruntime
fonttools==4.55.0
# via matplotlib
frozenlist==1.5.0
# via aiohttp
# via aiosignal
fsspec==2024.10.0
# via huggingface-hub
# via llama-index-core
# via llama-index-legacy
# via torch
google-api-core==2.23.0
# via google-cloud-vision
google-auth==2.36.0
# via google-api-core
# via google-cloud-vision
google-cloud-vision==3.8.1
# via unstructured
googleapis-common-protos==1.66.0
# via google-api-core
# via grpcio-status
greenlet==3.1.1
# via playwright
# via sqlalchemy
grpcio==1.68.0
# via google-api-core
# via grpcio-status
grpcio-status==1.68.0
# via google-api-core
h11==0.14.0
# via httpcore
# via uvicorn
httpcore==1.0.7
# via httpx
httpx==0.27.2
# via anthropic
# via cohere
# via langchain-mistralai
# via langgraph-sdk
# via langsmith
# via llama-cloud
# via llama-index-core
# via llama-index-legacy
# via megaparse-sdk
# via openai
# via quivr-core
# via unstructured-client
httpx-sse==0.4.0
# via cohere
# via langgraph-sdk
# via langchain-community
# via langchain-mistralai
huggingface-hub==0.26.2
# via timm
# via tokenizers
# via transformers
# via unstructured-inference
humanfriendly==10.0
# via coloredlogs
idna==3.10
# via anyio
# via httpx
# via requests
# via yarl
iopath==0.1.10
# via layoutparser
itsdangerous==2.2.0
# via flask
jinja2==3.1.4
# via flask
# via torch
jiter==0.7.1
# via anthropic
# via openai
joblib==1.4.2
# via nltk
jsonpatch==1.33
# via langchain-core
jsonpath-python==1.0.6
# via unstructured-client
jsonpointer==3.0.0
# via jsonpatch
kiwisolver==1.4.7
# via matplotlib
langchain==0.2.17
langchain==0.3.9
# via langchain-community
# via megaparse
# via quivr-core
langchain-anthropic==0.1.23
# via megaparse
langchain-anthropic==0.3.0
# via quivr-core
langchain-cohere==0.2.4
langchain-cohere==0.3.3
# via quivr-core
langchain-community==0.2.19
langchain-community==0.3.9
# via langchain-experimental
# via megaparse
# via quivr-core
langchain-core==0.2.43
langchain-core==0.3.21
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-community
# via langchain-experimental
# via langchain-mistralai
# via langchain-openai
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via megaparse
# via quivr-core
langchain-experimental==0.0.65
langchain-experimental==0.3.3
# via langchain-cohere
langchain-openai==0.1.25
# via megaparse
langchain-mistralai==0.2.3
# via quivr-core
langchain-text-splitters==0.2.4
langchain-openai==0.2.11
# via quivr-core
langchain-text-splitters==0.3.2
# via langchain
langdetect==1.0.9
# via unstructured
langgraph==0.2.52
langgraph==0.2.56
# via quivr-core
langgraph-checkpoint==2.0.5
langgraph-checkpoint==2.0.9
# via langgraph
langgraph-sdk==0.1.36
langgraph-sdk==0.1.46
# via langgraph
langsmith==0.1.143
# via langchain
# via langchain-community
# via langchain-core
layoutparser==0.3.4
# via unstructured-inference
llama-cloud==0.1.5
# via llama-index-indices-managed-llama-cloud
llama-index==0.12.0
# via megaparse
llama-index-agent-openai==0.4.0
# via llama-index
# via llama-index-program-openai
llama-index-cli==0.4.0
# via llama-index
llama-index-core==0.12.0
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-embeddings-openai
# via llama-index-indices-managed-llama-cloud
# via llama-index-llms-openai
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
# via llama-index-readers-file
# via llama-index-readers-llama-parse
# via llama-parse
llama-index-embeddings-openai==0.3.0
# via llama-index
# via llama-index-cli
llama-index-indices-managed-llama-cloud==0.6.2
# via llama-index
llama-index-legacy==0.9.48.post4
# via llama-index
llama-index-llms-openai==0.3.0
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
llama-index-multi-modal-llms-openai==0.3.0
# via llama-index
llama-index-program-openai==0.3.0
# via llama-index
# via llama-index-question-gen-openai
llama-index-question-gen-openai==0.3.0
# via llama-index
llama-index-readers-file==0.4.0
# via llama-index
llama-index-readers-llama-parse==0.4.0
# via llama-index
llama-parse==0.5.14
# via llama-index-readers-llama-parse
# via megaparse
lxml==5.3.0
# via pikepdf
# via python-docx
# via python-pptx
# via unstructured
markdown==3.7
# via unstructured
loguru==0.7.2
# via megaparse-sdk
markdown-it-py==3.0.0
# via rich
markupsafe==3.0.2
@ -312,15 +152,10 @@ markupsafe==3.0.2
# via werkzeug
marshmallow==3.23.1
# via dataclasses-json
matplotlib==3.9.2
# via pycocotools
# via unstructured-inference
mdurl==0.1.2
# via markdown-it-py
megaparse==0.0.43
megaparse-sdk==0.1.10
# via quivr-core
mpmath==1.3.0
# via sympy
msgpack==1.1.0
# via langgraph-checkpoint
multidict==6.1.0
@ -328,89 +163,16 @@ multidict==6.1.0
# via yarl
mypy-extensions==1.0.0
# via typing-inspect
nest-asyncio==1.6.0
# via llama-index-core
# via llama-index-legacy
# via unstructured-client
networkx==3.4.2
# via llama-index-core
# via llama-index-legacy
# via torch
# via unstructured
nltk==3.9.1
# via llama-index
# via llama-index-core
# via llama-index-legacy
# via unstructured
nats-py==2.9.0
# via megaparse-sdk
numpy==1.26.4
# via contourpy
# via faiss-cpu
# via langchain
# via langchain-community
# via layoutparser
# via llama-index-core
# via llama-index-legacy
# via matplotlib
# via megaparse
# via onnx
# via onnxruntime
# via opencv-python
# via pandas
# via pycocotools
# via scipy
# via torchvision
# via transformers
# via unstructured
nvidia-cublas-cu12==12.4.5.8
# via nvidia-cudnn-cu12
# via nvidia-cusolver-cu12
# via torch
nvidia-cuda-cupti-cu12==12.4.127
# via torch
nvidia-cuda-nvrtc-cu12==12.4.127
# via torch
nvidia-cuda-runtime-cu12==12.4.127
# via torch
nvidia-cudnn-cu12==9.1.0.70
# via torch
nvidia-cufft-cu12==11.2.1.3
# via torch
nvidia-curand-cu12==10.3.5.147
# via torch
nvidia-cusolver-cu12==11.6.1.9
# via torch
nvidia-cusparse-cu12==12.3.1.170
# via nvidia-cusolver-cu12
# via torch
nvidia-nccl-cu12==2.21.5
# via torch
nvidia-nvjitlink-cu12==12.4.127
# via nvidia-cusolver-cu12
# via nvidia-cusparse-cu12
# via torch
nvidia-nvtx-cu12==12.4.127
# via torch
olefile==0.47
# via python-oxmsg
omegaconf==2.3.0
# via effdet
onnx==1.17.0
# via unstructured
# via unstructured-inference
onnxruntime==1.20.0
# via unstructured-inference
openai==1.54.5
# via langchain-openai
# via llama-index-agent-openai
# via llama-index-embeddings-openai
# via llama-index-legacy
# via llama-index-llms-openai
# via quivr-whisper
opencv-python==4.10.0.84
# via layoutparser
# via unstructured-inference
openpyxl==3.1.5
# via unstructured
orjson==3.10.11
# via langgraph-sdk
# via langsmith
@ -419,133 +181,44 @@ packaging==24.2
# via huggingface-hub
# via langchain-core
# via marshmallow
# via matplotlib
# via onnxruntime
# via pikepdf
# via pytesseract
# via transformers
# via unstructured-pytesseract
pandas==2.2.3
# via langchain-cohere
# via layoutparser
# via llama-index-legacy
# via llama-index-readers-file
# via unstructured
parameterized==0.9.0
# via cohere
pdf2image==1.17.0
# via layoutparser
# via unstructured
pdfminer-six==20231228
# via pdfplumber
# via unstructured
pdfplumber==0.11.4
# via layoutparser
# via megaparse
pikepdf==9.4.2
# via unstructured
pillow==11.0.0
# via layoutparser
# via llama-index-core
# via matplotlib
# via pdf2image
# via pdfplumber
# via pikepdf
# via pillow-heif
# via pytesseract
# via python-pptx
# via torchvision
# via unstructured-pytesseract
pillow-heif==0.20.0
# via unstructured
playwright==1.48.0
# via megaparse
portalocker==3.0.0
# via iopath
propcache==0.2.0
# via aiohttp
# via yarl
proto-plus==1.25.0
# via google-api-core
# via google-cloud-vision
protobuf==5.28.3
# via google-api-core
# via google-cloud-vision
# via googleapis-common-protos
# via grpcio-status
# via onnx
# via onnxruntime
# via proto-plus
# via transformers
psutil==6.1.0
# via megaparse
# via unstructured
pyasn1==0.6.1
# via pyasn1-modules
# via rsa
pyasn1-modules==0.4.1
# via google-auth
pycocotools==2.0.8
# via effdet
pycparser==2.22
# via cffi
# via megaparse-sdk
pycryptodome==3.21.0
# via megaparse
# via megaparse-sdk
pydantic==2.9.2
# via anthropic
# via cohere
# via fastapi
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-core
# via langchain-mistralai
# via langsmith
# via llama-cloud
# via llama-index-core
# via openai
# via pydantic-settings
# via quivr-core
# via unstructured-client
pydantic-core==2.23.4
# via cohere
# via pydantic
pydantic-settings==2.6.1
# via megaparse
pyee==12.0.0
# via playwright
# via langchain-community
pygments==2.18.0
# via rich
pypandoc==1.14
# via unstructured
pyparsing==3.2.0
# via matplotlib
pypdf==5.1.0
# via llama-index-readers-file
# via megaparse
# via unstructured
# via unstructured-client
pypdfium2==4.30.0
# via pdfplumber
pytesseract==0.3.13
# via unstructured
python-dateutil==2.8.2
# via matplotlib
# via pandas
# via unstructured-client
python-docx==1.1.2
# via unstructured
python-dotenv==1.0.1
# via megaparse
# via megaparse-sdk
# via pydantic-settings
python-iso639==2024.10.22
# via unstructured
python-magic==0.4.27
# via megaparse
# via unstructured
python-multipart==0.0.17
# via unstructured-inference
python-oxmsg==0.0.1
# via unstructured
python-pptx==0.6.23
# via unstructured
pytz==2024.2
# via pandas
pyyaml==6.0.2
@ -553,114 +226,59 @@ pyyaml==6.0.2
# via langchain
# via langchain-community
# via langchain-core
# via layoutparser
# via llama-index-core
# via omegaconf
# via timm
# via transformers
quivr-core==0.0.24
# via quivr-whisper
quivr-core @ file:///${PROJECT_ROOT}/../../core
rapidfuzz==3.10.1
# via quivr-core
# via unstructured
# via unstructured-inference
ratelimit==2.2.1
# via megaparse
regex==2024.11.6
# via nltk
# via tiktoken
# via transformers
requests==2.32.3
# via cohere
# via google-api-core
# via huggingface-hub
# via langchain
# via langchain-community
# via langsmith
# via llama-index-core
# via llama-index-legacy
# via megaparse
# via requests-toolbelt
# via tiktoken
# via transformers
# via unstructured
requests-toolbelt==1.0.0
# via langsmith
# via unstructured-client
rich==13.9.4
# via quivr-core
rsa==4.9
# via google-auth
safetensors==0.4.5
# via timm
# via transformers
scipy==1.14.1
# via layoutparser
sentencepiece==0.2.0
# via transformers
six==1.16.0
# via langdetect
# via python-dateutil
sniffio==1.3.1
# via anthropic
# via anyio
# via httpx
# via openai
soupsieve==2.6
# via beautifulsoup4
sqlalchemy==2.0.36
# via langchain
# via langchain-community
# via llama-index-core
# via llama-index-legacy
starlette==0.41.3
# via fastapi
striprtf==0.0.26
# via llama-index-readers-file
sympy==1.13.1
# via onnxruntime
# via torch
tabulate==0.9.0
# via langchain-cohere
# via unstructured
tenacity==8.5.0
# via langchain
# via langchain-community
# via langchain-core
# via llama-index-core
# via llama-index-legacy
tiktoken==0.8.0
# via langchain-openai
# via llama-index-core
# via llama-index-legacy
# via quivr-core
timm==1.0.11
# via effdet
# via unstructured-inference
tokenizers==0.20.3
# via cohere
# via langchain-mistralai
# via transformers
torch==2.5.1
# via effdet
# via timm
# via torchvision
# via unstructured-inference
torchvision==0.20.1
# via effdet
# via timm
tqdm==4.67.0
# via huggingface-hub
# via iopath
# via llama-index-core
# via nltk
# via openai
# via transformers
# via unstructured
transformers==4.46.3
# via quivr-core
# via unstructured-inference
triton==3.1.0
# via torch
types-pyyaml==6.0.12.20240917
# via quivr-core
types-requests==2.32.0.20241016
@ -668,53 +286,21 @@ types-requests==2.32.0.20241016
typing-extensions==4.12.2
# via anthropic
# via cohere
# via fastapi
# via huggingface-hub
# via iopath
# via langchain-core
# via llama-index-core
# via llama-index-legacy
# via openai
# via pydantic
# via pydantic-core
# via pyee
# via python-docx
# via python-oxmsg
# via sqlalchemy
# via torch
# via typing-inspect
# via unstructured
typing-inspect==0.9.0
# via dataclasses-json
# via llama-index-core
# via llama-index-legacy
# via unstructured-client
tzdata==2024.2
# via pandas
unstructured==0.15.0
# via megaparse
unstructured-client==0.27.0
# via unstructured
unstructured-inference==0.7.36
# via unstructured
unstructured-pytesseract==0.3.13
# via unstructured
urllib3==2.2.3
# via requests
# via types-requests
uvicorn==0.32.0
# via megaparse
uvloop==0.21.0
# via megaparse
werkzeug==3.1.3
# via flask
wrapt==1.16.0
# via deprecated
# via llama-index-core
# via unstructured
xlrd==2.0.1
# via unstructured
xlsxwriter==3.2.0
# via python-pptx
yarl==1.17.2
# via aiohttp

@ -20,7 +20,7 @@ aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anthropic==0.36.2
anthropic==0.40.0
# via langchain-anthropic
anyio==4.6.2.post1
# via anthropic
@ -62,12 +62,16 @@ httpcore==1.0.6
httpx==0.27.2
# via anthropic
# via cohere
# via langchain-mistralai
# via langgraph-sdk
# via langsmith
# via megaparse-sdk
# via openai
# via quivr-core
httpx-sse==0.4.0
# via cohere
# via langchain-community
# via langchain-mistralai
# via langgraph-sdk
huggingface-hub==0.26.1
# via tokenizers
@ -84,32 +88,35 @@ jsonpatch==1.33
# via langchain-core
jsonpointer==3.0.0
# via jsonpatch
langchain==0.2.16
langchain==0.3.9
# via langchain-community
# via quivr-core
langchain-anthropic==0.1.23
langchain-anthropic==0.3.0
# via quivr-core
langchain-cohere==0.2.4
langchain-cohere==0.3.3
# via quivr-core
langchain-community==0.2.17
langchain-community==0.3.9
# via langchain-experimental
# via quivr-core
langchain-core==0.2.41
langchain-core==0.3.21
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-community
# via langchain-experimental
# via langchain-mistralai
# via langchain-openai
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via quivr-core
langchain-experimental==0.0.65
langchain-experimental==0.3.3
# via langchain-cohere
langchain-openai==0.1.25
langchain-mistralai==0.2.3
# via quivr-core
langchain-text-splitters==0.2.4
langchain-openai==0.2.11
# via quivr-core
langchain-text-splitters==0.3.2
# via langchain
langgraph==0.2.39
# via quivr-core
@ -121,12 +128,18 @@ langsmith==0.1.136
# via langchain
# via langchain-community
# via langchain-core
loguru==0.7.2
# via megaparse-sdk
markdown-it-py==3.0.0
# via rich
markupsafe==3.0.2
# via quivr-core
marshmallow==3.23.0
# via dataclasses-json
mdurl==0.1.2
# via markdown-it-py
megaparse-sdk==0.1.10
# via quivr-core
msgpack==1.1.0
# via langgraph-checkpoint
multidict==6.1.0
@ -134,13 +147,15 @@ multidict==6.1.0
# via yarl
mypy-extensions==1.0.0
# via typing-inspect
nats-py==2.9.0
# via megaparse-sdk
numpy==1.26.4
# via faiss-cpu
# via langchain
# via langchain-community
# via pandas
# via transformers
openai==1.52.0
openai==1.56.2
# via langchain-openai
orjson==3.10.9
# via langgraph-sdk
@ -159,24 +174,34 @@ propcache==0.2.0
# via yarl
protobuf==5.28.2
# via transformers
psutil==6.1.0
# via megaparse-sdk
pycryptodome==3.21.0
# via megaparse-sdk
pydantic==2.9.2
# via anthropic
# via cohere
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-core
# via langchain-mistralai
# via langsmith
# via openai
# via pydantic-settings
# via quivr-core
# via sqlmodel
pydantic-core==2.23.4
# via cohere
# via pydantic
pydantic-settings==2.6.1
# via langchain-community
pygments==2.18.0
# via rich
python-dateutil==2.9.0.post0
# via pandas
python-dotenv==1.0.1
# via quivr-core
# via megaparse-sdk
# via pydantic-settings
pytz==2024.2
# via pandas
pyyaml==6.0.2
@ -218,9 +243,6 @@ sniffio==1.3.1
sqlalchemy==2.0.36
# via langchain
# via langchain-community
# via sqlmodel
sqlmodel==0.0.22
# via quivr-core
tabulate==0.9.0
# via langchain-cohere
tenacity==8.5.0
@ -231,8 +253,8 @@ tiktoken==0.8.0
# via langchain-openai
# via quivr-core
tokenizers==0.20.1
# via anthropic
# via cohere
# via langchain-mistralai
# via transformers
tqdm==4.66.5
# via huggingface-hub

@ -20,7 +20,7 @@ aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anthropic==0.36.2
anthropic==0.40.0
# via langchain-anthropic
anyio==4.6.2.post1
# via anthropic
@ -62,12 +62,16 @@ httpcore==1.0.6
httpx==0.27.2
# via anthropic
# via cohere
# via langchain-mistralai
# via langgraph-sdk
# via langsmith
# via megaparse-sdk
# via openai
# via quivr-core
httpx-sse==0.4.0
# via cohere
# via langchain-community
# via langchain-mistralai
# via langgraph-sdk
huggingface-hub==0.26.1
# via tokenizers
@ -84,32 +88,35 @@ jsonpatch==1.33
# via langchain-core
jsonpointer==3.0.0
# via jsonpatch
langchain==0.2.16
langchain==0.3.9
# via langchain-community
# via quivr-core
langchain-anthropic==0.1.23
langchain-anthropic==0.3.0
# via quivr-core
langchain-cohere==0.2.4
langchain-cohere==0.3.3
# via quivr-core
langchain-community==0.2.17
langchain-community==0.3.9
# via langchain-experimental
# via quivr-core
langchain-core==0.2.41
langchain-core==0.3.21
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-community
# via langchain-experimental
# via langchain-mistralai
# via langchain-openai
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via quivr-core
langchain-experimental==0.0.65
langchain-experimental==0.3.3
# via langchain-cohere
langchain-openai==0.1.25
langchain-mistralai==0.2.3
# via quivr-core
langchain-text-splitters==0.2.4
langchain-openai==0.2.11
# via quivr-core
langchain-text-splitters==0.3.2
# via langchain
langgraph==0.2.39
# via quivr-core
@ -121,12 +128,18 @@ langsmith==0.1.136
# via langchain
# via langchain-community
# via langchain-core
loguru==0.7.2
# via megaparse-sdk
markdown-it-py==3.0.0
# via rich
markupsafe==3.0.2
# via quivr-core
marshmallow==3.23.0
# via dataclasses-json
mdurl==0.1.2
# via markdown-it-py
megaparse-sdk==0.1.10
# via quivr-core
msgpack==1.1.0
# via langgraph-checkpoint
multidict==6.1.0
@ -134,13 +147,15 @@ multidict==6.1.0
# via yarl
mypy-extensions==1.0.0
# via typing-inspect
nats-py==2.9.0
# via megaparse-sdk
numpy==1.26.4
# via faiss-cpu
# via langchain
# via langchain-community
# via pandas
# via transformers
openai==1.52.0
openai==1.56.2
# via langchain-openai
orjson==3.10.9
# via langgraph-sdk
@ -159,24 +174,34 @@ propcache==0.2.0
# via yarl
protobuf==5.28.2
# via transformers
psutil==6.1.0
# via megaparse-sdk
pycryptodome==3.21.0
# via megaparse-sdk
pydantic==2.9.2
# via anthropic
# via cohere
# via langchain
# via langchain-anthropic
# via langchain-cohere
# via langchain-core
# via langchain-mistralai
# via langsmith
# via openai
# via pydantic-settings
# via quivr-core
# via sqlmodel
pydantic-core==2.23.4
# via cohere
# via pydantic
pydantic-settings==2.6.1
# via langchain-community
pygments==2.18.0
# via rich
python-dateutil==2.9.0.post0
# via pandas
python-dotenv==1.0.1
# via quivr-core
# via megaparse-sdk
# via pydantic-settings
pytz==2024.2
# via pandas
pyyaml==6.0.2
@ -218,9 +243,6 @@ sniffio==1.3.1
sqlalchemy==2.0.36
# via langchain
# via langchain-community
# via sqlmodel
sqlmodel==0.0.22
# via quivr-core
tabulate==0.9.0
# via langchain-cohere
tenacity==8.5.0
@ -231,8 +253,8 @@ tiktoken==0.8.0
# via langchain-openai
# via quivr-core
tokenizers==0.20.1
# via anthropic
# via cohere
# via langchain-mistralai
# via transformers
tqdm==4.66.5
# via huggingface-hub

@ -1,456 +0,0 @@
# generated by rye
# use `rye lock` or `rye sync` to update this lockfile
#
# last locked with the following flags:
# pre: false
# features: []
# all-features: false
# with-sources: false
# generate-hashes: false
# universal: false
-e file:core
aiofiles==24.1.0
# via quivr-core
aiohappyeyeballs==2.4.3
# via aiohttp
aiohttp==3.10.10
# via langchain
# via langchain-community
aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anthropic==0.36.1
# via langchain-anthropic
anyio==4.6.2.post1
# via anthropic
# via httpx
asttokens==2.4.1
# via stack-data
attrs==24.2.0
# via aiohttp
# via jsonschema
# via referencing
babel==2.16.0
# via mkdocs-material
beautifulsoup4==4.12.3
# via nbconvert
black==24.10.0
# via flake8-black
bleach==6.1.0
# via nbconvert
certifi==2024.8.30
# via httpcore
# via httpx
# via requests
cfgv==3.4.0
# via pre-commit
charset-normalizer==3.4.0
# via requests
click==8.1.7
# via black
# via mkdocs
# via mkdocstrings
colorama==0.4.6
# via griffe
# via mkdocs-material
comm==0.2.2
# via ipykernel
dataclasses-json==0.6.7
# via langchain-community
debugpy==1.8.7
# via ipykernel
decorator==5.1.1
# via ipython
defusedxml==0.7.1
# via langchain-anthropic
# via nbconvert
distlib==0.3.9
# via virtualenv
distro==1.9.0
# via anthropic
execnet==2.1.1
# via pytest-xdist
executing==2.1.0
# via stack-data
fastjsonschema==2.20.0
# via nbformat
filelock==3.16.1
# via huggingface-hub
# via transformers
# via virtualenv
flake8==7.1.1
# via flake8-black
flake8-black==0.3.6
frozenlist==1.4.1
# via aiohttp
# via aiosignal
fsspec==2024.9.0
# via huggingface-hub
ghp-import==2.1.0
# via mkdocs
greenlet==3.1.1
# via sqlalchemy
griffe==1.4.1
# via mkdocstrings-python
h11==0.14.0
# via httpcore
httpcore==1.0.6
# via httpx
httpx==0.27.2
# via anthropic
# via langgraph-sdk
# via langsmith
# via quivr-core
httpx-sse==0.4.0
# via langgraph-sdk
huggingface-hub==0.25.2
# via tokenizers
# via transformers
identify==2.6.1
# via pre-commit
idna==3.10
# via anyio
# via httpx
# via requests
# via yarl
iniconfig==2.0.0
# via pytest
ipykernel==6.29.5
# via mkdocs-jupyter
ipython==8.28.0
# via ipykernel
jedi==0.19.1
# via ipython
jinja2==3.1.4
# via mkdocs
# via mkdocs-material
# via mkdocstrings
# via nbconvert
jiter==0.6.1
# via anthropic
jsonpatch==1.33
# via langchain-core
jsonpointer==3.0.0
# via jsonpatch
jsonschema==4.23.0
# via nbformat
jsonschema-specifications==2024.10.1
# via jsonschema
jupyter-client==8.6.3
# via ipykernel
# via nbclient
jupyter-core==5.7.2
# via ipykernel
# via jupyter-client
# via nbclient
# via nbconvert
# via nbformat
jupyterlab-pygments==0.3.0
# via nbconvert
jupytext==1.16.4
# via mkdocs-jupyter
langchain==0.2.16
# via langchain-community
# via quivr-core
langchain-anthropic==0.1.23
# via quivr-core
langchain-community==0.2.17
# via quivr-core
langchain-core==0.2.41
# via langchain
# via langchain-anthropic
# via langchain-community
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via quivr-core
langchain-text-splitters==0.2.4
# via langchain
langgraph==0.2.38
# via quivr-core
langgraph-checkpoint==2.0.1
# via langgraph
langgraph-sdk==0.1.33
# via langgraph
langsmith==0.1.135
# via langchain
# via langchain-community
# via langchain-core
markdown==3.7
# via mkdocs
# via mkdocs-autorefs
# via mkdocs-material
# via mkdocstrings
# via pymdown-extensions
markdown-it-py==3.0.0
# via jupytext
# via mdit-py-plugins
# via rich
markupsafe==3.0.1
# via jinja2
# via mkdocs
# via mkdocs-autorefs
# via mkdocstrings
# via nbconvert
marshmallow==3.22.0
# via dataclasses-json
matplotlib-inline==0.1.7
# via ipykernel
# via ipython
mccabe==0.7.0
# via flake8
mdit-py-plugins==0.4.2
# via jupytext
mdurl==0.1.2
# via markdown-it-py
mergedeep==1.3.4
# via mkdocs
# via mkdocs-get-deps
mistune==3.0.2
# via nbconvert
mkdocs==1.6.1
# via mkdocs-autorefs
# via mkdocs-include-dir-to-nav
# via mkdocs-jupyter
# via mkdocs-material
# via mkdocs-redirects
# via mkdocstrings
mkdocs-autorefs==1.2.0
# via mkdocstrings
# via mkdocstrings-python
mkdocs-get-deps==0.2.0
# via mkdocs
mkdocs-include-dir-to-nav==1.2.0
mkdocs-jupyter==0.25.1
mkdocs-material==9.5.41
# via mkdocs-jupyter
mkdocs-material-extensions==1.3.1
# via mkdocs-material
mkdocs-redirects==1.2.1
mkdocstrings==0.26.2
# via mkdocstrings-python
mkdocstrings-python==1.12.1
# via mkdocstrings
msgpack==1.1.0
# via langgraph-checkpoint
multidict==6.1.0
# via aiohttp
# via yarl
mypy==1.12.0
mypy-extensions==1.0.0
# via black
# via mypy
# via typing-inspect
nbclient==0.10.0
# via nbconvert
nbconvert==7.16.4
# via mkdocs-jupyter
nbformat==5.10.4
# via jupytext
# via nbclient
# via nbconvert
nest-asyncio==1.6.0
# via ipykernel
nodeenv==1.9.1
# via pre-commit
numpy==1.26.4
# via langchain
# via langchain-community
# via transformers
orjson==3.10.7
# via langgraph-sdk
# via langsmith
packaging==24.1
# via black
# via huggingface-hub
# via ipykernel
# via jupytext
# via langchain-core
# via marshmallow
# via mkdocs
# via nbconvert
# via pytest
# via transformers
paginate==0.5.7
# via mkdocs-material
pandocfilters==1.5.1
# via nbconvert
parso==0.8.4
# via jedi
pathspec==0.12.1
# via black
# via mkdocs
pexpect==4.9.0
# via ipython
platformdirs==4.3.6
# via black
# via jupyter-core
# via mkdocs-get-deps
# via mkdocstrings
# via virtualenv
pluggy==1.5.0
# via pytest
pre-commit==4.0.1
prompt-toolkit==3.0.48
# via ipython
propcache==0.2.0
# via yarl
protobuf==5.28.2
# via transformers
psutil==6.0.0
# via ipykernel
ptyprocess==0.7.0
# via pexpect
pure-eval==0.2.3
# via stack-data
py-cpuinfo==9.0.0
# via pytest-benchmark
pycodestyle==2.12.1
# via flake8
pydantic==2.9.2
# via anthropic
# via langchain
# via langchain-core
# via langsmith
# via quivr-core
pydantic-core==2.23.4
# via pydantic
pyflakes==3.2.0
# via flake8
pygments==2.18.0
# via ipython
# via mkdocs-jupyter
# via mkdocs-material
# via nbconvert
# via rich
pymdown-extensions==10.11.2
# via mkdocs-material
# via mkdocstrings
pytest==8.3.3
# via pytest-asyncio
# via pytest-benchmark
# via pytest-xdist
pytest-asyncio==0.24.0
pytest-benchmark==4.0.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
# via ghp-import
# via jupyter-client
pyyaml==6.0.2
# via huggingface-hub
# via jupytext
# via langchain
# via langchain-community
# via langchain-core
# via mkdocs
# via mkdocs-get-deps
# via pre-commit
# via pymdown-extensions
# via pyyaml-env-tag
# via transformers
pyyaml-env-tag==0.1
# via mkdocs
pyzmq==26.2.0
# via ipykernel
# via jupyter-client
referencing==0.35.1
# via jsonschema
# via jsonschema-specifications
regex==2024.9.11
# via mkdocs-material
# via tiktoken
# via transformers
requests==2.32.3
# via huggingface-hub
# via langchain
# via langchain-community
# via langsmith
# via mkdocs-material
# via requests-toolbelt
# via tiktoken
# via transformers
requests-toolbelt==1.0.0
# via langsmith
rich==13.9.2
# via quivr-core
rpds-py==0.20.0
# via jsonschema
# via referencing
ruff==0.6.9
safetensors==0.4.5
# via transformers
sentencepiece==0.2.0
# via transformers
six==1.16.0
# via asttokens
# via bleach
# via python-dateutil
sniffio==1.3.1
# via anthropic
# via anyio
# via httpx
soupsieve==2.6
# via beautifulsoup4
sqlalchemy==2.0.36
# via langchain
# via langchain-community
stack-data==0.6.3
# via ipython
tenacity==8.5.0
# via langchain
# via langchain-community
# via langchain-core
tiktoken==0.8.0
# via quivr-core
tinycss2==1.3.0
# via nbconvert
tokenizers==0.20.1
# via anthropic
# via transformers
tornado==6.4.1
# via ipykernel
# via jupyter-client
tqdm==4.66.5
# via huggingface-hub
# via transformers
traitlets==5.14.3
# via comm
# via ipykernel
# via ipython
# via jupyter-client
# via jupyter-core
# via matplotlib-inline
# via nbclient
# via nbconvert
# via nbformat
transformers==4.45.2
# via quivr-core
types-pyyaml==6.0.12.20240917
# via quivr-core
typing-extensions==4.12.2
# via anthropic
# via huggingface-hub
# via ipython
# via langchain-core
# via mypy
# via pydantic
# via pydantic-core
# via sqlalchemy
# via typing-inspect
typing-inspect==0.9.0
# via dataclasses-json
urllib3==2.2.3
# via requests
virtualenv==20.26.6
# via pre-commit
watchdog==5.0.3
# via mkdocs
wcwidth==0.2.13
# via prompt-toolkit
webencodings==0.5.1
# via bleach
# via tinycss2
yarl==1.15.4
# via aiohttp

@ -1,406 +0,0 @@
# generated by rye
# use `rye lock` or `rye sync` to update this lockfile
#
# last locked with the following flags:
# pre: false
# features: []
# all-features: false
# with-sources: false
# generate-hashes: false
# universal: false
-e file:core
aiofiles==24.1.0
# via quivr-core
aiohappyeyeballs==2.4.3
# via aiohttp
aiohttp==3.10.10
# via langchain
# via langchain-community
aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anthropic==0.36.1
# via langchain-anthropic
anyio==4.6.2.post1
# via anthropic
# via httpx
asttokens==2.4.1
# via stack-data
attrs==24.2.0
# via aiohttp
# via jsonschema
# via referencing
babel==2.16.0
# via mkdocs-material
beautifulsoup4==4.12.3
# via nbconvert
bleach==6.1.0
# via nbconvert
certifi==2024.8.30
# via httpcore
# via httpx
# via requests
charset-normalizer==3.4.0
# via requests
click==8.1.7
# via mkdocs
# via mkdocstrings
colorama==0.4.6
# via griffe
# via mkdocs-material
comm==0.2.2
# via ipykernel
dataclasses-json==0.6.7
# via langchain-community
debugpy==1.8.7
# via ipykernel
decorator==5.1.1
# via ipython
defusedxml==0.7.1
# via langchain-anthropic
# via nbconvert
distro==1.9.0
# via anthropic
executing==2.1.0
# via stack-data
fastjsonschema==2.20.0
# via nbformat
filelock==3.16.1
# via huggingface-hub
# via transformers
frozenlist==1.4.1
# via aiohttp
# via aiosignal
fsspec==2024.9.0
# via huggingface-hub
ghp-import==2.1.0
# via mkdocs
greenlet==3.1.1
# via sqlalchemy
griffe==1.4.1
# via mkdocstrings-python
h11==0.14.0
# via httpcore
httpcore==1.0.6
# via httpx
httpx==0.27.2
# via anthropic
# via langgraph-sdk
# via langsmith
# via quivr-core
httpx-sse==0.4.0
# via langgraph-sdk
huggingface-hub==0.25.2
# via tokenizers
# via transformers
idna==3.10
# via anyio
# via httpx
# via requests
# via yarl
ipykernel==6.29.5
# via mkdocs-jupyter
ipython==8.28.0
# via ipykernel
jedi==0.19.1
# via ipython
jinja2==3.1.4
# via mkdocs
# via mkdocs-material
# via mkdocstrings
# via nbconvert
jiter==0.6.1
# via anthropic
jsonpatch==1.33
# via langchain-core
jsonpointer==3.0.0
# via jsonpatch
jsonschema==4.23.0
# via nbformat
jsonschema-specifications==2024.10.1
# via jsonschema
jupyter-client==8.6.3
# via ipykernel
# via nbclient
jupyter-core==5.7.2
# via ipykernel
# via jupyter-client
# via nbclient
# via nbconvert
# via nbformat
jupyterlab-pygments==0.3.0
# via nbconvert
jupytext==1.16.4
# via mkdocs-jupyter
langchain==0.2.16
# via langchain-community
# via quivr-core
langchain-anthropic==0.1.23
# via quivr-core
langchain-community==0.2.17
# via quivr-core
langchain-core==0.2.41
# via langchain
# via langchain-anthropic
# via langchain-community
# via langchain-text-splitters
# via langgraph
# via langgraph-checkpoint
# via quivr-core
langchain-text-splitters==0.2.4
# via langchain
langgraph==0.2.38
# via quivr-core
langgraph-checkpoint==2.0.1
# via langgraph
langgraph-sdk==0.1.33
# via langgraph
langsmith==0.1.135
# via langchain
# via langchain-community
# via langchain-core
markdown==3.7
# via mkdocs
# via mkdocs-autorefs
# via mkdocs-material
# via mkdocstrings
# via pymdown-extensions
markdown-it-py==3.0.0
# via jupytext
# via mdit-py-plugins
# via rich
markupsafe==3.0.1
# via jinja2
# via mkdocs
# via mkdocs-autorefs
# via mkdocstrings
# via nbconvert
marshmallow==3.22.0
# via dataclasses-json
matplotlib-inline==0.1.7
# via ipykernel
# via ipython
mdit-py-plugins==0.4.2
# via jupytext
mdurl==0.1.2
# via markdown-it-py
mergedeep==1.3.4
# via mkdocs
# via mkdocs-get-deps
mistune==3.0.2
# via nbconvert
mkdocs==1.6.1
# via mkdocs-autorefs
# via mkdocs-include-dir-to-nav
# via mkdocs-jupyter
# via mkdocs-material
# via mkdocs-redirects
# via mkdocstrings
mkdocs-autorefs==1.2.0
# via mkdocstrings
# via mkdocstrings-python
mkdocs-get-deps==0.2.0
# via mkdocs
mkdocs-include-dir-to-nav==1.2.0
mkdocs-jupyter==0.25.1
mkdocs-material==9.5.41
# via mkdocs-jupyter
mkdocs-material-extensions==1.3.1
# via mkdocs-material
mkdocs-redirects==1.2.1
mkdocstrings==0.26.2
# via mkdocstrings-python
mkdocstrings-python==1.12.1
# via mkdocstrings
msgpack==1.1.0
# via langgraph-checkpoint
multidict==6.1.0
# via aiohttp
# via yarl
mypy-extensions==1.0.0
# via typing-inspect
nbclient==0.10.0
# via nbconvert
nbconvert==7.16.4
# via mkdocs-jupyter
nbformat==5.10.4
# via jupytext
# via nbclient
# via nbconvert
nest-asyncio==1.6.0
# via ipykernel
numpy==1.26.4
# via langchain
# via langchain-community
# via transformers
orjson==3.10.7
# via langgraph-sdk
# via langsmith
packaging==24.1
# via huggingface-hub
# via ipykernel
# via jupytext
# via langchain-core
# via marshmallow
# via mkdocs
# via nbconvert
# via transformers
paginate==0.5.7
# via mkdocs-material
pandocfilters==1.5.1
# via nbconvert
parso==0.8.4
# via jedi
pathspec==0.12.1
# via mkdocs
pexpect==4.9.0
# via ipython
platformdirs==4.3.6
# via jupyter-core
# via mkdocs-get-deps
# via mkdocstrings
prompt-toolkit==3.0.48
# via ipython
propcache==0.2.0
# via yarl
protobuf==5.28.2
# via transformers
psutil==6.0.0
# via ipykernel
ptyprocess==0.7.0
# via pexpect
pure-eval==0.2.3
# via stack-data
pydantic==2.9.2
# via anthropic
# via langchain
# via langchain-core
# via langsmith
# via quivr-core
pydantic-core==2.23.4
# via pydantic
pygments==2.18.0
# via ipython
# via mkdocs-jupyter
# via mkdocs-material
# via nbconvert
# via rich
pymdown-extensions==10.11.2
# via mkdocs-material
# via mkdocstrings
python-dateutil==2.9.0.post0
# via ghp-import
# via jupyter-client
pyyaml==6.0.2
# via huggingface-hub
# via jupytext
# via langchain
# via langchain-community
# via langchain-core
# via mkdocs
# via mkdocs-get-deps
# via pymdown-extensions
# via pyyaml-env-tag
# via transformers
pyyaml-env-tag==0.1
# via mkdocs
pyzmq==26.2.0
# via ipykernel
# via jupyter-client
referencing==0.35.1
# via jsonschema
# via jsonschema-specifications
regex==2024.9.11
# via mkdocs-material
# via tiktoken
# via transformers
requests==2.32.3
# via huggingface-hub
# via langchain
# via langchain-community
# via langsmith
# via mkdocs-material
# via requests-toolbelt
# via tiktoken
# via transformers
requests-toolbelt==1.0.0
# via langsmith
rich==13.9.2
# via quivr-core
rpds-py==0.20.0
# via jsonschema
# via referencing
safetensors==0.4.5
# via transformers
sentencepiece==0.2.0
# via transformers
six==1.16.0
# via asttokens
# via bleach
# via python-dateutil
sniffio==1.3.1
# via anthropic
# via anyio
# via httpx
soupsieve==2.6
# via beautifulsoup4
sqlalchemy==2.0.36
# via langchain
# via langchain-community
stack-data==0.6.3
# via ipython
tenacity==8.5.0
# via langchain
# via langchain-community
# via langchain-core
tiktoken==0.8.0
# via quivr-core
tinycss2==1.3.0
# via nbconvert
tokenizers==0.20.1
# via anthropic
# via transformers
tornado==6.4.1
# via ipykernel
# via jupyter-client
tqdm==4.66.5
# via huggingface-hub
# via transformers
traitlets==5.14.3
# via comm
# via ipykernel
# via ipython
# via jupyter-client
# via jupyter-core
# via matplotlib-inline
# via nbclient
# via nbconvert
# via nbformat
transformers==4.45.2
# via quivr-core
types-pyyaml==6.0.12.20240917
# via quivr-core
typing-extensions==4.12.2
# via anthropic
# via huggingface-hub
# via ipython
# via langchain-core
# via pydantic
# via pydantic-core
# via sqlalchemy
# via typing-inspect
typing-inspect==0.9.0
# via dataclasses-json
urllib3==2.2.3
# via requests
watchdog==5.0.3
# via mkdocs
wcwidth==0.2.13
# via prompt-toolkit
webencodings==0.5.1
# via bleach
# via tinycss2
yarl==1.15.4
# via aiohttp