Mirror of https://github.com/QuivrHQ/quivr.git (synced 2024-12-14 07:59:00 +03:00)
feat(ask): non-streaming now calls streaming (#3409)
# Description

`Brain.ask()` is now a thin synchronous wrapper around `ask_streaming()`: it collects the streamed chunks and returns them as a `ParsedRAGResponse`, so the non-streaming and streaming entry points share a single RAG code path. The default OpenAI model moves from `gpt-3.5-turbo-0125` to `gpt-4o` (in `LLMEndpointConfig`, the workflow YAMLs, and the function-calling whitelist), and the dependency lock files are regenerated against the local quivr-core checkout.

## Checklist before requesting a review

Please delete options that are not relevant.

- [ ] My code follows the style guidelines of this project
- [ ] I have performed a self-review of my code
- [ ] I have commented hard-to-understand areas
- [ ] I have ideally added tests that prove my fix is effective or that my feature works
- [ ] New and existing unit tests pass locally with my changes
- [ ] Any dependent changes have been merged

## Screenshots (if appropriate):
parent 2b347c96b5
commit e71e46bcdf
@@ -545,34 +545,28 @@ class Brain:
             print(answer.answer)
         ```
         """
-        llm = self.llm
+        async def collect_streamed_response():
+            full_answer = ""
+            async for response in self.ask_streaming(
+                question=question,
+                retrieval_config=retrieval_config,
+                rag_pipeline=rag_pipeline,
+                list_files=list_files,
+                chat_history=chat_history
+            ):
+                full_answer += response.answer
+            return full_answer

-        # If you passed a different llm model we'll override the brain one
-        if retrieval_config:
-            if retrieval_config.llm_config != self.llm.get_config():
-                llm = LLMEndpoint.from_config(config=retrieval_config.llm_config)
-        else:
-            retrieval_config = RetrievalConfig(llm_config=self.llm.get_config())
-
-        if rag_pipeline is None:
-            rag_pipeline = QuivrQARAGLangGraph
-
-        rag_instance = rag_pipeline(
-            retrieval_config=retrieval_config, llm=llm, vector_store=self.vector_db
-        )
+        # Run the async function in the event loop
+        loop = asyncio.get_event_loop()
+        full_answer = loop.run_until_complete(collect_streamed_response())

         chat_history = self.default_chat if chat_history is None else chat_history
-        list_files = [] if list_files is None else list_files
-
-        parsed_response = rag_instance.answer(
-            question=question, history=chat_history, list_files=list_files
-        )
-
         chat_history.append(HumanMessage(content=question))
-        chat_history.append(AIMessage(content=parsed_response.answer))
+        chat_history.append(AIMessage(content=full_answer))

-        # Save answer to the chat history
-        return parsed_response
+        # Return the final response
+        return ParsedRAGResponse(answer=full_answer)

     async def ask_streaming(
         self,

@@ -635,3 +629,4 @@ class Brain:
         chat_history.append(HumanMessage(content=question))
         chat_history.append(AIMessage(content=full_answer))
         yield response
+
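For readers of the hunks above: `ask()` no longer builds its own `QuivrQARAGLangGraph` instance; it drains `ask_streaming()` and wraps the concatenated chunks in a `ParsedRAGResponse`. The sketch below is a hedged usage example of the two entry points, assuming `brain` is an already-built quivr-core `Brain`; the call sites are illustrative and not taken from this PR.

```python
# Hedged sketch: how the streaming and non-streaming entry points relate after this change.
import asyncio


async def stream_answer(brain, question: str) -> str:
    """Consume ask_streaming() chunk by chunk, mirroring what ask() now does internally."""
    full_answer = ""
    async for chunk in brain.ask_streaming(question):
        print(chunk.answer, end="", flush=True)  # show tokens as they arrive
        full_answer += chunk.answer
    return full_answer


def blocking_answer(brain, question: str) -> str:
    """Non-streaming path: ask() drives the same stream to completion on the event loop."""
    response = brain.ask(question)  # returns ParsedRAGResponse(answer=...)
    return response.answer


# Example wiring (assumes `brain` exists):
# print(blocking_answer(brain, "What is Quivr?"))
# print(asyncio.run(stream_answer(brain, "What is Quivr?")))
```

One consequence of the `loop.run_until_complete(...)` approach is that `ask()` should be called from synchronous code; calling it from inside an already-running event loop would fail, so async callers should keep using `ask_streaming()` directly.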
@@ -233,7 +233,7 @@ class LLMEndpointConfig(QuivrBaseConfig):

     Attributes:
         supplier (DefaultModelSuppliers): The LLM provider (default: OPENAI).
-        model (str): The specific model to use (default: "gpt-3.5-turbo-0125").
+        model (str): The specific model to use (default: "gpt-4o").
         context_length (int | None): The maximum context length for the model.
         tokenizer_hub (str | None): The tokenizer to use for this model.
         llm_base_url (str | None): Base URL for the LLM API.

@@ -247,7 +247,7 @@ class LLMEndpointConfig(QuivrBaseConfig):
     """

     supplier: DefaultModelSuppliers = DefaultModelSuppliers.OPENAI
-    model: str = "gpt-3.5-turbo-0125"
+    model: str = "gpt-4o"
     context_length: int | None = None
     tokenizer_hub: str | None = None
     llm_base_url: str | None = None
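Because the default `model` on `LLMEndpointConfig` changes, callers that relied on the implicit default now get `gpt-4o`. A minimal sketch follows; the import path is an assumption, while the field names and defaults come from the hunks above.

```python
# Assumed import path; the fields used here are those shown in the diff above.
from quivr_core.config import DefaultModelSuppliers, LLMEndpointConfig

# The implicit default now resolves to gpt-4o.
config = LLMEndpointConfig()
assert config.supplier == DefaultModelSuppliers.OPENAI
assert config.model == "gpt-4o"

# Callers that need the previous behaviour can still pin the old model explicitly.
legacy = LLMEndpointConfig(model="gpt-3.5-turbo-0125", max_input_tokens=2000)
```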
@@ -26,7 +26,7 @@ def model_supports_function_calling(model_name: str):
         "gpt-4",
         "gpt-4-1106-preview",
         "gpt-4-0613",
-        "gpt-3.5-turbo-0125",
+        "gpt-4o",
         "gpt-3.5-turbo-1106",
         "gpt-3.5-turbo-0613",
         "gpt-4-0125-preview",
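The helper above gates function-calling features on a whitelist of model names; swapping the entry keeps the new default `gpt-4o` on that list. Assuming a plain membership check and the import path below (both assumptions, only the list contents come from this diff), the observable behaviour is:

```python
# Assumed import path and membership semantics; the entries come from the diff above.
from quivr_core.utils import model_supports_function_calling

assert model_supports_function_calling("gpt-4o")                  # added by this PR
assert model_supports_function_calling("gpt-3.5-turbo-1106")      # unchanged entry
assert not model_supports_function_calling("gpt-3.5-turbo-0125")  # removed entry
```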
@@ -27,7 +27,7 @@ retrieval_config:
     supplier: "openai"

     # The model to use for the LLM for the given supplier
-    model: "gpt-3.5-turbo-0125"
+    model: "gpt-4o"

     max_input_tokens: 2000


@@ -40,7 +40,7 @@ retrieval_config:
     supplier: "openai"

     # The model to use for the LLM for the given supplier
-    model: "gpt-3.5-turbo-0125"
+    model: "gpt-4o"

     max_input_tokens: 2000

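Both `model:` occurrences in the workflow YAML move to `gpt-4o`. A quick, hedged way to sanity-check a config after such a bump is to walk the parsed document and print every `model` key; only PyYAML's `safe_load` is assumed, and the file name is a placeholder.

```python
# Print every "model" value found anywhere in a workflow YAML file.
import yaml


def find_models(node, path=""):
    """Recursively report all keys named 'model' in a parsed YAML document."""
    if isinstance(node, dict):
        for key, value in node.items():
            child = f"{path}.{key}" if path else key
            if key == "model":
                print(f"{child} = {value}")
            find_models(value, child)
    elif isinstance(node, list):
        for index, item in enumerate(node):
            find_models(item, f"{path}[{index}]")


with open("basic_rag_workflow.yaml") as handle:  # placeholder file name
    find_models(yaml.safe_load(handle))
```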
@@ -5,7 +5,7 @@ def test_default_llm_config():
     config = LLMEndpointConfig()

     assert config.model_dump(exclude={"llm_api_key"}) == LLMEndpointConfig(
-        model="gpt-3.5-turbo-0125",
+        model="gpt-4o",
         llm_base_url=None,
         llm_api_key=None,
         max_input_tokens=2000,
Dependency lock files (regenerated):

- `quivr-core==0.0.18` is replaced by `quivr-core @ file:///${PROJECT_ROOT}/../../core`, i.e. quivr-core is now installed from the local source tree.
- megaparse (previously pulled in via quivr-core) and its document-parsing stack drop out of the lock: megaparse, llama-index and every llama-index-* sub-package, llama-parse, llama-cloud, unstructured, unstructured-client, unstructured-inference, unstructured-pytesseract, pdf2docx, pdfplumber, pdfminer-six, pdf2image, pymupdf, pypdf, pypdfium2, pikepdf, python-docx, python-pptx, python-oxmsg, openpyxl, xlrd, xlsxwriter, mammoth, docx2txt, nltk, langdetect, layoutparser, onnx, onnxruntime, opencv-python, opencv-python-headless, effdet, timm, torch, torchvision, triton, the nvidia-* CUDA wheels, google-cloud-vision, google-api-core, grpcio-status, and proto-plus.
- Their remaining transitive helpers are removed as well: antlr4-python3-runtime, backoff, beautifulsoup4, soupsieve, cffi, pycparser, cryptography, chardet, cobble, coloredlogs, humanfriendly, dirtyjson, emoji, et-xmlfile, eval-type-backport, fire, termcolor, flatbuffers, iopath, portalocker, jsonpath-python, lxml, mpmath, sympy, networkx, olefile, omegaconf, pi-heif, pycocotools, pycryptodome, pypandoc, python-iso639, python-magic, rapidfuzz, sqlmodel, striprtf.
- The surviving langchain packages lose their `# via megaparse` annotations: langchain-openai is now required via quivr-core, and langchain-cohere gains an explicit `# via quivr-core`. The pinned versions of the remaining packages are unchanged.
|
pydantic-core==2.23.4
|
||||||
# via cohere
|
# via cohere
|
||||||
# via pydantic
|
# via pydantic
|
||||||
@ -662,44 +387,18 @@ pygments==2.18.0
|
|||||||
# via rich
|
# via rich
|
||||||
pyjwt==2.9.0
|
pyjwt==2.9.0
|
||||||
# via chainlit
|
# via chainlit
|
||||||
pymupdf==1.24.11
|
|
||||||
# via pdf2docx
|
|
||||||
pypandoc==1.14
|
|
||||||
# via unstructured
|
|
||||||
pyparsing==3.2.0
|
pyparsing==3.2.0
|
||||||
# via matplotlib
|
# via matplotlib
|
||||||
pypdf==4.3.1
|
|
||||||
# via llama-index-readers-file
|
|
||||||
# via unstructured
|
|
||||||
# via unstructured-client
|
|
||||||
pypdfium2==4.30.0
|
|
||||||
# via pdfplumber
|
|
||||||
python-dateutil==2.8.2
|
python-dateutil==2.8.2
|
||||||
# via botocore
|
# via botocore
|
||||||
# via matplotlib
|
# via matplotlib
|
||||||
# via pandas
|
# via pandas
|
||||||
# via unstructured-client
|
|
||||||
python-docx==1.1.2
|
|
||||||
# via megaparse
|
|
||||||
# via pdf2docx
|
|
||||||
# via unstructured
|
|
||||||
python-dotenv==1.0.1
|
python-dotenv==1.0.1
|
||||||
# via chainlit
|
# via chainlit
|
||||||
# via megaparse
|
|
||||||
python-engineio==4.10.1
|
python-engineio==4.10.1
|
||||||
# via python-socketio
|
# via python-socketio
|
||||||
python-iso639==2024.4.27
|
|
||||||
# via unstructured
|
|
||||||
python-magic==0.4.27
|
|
||||||
# via unstructured
|
|
||||||
python-multipart==0.0.9
|
python-multipart==0.0.9
|
||||||
# via chainlit
|
# via chainlit
|
||||||
# via unstructured-inference
|
|
||||||
python-oxmsg==0.0.1
|
|
||||||
# via unstructured
|
|
||||||
python-pptx==1.0.2
|
|
||||||
# via megaparse
|
|
||||||
# via unstructured
|
|
||||||
python-socketio==5.11.4
|
python-socketio==5.11.4
|
||||||
# via chainlit
|
# via chainlit
|
||||||
pytz==2024.2
|
pytz==2024.2
|
||||||
@ -709,46 +408,33 @@ pyyaml==6.0.2
|
|||||||
# via langchain
|
# via langchain
|
||||||
# via langchain-community
|
# via langchain-community
|
||||||
# via langchain-core
|
# via langchain-core
|
||||||
# via layoutparser
|
|
||||||
# via llama-index-core
|
|
||||||
# via mlflow-skinny
|
# via mlflow-skinny
|
||||||
# via omegaconf
|
|
||||||
# via sagemaker
|
# via sagemaker
|
||||||
# via sagemaker-core
|
# via sagemaker-core
|
||||||
# via timm
|
|
||||||
# via transformers
|
# via transformers
|
||||||
quivr-core==0.0.18
|
quivr-core @ file:///${PROJECT_ROOT}/../../core
|
||||||
rapidfuzz==3.10.0
|
|
||||||
# via unstructured
|
|
||||||
# via unstructured-inference
|
|
||||||
referencing==0.35.1
|
referencing==0.35.1
|
||||||
# via jsonschema
|
# via jsonschema
|
||||||
# via jsonschema-specifications
|
# via jsonschema-specifications
|
||||||
regex==2024.9.11
|
regex==2024.9.11
|
||||||
# via nltk
|
|
||||||
# via tiktoken
|
# via tiktoken
|
||||||
# via transformers
|
# via transformers
|
||||||
requests==2.32.3
|
requests==2.32.3
|
||||||
# via cohere
|
# via cohere
|
||||||
# via databricks-sdk
|
# via databricks-sdk
|
||||||
# via docker
|
# via docker
|
||||||
# via google-api-core
|
|
||||||
# via huggingface-hub
|
# via huggingface-hub
|
||||||
# via langchain
|
# via langchain
|
||||||
# via langchain-community
|
# via langchain-community
|
||||||
# via langsmith
|
# via langsmith
|
||||||
# via llama-index-core
|
|
||||||
# via llama-index-legacy
|
|
||||||
# via mlflow-skinny
|
# via mlflow-skinny
|
||||||
# via opentelemetry-exporter-otlp-proto-http
|
# via opentelemetry-exporter-otlp-proto-http
|
||||||
# via requests-toolbelt
|
# via requests-toolbelt
|
||||||
# via sagemaker
|
# via sagemaker
|
||||||
# via tiktoken
|
# via tiktoken
|
||||||
# via transformers
|
# via transformers
|
||||||
# via unstructured
|
|
||||||
requests-toolbelt==1.0.0
|
requests-toolbelt==1.0.0
|
||||||
# via langsmith
|
# via langsmith
|
||||||
# via unstructured-client
|
|
||||||
rich==13.9.2
|
rich==13.9.2
|
||||||
# via quivr-core
|
# via quivr-core
|
||||||
# via sagemaker-core
|
# via sagemaker-core
|
||||||
@ -760,7 +446,6 @@ rsa==4.9
|
|||||||
s3transfer==0.10.3
|
s3transfer==0.10.3
|
||||||
# via boto3
|
# via boto3
|
||||||
safetensors==0.4.5
|
safetensors==0.4.5
|
||||||
# via timm
|
|
||||||
# via transformers
|
# via transformers
|
||||||
sagemaker==2.232.2
|
sagemaker==2.232.2
|
||||||
# via cohere
|
# via cohere
|
||||||
@ -773,7 +458,6 @@ schema==0.7.7
|
|||||||
scikit-learn==1.5.2
|
scikit-learn==1.5.2
|
||||||
# via mlflow
|
# via mlflow
|
||||||
scipy==1.14.1
|
scipy==1.14.1
|
||||||
# via layoutparser
|
|
||||||
# via mlflow
|
# via mlflow
|
||||||
# via scikit-learn
|
# via scikit-learn
|
||||||
sentencepiece==0.2.0
|
sentencepiece==0.2.0
|
||||||
@ -784,7 +468,6 @@ simple-websocket==1.1.0
|
|||||||
# via python-engineio
|
# via python-engineio
|
||||||
six==1.16.0
|
six==1.16.0
|
||||||
# via google-pasta
|
# via google-pasta
|
||||||
# via langdetect
|
|
||||||
# via python-dateutil
|
# via python-dateutil
|
||||||
smdebug-rulesconfig==1.0.1
|
smdebug-rulesconfig==1.0.1
|
||||||
# via sagemaker
|
# via sagemaker
|
||||||
@ -795,80 +478,44 @@ sniffio==1.3.1
|
|||||||
# via anyio
|
# via anyio
|
||||||
# via httpx
|
# via httpx
|
||||||
# via openai
|
# via openai
|
||||||
soupsieve==2.6
|
|
||||||
# via beautifulsoup4
|
|
||||||
sqlalchemy==2.0.36
|
sqlalchemy==2.0.36
|
||||||
# via alembic
|
# via alembic
|
||||||
# via langchain
|
# via langchain
|
||||||
# via langchain-community
|
# via langchain-community
|
||||||
# via llama-index-core
|
|
||||||
# via llama-index-legacy
|
|
||||||
# via mlflow
|
# via mlflow
|
||||||
# via sqlmodel
|
|
||||||
sqlmodel==0.0.22
|
|
||||||
sqlparse==0.5.1
|
sqlparse==0.5.1
|
||||||
# via mlflow-skinny
|
# via mlflow-skinny
|
||||||
starlette==0.37.2
|
starlette==0.37.2
|
||||||
# via chainlit
|
# via chainlit
|
||||||
# via fastapi
|
# via fastapi
|
||||||
striprtf==0.0.26
|
|
||||||
# via llama-index-readers-file
|
|
||||||
sympy==1.13.3
|
|
||||||
# via onnxruntime
|
|
||||||
# via torch
|
|
||||||
syncer==2.0.3
|
syncer==2.0.3
|
||||||
# via chainlit
|
# via chainlit
|
||||||
tabulate==0.9.0
|
tabulate==0.9.0
|
||||||
# via langchain-cohere
|
# via langchain-cohere
|
||||||
# via unstructured
|
|
||||||
tblib==3.0.0
|
tblib==3.0.0
|
||||||
# via sagemaker
|
# via sagemaker
|
||||||
tenacity==8.5.0
|
tenacity==8.5.0
|
||||||
# via langchain
|
# via langchain
|
||||||
# via langchain-community
|
# via langchain-community
|
||||||
# via langchain-core
|
# via langchain-core
|
||||||
# via llama-index-core
|
|
||||||
# via llama-index-legacy
|
|
||||||
termcolor==2.5.0
|
|
||||||
# via fire
|
|
||||||
threadpoolctl==3.5.0
|
threadpoolctl==3.5.0
|
||||||
# via scikit-learn
|
# via scikit-learn
|
||||||
tiktoken==0.8.0
|
tiktoken==0.8.0
|
||||||
# via langchain-openai
|
# via langchain-openai
|
||||||
# via llama-index-core
|
|
||||||
# via llama-index-legacy
|
|
||||||
# via quivr-core
|
# via quivr-core
|
||||||
timm==1.0.11
|
|
||||||
# via effdet
|
|
||||||
# via unstructured-inference
|
|
||||||
tokenizers==0.20.1
|
tokenizers==0.20.1
|
||||||
# via anthropic
|
# via anthropic
|
||||||
# via cohere
|
# via cohere
|
||||||
# via transformers
|
# via transformers
|
||||||
tomli==2.0.2
|
tomli==2.0.2
|
||||||
# via chainlit
|
# via chainlit
|
||||||
torch==2.4.1
|
|
||||||
# via effdet
|
|
||||||
# via timm
|
|
||||||
# via torchvision
|
|
||||||
# via unstructured-inference
|
|
||||||
torchvision==0.19.1
|
|
||||||
# via effdet
|
|
||||||
# via timm
|
|
||||||
tqdm==4.66.5
|
tqdm==4.66.5
|
||||||
# via huggingface-hub
|
# via huggingface-hub
|
||||||
# via iopath
|
|
||||||
# via llama-index-core
|
|
||||||
# via nltk
|
|
||||||
# via openai
|
# via openai
|
||||||
# via sagemaker
|
# via sagemaker
|
||||||
# via transformers
|
# via transformers
|
||||||
# via unstructured
|
|
||||||
transformers==4.45.2
|
transformers==4.45.2
|
||||||
# via quivr-core
|
# via quivr-core
|
||||||
# via unstructured-inference
|
|
||||||
triton==3.0.0
|
|
||||||
# via torch
|
|
||||||
types-pyyaml==6.0.12.20240917
|
types-pyyaml==6.0.12.20240917
|
||||||
# via quivr-core
|
# via quivr-core
|
||||||
types-requests==2.32.0.20241016
|
types-requests==2.32.0.20241016
|
||||||
@ -879,37 +526,17 @@ typing-extensions==4.12.2
|
|||||||
# via cohere
|
# via cohere
|
||||||
# via fastapi
|
# via fastapi
|
||||||
# via huggingface-hub
|
# via huggingface-hub
|
||||||
# via iopath
|
|
||||||
# via langchain-core
|
# via langchain-core
|
||||||
# via llama-index-core
|
|
||||||
# via llama-index-legacy
|
|
||||||
# via openai
|
# via openai
|
||||||
# via opentelemetry-sdk
|
# via opentelemetry-sdk
|
||||||
# via pydantic
|
# via pydantic
|
||||||
# via pydantic-core
|
# via pydantic-core
|
||||||
# via python-docx
|
|
||||||
# via python-oxmsg
|
|
||||||
# via python-pptx
|
|
||||||
# via sqlalchemy
|
# via sqlalchemy
|
||||||
# via torch
|
|
||||||
# via typing-inspect
|
# via typing-inspect
|
||||||
# via unstructured
|
|
||||||
typing-inspect==0.9.0
|
typing-inspect==0.9.0
|
||||||
# via dataclasses-json
|
# via dataclasses-json
|
||||||
# via llama-index-core
|
|
||||||
# via llama-index-legacy
|
|
||||||
# via unstructured-client
|
|
||||||
tzdata==2024.2
|
tzdata==2024.2
|
||||||
# via pandas
|
# via pandas
|
||||||
unstructured==0.15.14
|
|
||||||
# via megaparse
|
|
||||||
# via quivr-core
|
|
||||||
unstructured-client==0.26.1
|
|
||||||
# via unstructured
|
|
||||||
unstructured-inference==0.7.36
|
|
||||||
# via unstructured
|
|
||||||
unstructured-pytesseract==0.3.13
|
|
||||||
# via unstructured
|
|
||||||
uptrace==1.27.0
|
uptrace==1.27.0
|
||||||
# via chainlit
|
# via chainlit
|
||||||
urllib3==2.2.3
|
urllib3==2.2.3
|
||||||
@ -926,15 +553,9 @@ werkzeug==3.0.4
|
|||||||
# via flask
|
# via flask
|
||||||
wrapt==1.16.0
|
wrapt==1.16.0
|
||||||
# via deprecated
|
# via deprecated
|
||||||
# via llama-index-core
|
|
||||||
# via opentelemetry-instrumentation
|
# via opentelemetry-instrumentation
|
||||||
# via unstructured
|
|
||||||
wsproto==1.2.0
|
wsproto==1.2.0
|
||||||
# via simple-websocket
|
# via simple-websocket
|
||||||
xlrd==2.0.1
|
|
||||||
# via unstructured
|
|
||||||
xlsxwriter==3.2.0
|
|
||||||
# via python-pptx
|
|
||||||
yarl==1.15.4
|
yarl==1.15.4
|
||||||
# via aiohttp
|
# via aiohttp
|
||||||
zipp==3.20.2
|
zipp==3.20.2
|
||||||
|
@ -20,13 +20,13 @@ aiosignal==1.3.1
|
|||||||
# via aiohttp
|
# via aiohttp
|
||||||
annotated-types==0.7.0
|
annotated-types==0.7.0
|
||||||
# via pydantic
|
# via pydantic
|
||||||
anthropic==0.36.1
|
anthropic==0.36.2
|
||||||
# via langchain-anthropic
|
# via langchain-anthropic
|
||||||
anyio==4.6.2.post1
|
anyio==4.6.2.post1
|
||||||
# via anthropic
|
# via anthropic
|
||||||
# via httpx
|
# via httpx
|
||||||
# via openai
|
# via openai
|
||||||
attrs==23.2.0
|
attrs==24.2.0
|
||||||
# via aiohttp
|
# via aiohttp
|
||||||
certifi==2024.8.30
|
certifi==2024.8.30
|
||||||
# via httpcore
|
# via httpcore
|
||||||
@ -53,7 +53,7 @@ filelock==3.16.1
|
|||||||
frozenlist==1.4.1
|
frozenlist==1.4.1
|
||||||
# via aiohttp
|
# via aiohttp
|
||||||
# via aiosignal
|
# via aiosignal
|
||||||
fsspec==2024.9.0
|
fsspec==2024.10.0
|
||||||
# via huggingface-hub
|
# via huggingface-hub
|
||||||
greenlet==3.1.1
|
greenlet==3.1.1
|
||||||
# via sqlalchemy
|
# via sqlalchemy
|
||||||
@ -71,7 +71,7 @@ httpx==0.27.2
|
|||||||
httpx-sse==0.4.0
|
httpx-sse==0.4.0
|
||||||
# via cohere
|
# via cohere
|
||||||
# via langgraph-sdk
|
# via langgraph-sdk
|
||||||
huggingface-hub==0.25.2
|
huggingface-hub==0.26.1
|
||||||
# via tokenizers
|
# via tokenizers
|
||||||
# via transformers
|
# via transformers
|
||||||
idna==3.10
|
idna==3.10
|
||||||
@ -113,19 +113,19 @@ langchain-openai==0.1.25
|
|||||||
# via quivr-core
|
# via quivr-core
|
||||||
langchain-text-splitters==0.2.4
|
langchain-text-splitters==0.2.4
|
||||||
# via langchain
|
# via langchain
|
||||||
langgraph==0.2.38
|
langgraph==0.2.39
|
||||||
# via quivr-core
|
# via quivr-core
|
||||||
langgraph-checkpoint==2.0.1
|
langgraph-checkpoint==2.0.1
|
||||||
# via langgraph
|
# via langgraph
|
||||||
langgraph-sdk==0.1.33
|
langgraph-sdk==0.1.33
|
||||||
# via langgraph
|
# via langgraph
|
||||||
langsmith==0.1.135
|
langsmith==0.1.136
|
||||||
# via langchain
|
# via langchain
|
||||||
# via langchain-community
|
# via langchain-community
|
||||||
# via langchain-core
|
# via langchain-core
|
||||||
markdown-it-py==3.0.0
|
markdown-it-py==3.0.0
|
||||||
# via rich
|
# via rich
|
||||||
marshmallow==3.22.0
|
marshmallow==3.23.0
|
||||||
# via dataclasses-json
|
# via dataclasses-json
|
||||||
mdurl==0.1.2
|
mdurl==0.1.2
|
||||||
# via markdown-it-py
|
# via markdown-it-py
|
||||||
@ -144,7 +144,7 @@ numpy==1.26.4
|
|||||||
# via transformers
|
# via transformers
|
||||||
openai==1.52.0
|
openai==1.52.0
|
||||||
# via langchain-openai
|
# via langchain-openai
|
||||||
orjson==3.10.7
|
orjson==3.10.9
|
||||||
# via langgraph-sdk
|
# via langgraph-sdk
|
||||||
# via langsmith
|
# via langsmith
|
||||||
packaging==24.1
|
packaging==24.1
|
||||||
@ -159,7 +159,7 @@ parameterized==0.9.0
|
|||||||
# via cohere
|
# via cohere
|
||||||
propcache==0.2.0
|
propcache==0.2.0
|
||||||
# via yarl
|
# via yarl
|
||||||
protobuf==4.25.5
|
protobuf==5.28.2
|
||||||
# via transformers
|
# via transformers
|
||||||
pydantic==2.9.2
|
pydantic==2.9.2
|
||||||
# via anthropic
|
# via anthropic
|
||||||
@ -255,5 +255,5 @@ tzdata==2024.2
|
|||||||
urllib3==2.2.3
|
urllib3==2.2.3
|
||||||
# via requests
|
# via requests
|
||||||
# via types-requests
|
# via types-requests
|
||||||
yarl==1.15.4
|
yarl==1.15.5
|
||||||
# via aiohttp
|
# via aiohttp
|
||||||
|
@ -20,13 +20,13 @@ aiosignal==1.3.1
|
|||||||
# via aiohttp
|
# via aiohttp
|
||||||
annotated-types==0.7.0
|
annotated-types==0.7.0
|
||||||
# via pydantic
|
# via pydantic
|
||||||
anthropic==0.36.1
|
anthropic==0.36.2
|
||||||
# via langchain-anthropic
|
# via langchain-anthropic
|
||||||
anyio==4.6.2.post1
|
anyio==4.6.2.post1
|
||||||
# via anthropic
|
# via anthropic
|
||||||
# via httpx
|
# via httpx
|
||||||
# via openai
|
# via openai
|
||||||
attrs==23.2.0
|
attrs==24.2.0
|
||||||
# via aiohttp
|
# via aiohttp
|
||||||
certifi==2024.8.30
|
certifi==2024.8.30
|
||||||
# via httpcore
|
# via httpcore
|
||||||
@ -53,7 +53,7 @@ filelock==3.16.1
|
|||||||
frozenlist==1.4.1
|
frozenlist==1.4.1
|
||||||
# via aiohttp
|
# via aiohttp
|
||||||
# via aiosignal
|
# via aiosignal
|
||||||
fsspec==2024.9.0
|
fsspec==2024.10.0
|
||||||
# via huggingface-hub
|
# via huggingface-hub
|
||||||
greenlet==3.1.1
|
greenlet==3.1.1
|
||||||
# via sqlalchemy
|
# via sqlalchemy
|
||||||
@ -71,7 +71,7 @@ httpx==0.27.2
|
|||||||
httpx-sse==0.4.0
|
httpx-sse==0.4.0
|
||||||
# via cohere
|
# via cohere
|
||||||
# via langgraph-sdk
|
# via langgraph-sdk
|
||||||
huggingface-hub==0.25.2
|
huggingface-hub==0.26.1
|
||||||
# via tokenizers
|
# via tokenizers
|
||||||
# via transformers
|
# via transformers
|
||||||
idna==3.10
|
idna==3.10
|
||||||
@ -113,19 +113,19 @@ langchain-openai==0.1.25
|
|||||||
# via quivr-core
|
# via quivr-core
|
||||||
langchain-text-splitters==0.2.4
|
langchain-text-splitters==0.2.4
|
||||||
# via langchain
|
# via langchain
|
||||||
langgraph==0.2.38
|
langgraph==0.2.39
|
||||||
# via quivr-core
|
# via quivr-core
|
||||||
langgraph-checkpoint==2.0.1
|
langgraph-checkpoint==2.0.1
|
||||||
# via langgraph
|
# via langgraph
|
||||||
langgraph-sdk==0.1.33
|
langgraph-sdk==0.1.33
|
||||||
# via langgraph
|
# via langgraph
|
||||||
langsmith==0.1.135
|
langsmith==0.1.136
|
||||||
# via langchain
|
# via langchain
|
||||||
# via langchain-community
|
# via langchain-community
|
||||||
# via langchain-core
|
# via langchain-core
|
||||||
markdown-it-py==3.0.0
|
markdown-it-py==3.0.0
|
||||||
# via rich
|
# via rich
|
||||||
marshmallow==3.22.0
|
marshmallow==3.23.0
|
||||||
# via dataclasses-json
|
# via dataclasses-json
|
||||||
mdurl==0.1.2
|
mdurl==0.1.2
|
||||||
# via markdown-it-py
|
# via markdown-it-py
|
||||||
@ -144,7 +144,7 @@ numpy==1.26.4
|
|||||||
# via transformers
|
# via transformers
|
||||||
openai==1.52.0
|
openai==1.52.0
|
||||||
# via langchain-openai
|
# via langchain-openai
|
||||||
orjson==3.10.7
|
orjson==3.10.9
|
||||||
# via langgraph-sdk
|
# via langgraph-sdk
|
||||||
# via langsmith
|
# via langsmith
|
||||||
packaging==24.1
|
packaging==24.1
|
||||||
@ -159,7 +159,7 @@ parameterized==0.9.0
|
|||||||
# via cohere
|
# via cohere
|
||||||
propcache==0.2.0
|
propcache==0.2.0
|
||||||
# via yarl
|
# via yarl
|
||||||
protobuf==4.25.5
|
protobuf==5.28.2
|
||||||
# via transformers
|
# via transformers
|
||||||
pydantic==2.9.2
|
pydantic==2.9.2
|
||||||
# via anthropic
|
# via anthropic
|
||||||
@ -255,5 +255,5 @@ tzdata==2024.2
|
|||||||
urllib3==2.2.3
|
urllib3==2.2.3
|
||||||
# via requests
|
# via requests
|
||||||
# via types-requests
|
# via types-requests
|
||||||
yarl==1.15.4
|
yarl==1.15.5
|
||||||
# via aiohttp
|
# via aiohttp
|
||||||
|
@ -15,4 +15,4 @@ if __name__ == "__main__":
|
|||||||
answer = brain.ask(
|
answer = brain.ask(
|
||||||
"what is gold? asnwer in french"
|
"what is gold? asnwer in french"
|
||||||
)
|
)
|
||||||
print("answer QuivrQARAGLangGraph :", answer)
|
print("answer QuivrQARAGLangGraph :", answer.answer)
|
||||||
|
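The last hunk updates the example script because brain.ask() now returns a parsed response object rather than a plain string, so the generated text is read from its .answer attribute. Below is a minimal usage sketch; the Brain.from_files(...) construction, the brain name, and the file path are illustrative assumptions and are not part of this diff.

from quivr_core import Brain

if __name__ == "__main__":
    # Illustrative construction only: the name and file path are assumed here.
    brain = Brain.from_files(name="test_brain", file_paths=["./my_first_doc.pdf"])

    # ask() is synchronous; the returned response carries the text on `.answer`.
    answer = brain.ask("what is gold? answer in french")
    print("answer QuivrQARAGLangGraph :", answer.answer)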