2024-10-31 19:57:54 +03:00
|
|
|
from quivr_core.rag.entities.config import LLMEndpointConfig, RetrievalConfig
|
2024-07-09 18:55:14 +03:00
|
|
|
|
|
|
|
|
|
|
|
def test_default_llm_config():
    """A freshly constructed LLMEndpointConfig must equal one built with the
    documented default values, field for field (compared via model_dump())."""
    expected = LLMEndpointConfig(
        model="gpt-4o",
        llm_base_url=None,
        llm_api_key=None,
        max_context_tokens=2000,
        max_output_tokens=2000,
        temperature=0.7,
        streaming=True,
    )
    # Compare serialized dicts rather than model instances so any extra or
    # renamed field shows up in the assertion diff.
    assert LLMEndpointConfig().model_dump() == expected.model_dump()
|
2024-07-09 18:55:14 +03:00
|
|
|
|
|
|
|
|
2024-09-23 19:11:06 +03:00
|
|
|
def test_default_retrievalconfig():
    """A default RetrievalConfig exposes the expected default values and
    embeds a default LLMEndpointConfig."""
    config = RetrievalConfig()

    assert config.max_files == 20
    assert config.prompt is None
    # The nested LLM config must match an all-defaults LLMEndpointConfig;
    # leftover debug print() calls were removed — pytest already shows the
    # compared values on assertion failure.
    assert config.llm_config == LLMEndpointConfig()