fix(assistant): summary can now output up to 2000 tokens

Stan Girard 2024-04-17 17:26:46 +02:00
parent 3400b8633f
commit 4412ecde83


@@ -85,7 +85,7 @@ class SummaryAssistant(ITO):
         data = loader.load()
-        llm = ChatLiteLLM(model="gpt-3.5-turbo")
+        llm = ChatLiteLLM(model="gpt-3.5-turbo", max_tokens=2000)
         map_template = """The following is one document to summarize that has been split into multiple sections:
         {docs}
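
The change raises the completion budget of the summarization LLM so long summaries are no longer cut off at the default max_tokens. Below is a minimal sketch of how the changed line fits into a map-style summarization setup; it assumes LangChain's ChatLiteLLM wrapper (langchain-community import path), and the prompt wording and chain wiring are illustrative, not the repository's exact code.

# Sketch only: import paths assume langchain >= 0.1 with langchain-community installed.
from langchain_community.chat_models import ChatLiteLLM
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

# Before this commit the model used the default max_tokens cap, which could
# truncate the generated summary; max_tokens=2000 raises that cap.
llm = ChatLiteLLM(model="gpt-3.5-turbo", max_tokens=2000)

# Illustrative map prompt; the real template in the assistant may differ.
map_template = """The following is one document to summarize that has been split into multiple sections:
{docs}
Write a concise summary of the sections above."""

map_prompt = PromptTemplate.from_template(map_template)
map_chain = LLMChain(llm=llm, prompt=map_prompt)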