mirror of
https://github.com/OpenBMB/ChatDev.git
synced 2024-09-18 21:57:07 +03:00
fix multi-turn token exceed problem
This commit is contained in:
parent 501ab6330f · commit b8db82a2f8
@@ -51,7 +51,7 @@ class OpenAIModel(ModelBackend):
         string = "\n".join([message["content"] for message in kwargs["messages"]])
         encoding = tiktoken.encoding_for_model(self.model_type.value)
         num_prompt_tokens = len(encoding.encode(string))
-        gap_between_send_receive = 50  # known issue
+        gap_between_send_receive = 15 * len(kwargs["messages"])
         num_prompt_tokens += gap_between_send_receive

         num_max_token_map = {
|
Loading…
Reference in New Issue
Block a user