update backend model name&price

thinkwee 2024-05-07 15:34:52 +08:00
parent bbb145048e
commit 38efd452f7
4 changed files with 12 additions and 14 deletions


@@ -90,8 +90,7 @@ class OpenAIModel(ModelBackend):
"gpt-4": 8192,
"gpt-4-0613": 8192,
"gpt-4-32k": 32768,
"gpt-4-1106-preview": 4096,
"gpt-4-1106-vision-preview": 4096,
"gpt-4-turbo": 100000,
}
num_max_token = num_max_token_map[self.model_type.value]
num_max_completion_tokens = num_max_token - num_prompt_tokens
@@ -122,6 +121,7 @@ class OpenAIModel(ModelBackend):
"gpt-4": 8192,
"gpt-4-0613": 8192,
"gpt-4-32k": 32768,
"gpt-4-turbo": 100000,
}
num_max_token = num_max_token_map[self.model_type.value]
num_max_completion_tokens = num_max_token - num_prompt_tokens
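
Both hunks above touch the same token-budget map: the backend subtracts the prompt length from the per-model ceiling to get the completion budget, so the new "gpt-4-turbo" entry directly raises that budget. A minimal standalone sketch of that arithmetic (the prompt length is a made-up example, not taken from the diff):

# Values copied from the map above; num_prompt_tokens is illustrative only.
num_max_token_map = {
    "gpt-4": 8192,
    "gpt-4-32k": 32768,
    "gpt-4-turbo": 100000,  # entry added by this commit
}

num_prompt_tokens = 1500
num_max_token = num_max_token_map["gpt-4-turbo"]
num_max_completion_tokens = num_max_token - num_prompt_tokens
print(num_max_completion_tokens)  # 98500 tokens left for the completion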


@@ -48,8 +48,8 @@ class ModelType(Enum):
GPT_3_5_TURBO_NEW = "gpt-3.5-turbo-16k"
GPT_4 = "gpt-4"
GPT_4_32k = "gpt-4-32k"
- GPT_4_TURBO = "gpt-4-1106-preview"
- GPT_4_TURBO_V = "gpt-4-1106-vision-preview"
+ GPT_4_TURBO = "gpt-4-turbo"
+ GPT_4_TURBO_V = "gpt-4-turbo"
STUB = "stub"
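
Giving GPT_4_TURBO and GPT_4_TURBO_V the same string has a side effect worth noting: Python's Enum treats a duplicate value as an alias, so the two names now refer to one member. A small standalone sketch (not part of the diff):

from enum import Enum

class ModelType(Enum):
    GPT_4_TURBO = "gpt-4-turbo"
    GPT_4_TURBO_V = "gpt-4-turbo"  # duplicate value -> alias of GPT_4_TURBO

print(ModelType.GPT_4_TURBO_V is ModelType.GPT_4_TURBO)  # True
print(ModelType.GPT_4_TURBO_V.value)                     # gpt-4-turbo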


@@ -5,27 +5,25 @@ import numpy as np
def prompt_cost(model_type: str, num_prompt_tokens: float, num_completion_tokens: float):
input_cost_map = {
"gpt-3.5-turbo": 0.0015,
"gpt-3.5-turbo": 0.0005,
"gpt-3.5-turbo-16k": 0.003,
"gpt-3.5-turbo-0613": 0.0015,
"gpt-3.5-turbo-16k-0613": 0.003,
"gpt-4": 0.03,
"gpt-4-0613": 0.03,
"gpt-4-32k": 0.06,
"gpt-4-1106-preview": 0.01,
"gpt-4-1106-vision-preview": 0.01,
"gpt-4-turbo": 0.01,
}
output_cost_map = {
"gpt-3.5-turbo": 0.002,
"gpt-3.5-turbo": 0.0015,
"gpt-3.5-turbo-16k": 0.004,
"gpt-3.5-turbo-0613": 0.002,
"gpt-3.5-turbo-16k-0613": 0.004,
"gpt-4": 0.06,
"gpt-4-0613": 0.06,
"gpt-4-32k": 0.12,
"gpt-4-1106-preview": 0.03,
"gpt-4-1106-vision-preview": 0.03,
"gpt-4-turbo": 0.03,
}
if model_type not in input_cost_map or model_type not in output_cost_map:
@@ -112,7 +110,7 @@ def get_info(dir, log_filepath):
elif model_type == "GPT_4_32k":
model_type = "gpt-4-32k"
elif model_type == "GPT_4_TURBO":
model_type = "gpt-4-1106-preview"
model_type = "gpt-4-turbo"
# print("model_type:", model_type)
lines = open(log_filepath, "r", encoding="utf8").read().split("\n")
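
The two maps above carry the commit's price change. Assuming the values are USD per 1,000 tokens (the convention these figures follow), a hedged sketch of how prompt_cost turns them into a dollar amount, with made-up token counts:

input_cost_map = {"gpt-3.5-turbo": 0.0005, "gpt-4-turbo": 0.01}   # USD per 1K prompt tokens (assumed unit)
output_cost_map = {"gpt-3.5-turbo": 0.0015, "gpt-4-turbo": 0.03}  # USD per 1K completion tokens (assumed unit)

def prompt_cost(model_type: str, num_prompt_tokens: float, num_completion_tokens: float) -> float:
    # Guard mirrors the membership check shown in the diff; -1 is a placeholder for "unknown model" in this sketch.
    if model_type not in input_cost_map or model_type not in output_cost_map:
        return -1
    return (num_prompt_tokens / 1000.0 * input_cost_map[model_type]
            + num_completion_tokens / 1000.0 * output_cost_map[model_type])

print(prompt_cost("gpt-4-turbo", 1200, 800))  # 0.012 + 0.024 = roughly 0.036 USD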

run.py

@@ -79,7 +79,7 @@ parser.add_argument('--task', type=str, default="Develop a basic Gomoku game.",
parser.add_argument('--name', type=str, default="Gomoku",
help="Name of software, your software will be generated in WareHouse/name_org_timestamp")
parser.add_argument('--model', type=str, default="GPT_3_5_TURBO",
help="GPT Model, choose from {'GPT_3_5_TURBO','GPT_4','GPT_4_32K', 'GPT_4_TURBO'}")
help="GPT Model, choose from {'GPT_3_5_TURBO', 'GPT_4', 'GPT_4_TURBO'}")
parser.add_argument('--path', type=str, default="",
help="Your file directory, ChatDev will build upon your software in the Incremental mode")
args = parser.parse_args()
@@ -92,9 +92,9 @@ args = parser.parse_args()
config_path, config_phase_path, config_role_path = get_config(args.config)
args2type = {'GPT_3_5_TURBO': ModelType.GPT_3_5_TURBO,
'GPT_4': ModelType.GPT_4,
- 'GPT_4_32K': ModelType.GPT_4_32k,
+ # 'GPT_4_32K': ModelType.GPT_4_32k,
'GPT_4_TURBO': ModelType.GPT_4_TURBO,
- 'GPT_4_TURBO_V': ModelType.GPT_4_TURBO_V
+ # 'GPT_4_TURBO_V': ModelType.GPT_4_TURBO_V
}
if openai_new_api:
args2type['GPT_3_5_TURBO'] = ModelType.GPT_3_5_TURBO_NEW
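
With GPT_4_32K and GPT_4_TURBO_V commented out, those names can no longer be selected via --model: the lookup that follows (args2type[args.model], as the surrounding code suggests) has no entry for them. A self-contained sketch of the effect, using a stand-in ModelType with the values from the typing diff above:

from enum import Enum

class ModelType(Enum):  # stand-in for the project's enum; illustrative only
    GPT_3_5_TURBO = "gpt-3.5-turbo"
    GPT_4 = "gpt-4"
    GPT_4_TURBO = "gpt-4-turbo"

args2type = {
    'GPT_3_5_TURBO': ModelType.GPT_3_5_TURBO,
    'GPT_4': ModelType.GPT_4,
    'GPT_4_TURBO': ModelType.GPT_4_TURBO,
}

print(args2type['GPT_4_TURBO'])    # ModelType.GPT_4_TURBO
print(args2type.get('GPT_4_32K'))  # None -- the commented-out option no longer resolves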