Mirrored from https://github.com/binary-husky/gpt_academic.git
Synced 2025-12-06 14:36:48 +00:00
add can_multi_thread
@@ -282,6 +282,7 @@ model_info = {
         "fn_with_ui": chatgpt_ui,
         "fn_without_ui": chatgpt_noui,
         "endpoint": openai_endpoint,
+        "can_multi_thread": True,
         "max_token": 128000,
         "tokenizer": tokenizer_gpt4,
         "token_cnt": get_token_num_gpt4,
@@ -351,6 +352,7 @@ model_info = {
         "fn_with_ui": chatgpt_ui,
         "fn_without_ui": chatgpt_noui,
         "has_multimodal_capacity": True,
+        "can_multi_thread": True,
         "endpoint": openai_endpoint,
         "max_token": 828000,
         "tokenizer": tokenizer_gpt4,
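For context, a minimal sketch of how a caller might consult the new "can_multi_thread" flag before issuing concurrent requests. This is not the repository's actual dispatch code; the helper name supports_multi_thread, the worker count, and the conservative default (a missing key is treated as False) are assumptions.

    from concurrent.futures import ThreadPoolExecutor

    def supports_multi_thread(model_info: dict, model: str) -> bool:
        # Assumed convention: models that do not declare the capability
        # are handled single-threaded.
        return model_info.get(model, {}).get("can_multi_thread", False)

    def run_queries(model_info: dict, model: str, prompts: list, ask_fn):
        # Fan out across threads only when the model entry is marked as
        # safe for concurrent calls; otherwise fall back to a serial loop.
        if supports_multi_thread(model_info, model):
            with ThreadPoolExecutor(max_workers=8) as pool:
                return list(pool.map(lambda p: ask_fn(model, p), prompts))
        return [ask_fn(model, p) for p in prompts]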