Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,7 @@ LLM 大模型是用来字幕段句、字幕优化、以及字幕翻译(如果
| -------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- |
| SiliconCloud | [SiliconCloud 官网](https://cloud.siliconflow.cn/i/onCHcaDx)配置方法请参考[配置文档](./docs/llm_config.md)<br>该并发较低,建议把线程设置为5以下。 |
| DeepSeek | [DeepSeek 官网](https://platform.deepseek.com),建议使用 `deepseek-v3` 模型,<br>官方网站最近服务好像并不太稳定。 |
| ModelScope | [ModelScope 官网](https://modelscope.cn/models?filter=inference_type&page=1&tabKey=task),配置方法请参考[配置文档](https://modelscope.cn/docs/model-service/API-Inference/intro)。<br>该服务并发较低,建议把线程设置为 5 以下。 |
| OpenAI兼容接口 | 如果有其他服务商的API,可直接在软件中填写。base_url 和api_key [VideoCaptioner API](https://api.videocaptioner.cn) |

注:如果用的 API 服务商不支持高并发,请在软件设置中将“线程数”调低,避免请求错误。
Expand Down
13 changes: 13 additions & 0 deletions app/common/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,19 @@ class Config(QConfig):
"LLM", "ChatGLM_API_Base", "https://open.bigmodel.cn/api/paas/v4"
)

# ------------------- ModelScope LLM service (OpenAI-compatible) -------------------
# Model identifier used by default on ModelScope's API-Inference endpoint.
modelscope_model = ConfigItem("LLM", "ModelScope_Model", "Qwen/Qwen3-8B")
# ModelScope API key; empty by default — the user must supply their own.
modelscope_api_key = ConfigItem("LLM", "ModelScope_API_Key", "")
# OpenAI-compatible base URL of ModelScope's inference service.
modelscope_api_base = ConfigItem(
    "LLM", "ModelScope_API_Base", "https://api-inference.modelscope.cn/v1"
)
# Extra fields merged into the request body for each chat completion.
# Defaults to disabling the model's "thinking" mode — presumably so
# responses contain plain text only; confirm against ModelScope's
# API-Inference documentation.
modelscope_extra_body = ConfigItem(
    "LLM",
    "ModelScope_Extra_Body",
    {
        "enable_thinking": False,
    },
)

# ------------------- 翻译配置 -------------------
translator_service = OptionsConfigItem(
"Translate",
Expand Down
4 changes: 3 additions & 1 deletion app/core/entities.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import datetime
from dataclasses import dataclass, field
from enum import Enum
from typing import TYPE_CHECKING, Literal, Optional
from typing import TYPE_CHECKING, Literal, Optional, Dict, Any

if TYPE_CHECKING:
from app.core.translate.types import TargetLanguage
Expand Down Expand Up @@ -105,6 +105,7 @@ class LLMServiceEnum(Enum):
LM_STUDIO = "LM Studio"
GEMINI = "Gemini"
CHATGLM = "ChatGLM"
MODELSCOPE = "ModelScope"


class TranscribeModelEnum(Enum):
Expand Down Expand Up @@ -558,6 +559,7 @@ class SubtitleConfig:
base_url: Optional[str] = None
api_key: Optional[str] = None
llm_model: Optional[str] = None
extra_body: Optional[Dict[str, Any]] = None
deeplx_endpoint: Optional[str] = None
# 翻译服务
translator_service: Optional[TranslatorServiceEnum] = None
Expand Down
7 changes: 7 additions & 0 deletions app/core/task_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,7 @@ def create_subtitle_task(
Path(file_path).parent / f"【字幕】{output_name}{suffix}.srt"
)

extra_body = None # 默认值为None
# 根据当前选择的LLM服务获取对应的配置
current_service = cfg.llm_service.value
if current_service == LLMServiceEnum.OPENAI:
Expand Down Expand Up @@ -142,6 +143,11 @@ def create_subtitle_task(
base_url = cfg.chatglm_api_base.value
api_key = cfg.chatglm_api_key.value
llm_model = cfg.chatglm_model.value
elif current_service == LLMServiceEnum.MODELSCOPE:
base_url = cfg.modelscope_api_base.value
api_key = cfg.modelscope_api_key.value
llm_model = cfg.modelscope_model.value
extra_body = cfg.modelscope_extra_body.value
else:
base_url = ""
api_key = ""
Expand All @@ -152,6 +158,7 @@ def create_subtitle_task(
base_url=base_url,
api_key=api_key,
llm_model=llm_model,
extra_body=extra_body,
deeplx_endpoint=cfg.deeplx_endpoint.value,
# 翻译服务
translator_service=cfg.translator_service.value,
Expand Down
8 changes: 8 additions & 0 deletions app/view/setting_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -353,6 +353,14 @@ def __createLLMServiceCards(self):
"default_base": "https://open.bigmodel.cn/api/paas/v4",
"default_models": ["glm-4-plus", "glm-4-air-250414", "glm-4-flash"],
},
LLMServiceEnum.MODELSCOPE: {
"prefix": "modelscope",
"api_key_cfg": cfg.modelscope_api_key,
"api_base_cfg": cfg.modelscope_api_base,
"model_cfg": cfg.modelscope_model,
"default_base": "https://api-inference.modelscope.cn/v1",
"default_models": ["Qwen/Qwen3-8B", "Qwen/Qwen3-30B-A3B-Instruct-2507", "deepseek-ai/DeepSeek-V3.1"],
Copy link

Copilot AI Nov 28, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The model name "Qwen/Qwen3-30B-A3B-Instruct-2507" appears to be non-standard. Based on Qwen's naming conventions, this should likely be verified:

  • Standard Qwen3 models follow patterns like "Qwen/Qwen2.5-72B-Instruct" or "Qwen/Qwen3-8B"
  • The "A3B" designation is unusual and may be a typo or placeholder
  • The "-2507" suffix (likely representing July 2025) seems far in the future

Please verify this model name exists in ModelScope's model registry, or update to a valid model name like "Qwen/Qwen2.5-32B-Instruct" if this was intended.

Suggested change
"default_models": ["Qwen/Qwen3-8B", "Qwen/Qwen3-30B-A3B-Instruct-2507", "deepseek-ai/DeepSeek-V3.1"],
"default_models": ["Qwen/Qwen3-8B", "Qwen/Qwen2.5-32B-Instruct", "deepseek-ai/DeepSeek-V3.1"],

Copilot uses AI. Check for mistakes.
Copy link

Copilot AI Nov 28, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The model name "deepseek-ai/DeepSeek-V3.1" may be incorrect. Based on DeepSeek's release history and the documentation in this repository (which references "deepseek-ai/DeepSeek-V3"), the current latest version is V3, not V3.1.

Please verify this model exists in ModelScope's registry. If it doesn't exist, consider using "deepseek-ai/DeepSeek-V3" instead, which is the verified model name used in the documentation.

Suggested change
"default_models": ["Qwen/Qwen3-8B", "Qwen/Qwen3-30B-A3B-Instruct-2507", "deepseek-ai/DeepSeek-V3.1"],
"default_models": ["Qwen/Qwen3-8B", "Qwen/Qwen3-30B-A3B-Instruct-2507", "deepseek-ai/DeepSeek-V3"],

Copilot uses AI. Check for mistakes.
},
}

# 创建服务配置映射
Expand Down