44 changes: 31 additions & 13 deletions backend/api/v1/settings.py
@@ -17,6 +17,7 @@ class SettingsRequest(BaseModel):
llm_provider: Optional[str] = None
dashscope_api_key: Optional[str] = None
openai_api_key: Optional[str] = None
openai_base_url: Optional[str] = None # Supports a custom URL, used for local models
gemini_api_key: Optional[str] = None
siliconflow_api_key: Optional[str] = None
model_name: Optional[str] = None
@@ -29,12 +30,17 @@ class ApiKeyTestRequest(BaseModel):
provider: str
api_key: str
model_name: str
base_url: Optional[str] = None # Supports a custom URL

class ApiKeyTestResponse(BaseModel):
"""API密钥测试响应"""
success: bool
error: Optional[str] = None

class FetchModelsRequest(BaseModel):
"""获取模型列表请求"""
base_url: str

def get_settings_file_path() -> Path:
"""获取设置文件路径"""
from ...core.path_utils import get_settings_file_path as get_settings_path
@@ -47,6 +53,7 @@ def load_settings() -> Dict[str, Any]:
"llm_provider": "dashscope",
"dashscope_api_key": "",
"openai_api_key": "",
"openai_base_url": "", # 支持自定义URL,用于本地模型
"gemini_api_key": "",
"siliconflow_api_key": "",
"model_name": "qwen-plus",
@@ -104,6 +111,9 @@ async def update_settings(request: SettingsRequest):

if request.openai_api_key is not None:
settings["openai_api_key"] = request.openai_api_key

if request.openai_base_url is not None:
settings["openai_base_url"] = request.openai_base_url

if request.gemini_api_key is not None:
settings["gemini_api_key"] = request.gemini_api_key
@@ -143,24 +153,32 @@ async def test_api_key(request: ApiKeyTestRequest) -> ApiKeyTestResponse:
"""测试API密钥"""
try:
# Import the LLM manager
from ...core.llm_manager import get_llm_manager
from ...core.llm_providers import ProviderType

from ...core.llm_providers import ProviderType, LLMProviderFactory

# Validate the provider type
try:
provider_type = ProviderType(request.provider)
except ValueError:
return ApiKeyTestResponse(success=False, error=f"不支持的提供商类型: {request.provider}")

# Test the connection
llm_manager = get_llm_manager()
success = llm_manager.test_provider_connection(provider_type, request.api_key, request.model_name)

if success:
return ApiKeyTestResponse(success=True)
else:
return ApiKeyTestResponse(success=False, error="API连接测试失败")


# Build extra keyword arguments
extra_kwargs = {}
if provider_type == ProviderType.OPENAI and request.base_url:
extra_kwargs["base_url"] = request.base_url

# Create the provider and test the connection
try:
provider = LLMProviderFactory.create_provider(
provider_type, request.api_key, request.model_name, **extra_kwargs
)
success = provider.test_connection()
if success:
return ApiKeyTestResponse(success=True)
else:
return ApiKeyTestResponse(success=False, error="API连接测试失败")
except Exception as e:
return ApiKeyTestResponse(success=False, error=str(e))

except Exception as e:
return ApiKeyTestResponse(success=False, error=str(e))
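For reference, a minimal sketch of exercising the updated test-api-key endpoint against a local model. The /api/v1 prefix, the backend port, and the model name are assumptions for illustration, not taken from this diff:

import requests

# Test an OpenAI-compatible local server (e.g. Ollama) with a placeholder key
resp = requests.post(
    "http://localhost:8000/api/v1/settings/test-api-key",  # assumed mount point and port
    json={
        "provider": "openai",
        "api_key": "local-model",                  # placeholder; local servers usually ignore it
        "model_name": "llama3",                    # illustrative model name
        "base_url": "http://localhost:11434/v1",   # Ollama's OpenAI-compatible endpoint
    },
)
print(resp.json())  # {"success": true} or {"success": false, "error": "..."}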

47 changes: 34 additions & 13 deletions backend/core/llm_manager.py
@@ -35,6 +35,7 @@ def _load_settings(self) -> Dict[str, Any]:
"llm_provider": "dashscope",
"dashscope_api_key": "",
"openai_api_key": "",
"openai_base_url": "", # 支持自定义URL,用于本地模型
"gemini_api_key": "",
"siliconflow_api_key": "",
"model_name": "qwen-plus",
@@ -68,18 +69,28 @@ def _initialize_provider(self):
try:
provider_type = ProviderType(self.settings.get("llm_provider", "dashscope"))
model_name = self.settings.get("model_name", "qwen-plus")

# Get the API key for the corresponding provider
api_key = self._get_api_key_for_provider(provider_type)


# OpenAI supports local models, so the API key may be empty
extra_kwargs = {}
if provider_type == ProviderType.OPENAI:
base_url = self.settings.get("openai_base_url", "")
if base_url:
extra_kwargs["base_url"] = base_url
# Local models do not need a real API key; use a default value
if not api_key:
api_key = "local-model"

if api_key:
self.current_provider = LLMProviderFactory.create_provider(
provider_type, api_key, model_name
provider_type, api_key, model_name, **extra_kwargs
)
logger.info(f"已初始化{provider_type.value}提供商,模型: {model_name}")
else:
logger.warning(f"未找到{provider_type.value}的API密钥")

except Exception as e:
logger.error(f"初始化提供商失败: {e}")
self.current_provider = None
@@ -104,36 +115,46 @@ def update_settings(self, new_settings: Dict[str, Any]):
self._save_settings()
self._initialize_provider()

def set_provider(self, provider_type: ProviderType, api_key: str, model_name: str):
def set_provider(self, provider_type: ProviderType, api_key: str, model_name: str, base_url: str = None):
"""设置提供商"""
try:
# Update the settings
provider_settings = {
"llm_provider": provider_type.value,
"model_name": model_name
}

# Update the API key for the corresponding provider
key_mapping = {
ProviderType.DASHSCOPE: "dashscope_api_key",
ProviderType.OPENAI: "openai_api_key",
ProviderType.GEMINI: "gemini_api_key",
ProviderType.SILICONFLOW: "siliconflow_api_key",
}

key_name = key_mapping.get(provider_type)
if key_name:
provider_settings[key_name] = api_key

provider_settings[key_name] = api_key or ""

# OpenAI supports a custom base_url
extra_kwargs = {}
if provider_type == ProviderType.OPENAI:
provider_settings["openai_base_url"] = base_url or ""
if base_url:
extra_kwargs["base_url"] = base_url
# Local models do not need a real API key
if not api_key:
api_key = "local-model"

self.update_settings(provider_settings)

# Create a new provider instance
self.current_provider = LLMProviderFactory.create_provider(
provider_type, api_key, model_name
provider_type, api_key or "local-model", model_name, **extra_kwargs
)

logger.info(f"已切换到{provider_type.value}提供商,模型: {model_name}")

except Exception as e:
logger.error(f"设置提供商失败: {e}")
raise
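As a rough usage sketch of the new set_provider signature (the import paths and model name are assumptions, not taken from this PR):

from backend.core.llm_manager import get_llm_manager   # import path assumed
from backend.core.llm_providers import ProviderType

manager = get_llm_manager()
# Switch to a local vLLM server; an empty api_key is replaced with "local-model" internally
manager.set_provider(
    ProviderType.OPENAI,
    api_key="",
    model_name="Qwen2.5-7B-Instruct",        # illustrative model served locally
    base_url="http://localhost:8000/v1",     # vLLM's OpenAI-compatible endpoint
)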
13 changes: 9 additions & 4 deletions backend/core/llm_providers.py
@@ -175,13 +175,18 @@ def get_available_models(self) -> List[ModelInfo]:
]

class OpenAIProvider(LLMProvider):
"""OpenAI提供商"""
def __init__(self, api_key: str, model_name: str = "gpt-3.5-turbo", **kwargs):
"""OpenAI提供商 - 支持自定义base_url以使用本地模型"""

def __init__(self, api_key: str, model_name: str = "gpt-3.5-turbo", base_url: str = None, **kwargs):
super().__init__(api_key, model_name, **kwargs)
self.base_url = base_url
try:
import openai
self.client = openai.OpenAI(api_key=api_key)
# Support a custom base_url for local models (e.g. Ollama, vLLM)
if base_url:
self.client = openai.OpenAI(api_key=api_key, base_url=base_url)
else:
self.client = openai.OpenAI(api_key=api_key)
except ImportError:
raise ImportError("请安装openai: pip install openai")
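A minimal sketch of constructing the provider directly against a local endpoint; the import path and model name are assumptions, while test_connection() is the existing provider method used by the test-api-key endpoint above:

from backend.core.llm_providers import OpenAIProvider   # import path assumed

# Point the OpenAI-compatible client at a local Ollama server
provider = OpenAIProvider(
    api_key="local-model",                    # dummy key; local servers typically ignore it
    model_name="llama3",                      # illustrative local model
    base_url="http://localhost:11434/v1",     # Ollama's OpenAI-compatible endpoint
)
print(provider.test_connection())             # True if the endpoint answers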

1 change: 1 addition & 0 deletions backend/services/simple_progress.py
@@ -3,6 +3,7 @@
Based on the "keep it simple and stable" approach you proposed
"""

import os
import time
import json
import logging
72 changes: 56 additions & 16 deletions frontend/src/pages/SettingsPage.tsx
@@ -25,31 +25,37 @@ const SettingsPage: React.FC = () => {
color: '#1890ff',
description: '阿里云通义千问大模型服务',
apiKeyField: 'dashscope_api_key',
placeholder: '请输入通义千问API密钥'
placeholder: '请输入通义千问API密钥',
supportsCustomUrl: false
},
openai: {
name: 'OpenAI',
icon: <RobotOutlined />,
color: '#52c41a',
description: 'OpenAI GPT系列模型',
description: 'OpenAI GPT系列模型(支持本地模型)',
apiKeyField: 'openai_api_key',
placeholder: '请输入OpenAI API密钥'
placeholder: '请输入OpenAI API密钥',
supportsCustomUrl: true,
baseUrlField: 'openai_base_url',
baseUrlPlaceholder: '可选,如 http://localhost:11434/v1'
},
gemini: {
name: 'Google Gemini',
icon: <RobotOutlined />,
color: '#faad14',
description: 'Google Gemini大模型',
apiKeyField: 'gemini_api_key',
placeholder: '请输入Gemini API密钥'
placeholder: '请输入Gemini API密钥',
supportsCustomUrl: false
},
siliconflow: {
name: '硅基流动',
icon: <RobotOutlined />,
color: '#722ed1',
description: '硅基流动模型服务',
apiKeyField: 'siliconflow_api_key',
placeholder: '请输入硅基流动API密钥'
placeholder: '请输入硅基流动API密钥',
supportsCustomUrl: false
}
}

@@ -95,24 +101,28 @@ const SettingsPage: React.FC = () => {
const handleTestApiKey = async () => {
const apiKey = form.getFieldValue(providerConfig[selectedProvider as keyof typeof providerConfig].apiKeyField)
const modelName = form.getFieldValue('model_name')

if (!apiKey) {
const baseUrl = form.getFieldValue('openai_base_url')

// With OpenAI plus a local model, the API key may be empty
if (!apiKey && !(selectedProvider === 'openai' && baseUrl)) {
message.error('请先输入API密钥')
return
}

if (!modelName) {
message.error('请先选择模型')
message.error('请先输入模型名称')
return
}

try {
setLoading(true)
const result = await settingsApi.testApiKey(selectedProvider, apiKey, modelName)
// Local models use a default key
const testKey = apiKey || 'local-model'
const result = await settingsApi.testApiKey(selectedProvider, testKey, modelName, baseUrl)
if (result.success) {
message.success('API密钥测试成功!')
message.success('连接测试成功!')
} else {
message.error('API密钥测试失败: ' + (result.error || '未知错误'))
message.error('连接测试失败: ' + (result.error || '未知错误'))
}
} catch (error: any) {
message.error('测试失败: ' + (error.message || '未知错误'))
@@ -198,18 +208,44 @@ const SettingsPage: React.FC = () => {
label={`${providerConfig[selectedProvider as keyof typeof providerConfig].name} API Key`}
name={providerConfig[selectedProvider as keyof typeof providerConfig].apiKeyField}
className="form-item"
rules={[
{ required: true, message: '请输入API密钥' },
{ min: 10, message: 'API密钥长度不能少于10位' }
]}
rules={
selectedProvider === 'openai'
? [] // OpenAI supports local models, so the API key is optional
: [
{ required: true, message: '请输入API密钥' },
{ min: 10, message: 'API密钥长度不能少于10位' }
]
}
tooltip={selectedProvider === 'openai' ? '使用本地模型(填写了Base URL)时可留空' : undefined}
>
<Input.Password
placeholder={providerConfig[selectedProvider as keyof typeof providerConfig].placeholder}
placeholder={
selectedProvider === 'openai'
? '使用官方API需填写,本地模型可留空'
: providerConfig[selectedProvider as keyof typeof providerConfig].placeholder
}
prefix={<KeyOutlined />}
className="settings-input"
/>
</Form.Item>

{/* Custom OpenAI Base URL (for local models) */}
{selectedProvider === 'openai' && (
<Form.Item
label="API Base URL"
name="openai_base_url"
className="form-item"
extra="留空使用官方地址: https://api.openai.com/v1"
>
<Input
placeholder="https://api.openai.com/v1"
prefix={<ApiOutlined />}
className="settings-input"
allowClear
/>
</Form.Item>
)}

{/* Model selection */}
<Form.Item
label="选择模型"
@@ -340,6 +376,10 @@ const SettingsPage: React.FC = () => {
<br />• <Text strong>OpenAI</Text>:访问 platform.openai.com 获取API密钥
<br />• <Text strong>Google Gemini</Text>:访问 ai.google.dev 获取API密钥
<br />• <Text strong>硅基流动</Text>:访问 docs.siliconflow.cn 获取API密钥
<br /><br /><Text strong style={{ color: '#52c41a' }}>🔧 本地模型支持</Text>:选择 OpenAI 后,可在 "API Base URL" 中填写本地模型地址:
<br />• <Text code>Ollama</Text>: http://localhost:11434/v1
<br />• <Text code>vLLM</Text>: http://localhost:8000/v1
<br />• <Text code>其他兼容 OpenAI API 的服务</Text>
</Paragraph>
</div>
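For orientation, after saving a local-model configuration from this page, the settings dict handled by load_settings()/update_settings() would hold roughly the following; only the field names come from this PR, the values are illustrative:

settings = {
    "llm_provider": "openai",
    "openai_api_key": "",                             # may stay empty for local models
    "openai_base_url": "http://localhost:11434/v1",   # Ollama's OpenAI-compatible endpoint
    "model_name": "llama3",                           # illustrative local model name
}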

11 changes: 6 additions & 5 deletions frontend/src/services/api.ts
@@ -128,11 +128,12 @@ export const settingsApi = {
},

// Test the API key
testApiKey: (provider: string, apiKey: string, modelName: string): Promise<{ success: boolean; error?: string }> => {
return api.post('/settings/test-api-key', {
provider,
api_key: apiKey,
model_name: modelName
testApiKey: (provider: string, apiKey: string, modelName: string, baseUrl?: string): Promise<{ success: boolean; error?: string }> => {
return api.post('/settings/test-api-key', {
provider,
api_key: apiKey,
model_name: modelName,
base_url: baseUrl || ''
})
},
