Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,11 @@
# 模型的API Key。
OPENAI_API_KEY=sk-...

# (可选) Gemini API Key。若未设置 OPENAI_API_KEY,将自动使用 Gemini OpenAI 兼容端点。
# 推荐与 OPENAI_BASE_URL / OPENAI_MODEL_NAME 配合使用;不填时会自动回退到
# https://generativelanguage.googleapis.com/v1beta/openai/ 和 gemini-2.5-flash
GEMINI_API_KEY=

# 模型的API接口地址。这里需要填写服务商提供的、兼容OpenAI格式的API地址
# 可查阅你使用的大模型API文档,如格式为 https://xx.xx.com/v1/chat/completions 则OPENAI_BASE_URL只需要填入前半段 https://xx.xx.com/v1/
OPENAI_BASE_URL=https://api-inference.modelscope.cn/v1/
Expand Down Expand Up @@ -77,4 +82,4 @@ WEBHOOK_CONTENT_TYPE="JSON"
# GET请求的查询参数 (JSON格式, 支持 {{title}}, {{content}} 占位符)
WEBHOOK_QUERY_PARAMETERS='{"title":"{{title}}","content":"{{content}}"}'
# POST请求的请求体 (JSON格式, 支持 {{title}}, {{content}} 占位符)
WEBHOOK_BODY='{"title":"{{title}}","content":"{{content}}"}'
WEBHOOK_BODY='{"title":"{{title}}","content":"{{content}}"}'
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ cp .env.example .env
| 变量 | 说明 | 必填 |
|------|------|------|
| `OPENAI_API_KEY` | AI 模型 API Key | 是 |
| `GEMINI_API_KEY` | Gemini API Key(当未设置 `OPENAI_API_KEY` 时可直接使用) | 否 |
| `OPENAI_BASE_URL` | API 接口地址(兼容 OpenAI 格式) | 是 |
| `OPENAI_MODEL_NAME` | 多模态模型名称(如 `gpt-4o`) | 是 |
| `WEB_USERNAME` / `WEB_PASSWORD` | Web 界面登录凭据(默认 `admin` / `admin123`) | 否 |
Expand All @@ -56,6 +57,9 @@ cp .env.example .env

完整配置项参考 `.env.example`

> 使用 `GEMINI_API_KEY` 时,若未手动填写 `OPENAI_BASE_URL` 和 `OPENAI_MODEL_NAME`,系统会自动使用
> `https://generativelanguage.googleapis.com/v1beta/openai/` 与 `gemini-2.5-flash`。

3. **启动服务**

```bash
Expand Down
122 changes: 93 additions & 29 deletions src/api/routes/settings.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""
设置管理路由
"""

import os
from typing import Optional

Expand All @@ -12,6 +13,8 @@
from src.infrastructure.config.env_manager import env_manager
from src.infrastructure.config.settings import (
AISettings,
GEMINI_DEFAULT_MODEL_NAME,
GEMINI_OPENAI_COMPAT_BASE_URL,
notification_settings,
reload_settings,
scraper_settings,
Expand All @@ -21,10 +24,12 @@

router = APIRouter(prefix="/api/settings", tags=["settings"])


def _reload_env() -> None:
    """Re-read the managed .env file (overriding current process env vars)
    and rebuild the application settings objects from the fresh values."""
    load_dotenv(dotenv_path=env_manager.env_file, override=True)
    reload_settings()


def _env_bool(key: str, default: bool = False) -> bool:
value = env_manager.get_value(key)
if value is None:
Expand All @@ -48,6 +53,7 @@ def _normalize_bool_value(value: bool) -> str:

class NotificationSettingsModel(BaseModel):
"""通知设置模型"""

NTFY_TOPIC_URL: Optional[str] = None
GOTIFY_URL: Optional[str] = None
GOTIFY_TOKEN: Optional[str] = None
Expand All @@ -66,7 +72,9 @@ class NotificationSettingsModel(BaseModel):

class AISettingsModel(BaseModel):
"""AI设置模型"""

OPENAI_API_KEY: Optional[str] = None
GEMINI_API_KEY: Optional[str] = None
OPENAI_BASE_URL: Optional[str] = None
OPENAI_MODEL_NAME: Optional[str] = None
SKIP_AI_ANALYSIS: Optional[bool] = None
Expand Down Expand Up @@ -120,6 +128,7 @@ async def update_notification_settings(
return {"message": "通知设置已成功更新"}
return {"message": "更新通知设置失败"}


@router.get("/rotation")
async def get_rotation_settings():
return {
Expand Down Expand Up @@ -162,6 +171,7 @@ async def get_system_status(

# 检查关键环境变量是否设置
openai_api_key = env_manager.get_value("OPENAI_API_KEY", "")
gemini_api_key = env_manager.get_value("GEMINI_API_KEY", "")
openai_base_url = env_manager.get_value("OPENAI_BASE_URL", "")
openai_model_name = env_manager.get_value("OPENAI_MODEL_NAME", "")
ntfy_topic_url = env_manager.get_value("NTFY_TOPIC_URL", "")
Expand All @@ -180,35 +190,37 @@ async def get_system_status(
"running_in_docker": scraper_settings.running_in_docker,
"scraper_running": len(running_task_ids) > 0,
"running_task_ids": running_task_ids,
"login_state_file": {
"exists": login_state_exists,
"path": state_file
},
"login_state_file": {"exists": login_state_exists, "path": state_file},
"env_file": {
"exists": env_file_exists,
"openai_api_key_set": bool(openai_api_key),
"gemini_api_key_set": bool(gemini_api_key),
"openai_base_url_set": bool(openai_base_url),
"openai_model_name_set": bool(openai_model_name),
"ntfy_topic_url_set": bool(ntfy_topic_url)
}
"ntfy_topic_url_set": bool(ntfy_topic_url),
},
}


class AISettingsModel(BaseModel):
    """AI settings model.

    NOTE(review): this re-declares ``AISettingsModel`` already defined earlier
    in this module and shadows that definition; unlike the earlier one it has
    no ``GEMINI_API_KEY`` field — presumably a leftover duplicate. Confirm and
    remove one of the two definitions.
    """

    # API key for the OpenAI-compatible endpoint.
    OPENAI_API_KEY: Optional[str] = None
    # Base URL of the OpenAI-compatible API.
    OPENAI_BASE_URL: Optional[str] = None
    # Multimodal model name (e.g. "gpt-4o").
    OPENAI_MODEL_NAME: Optional[str] = None
    # When true, AI analysis is skipped entirely.
    SKIP_AI_ANALYSIS: Optional[bool] = None

@router.get("/ai")
async def get_ai_settings():
    """Return the current AI configuration for the web UI.

    When only ``GEMINI_API_KEY`` is configured (i.e. ``OPENAI_API_KEY`` is
    empty), the response falls back to the Gemini OpenAI-compatible base URL
    and the default Gemini model so the UI displays the values the backend
    will actually use.

    Returns:
        dict with ``OPENAI_BASE_URL``, ``OPENAI_MODEL_NAME`` and the boolean
        ``SKIP_AI_ANALYSIS`` flag.
    """
    openai_api_key = env_manager.get_value("OPENAI_API_KEY", "")
    gemini_api_key = env_manager.get_value("GEMINI_API_KEY", "")
    openai_base_url = env_manager.get_value("OPENAI_BASE_URL", "")
    openai_model_name = env_manager.get_value("OPENAI_MODEL_NAME", "")

    # Apply Gemini defaults only when Gemini is the sole configured provider;
    # explicit OPENAI_* values always take precedence.
    if gemini_api_key and not openai_api_key:
        openai_base_url = openai_base_url or GEMINI_OPENAI_COMPAT_BASE_URL
        openai_model_name = openai_model_name or GEMINI_DEFAULT_MODEL_NAME

    # Fix: the previous version emitted stale duplicate dict keys (leftover
    # pre-fallback entries) that were silently shadowed by the later ones.
    return {
        "OPENAI_BASE_URL": openai_base_url,
        "OPENAI_MODEL_NAME": openai_model_name,
        "SKIP_AI_ANALYSIS": str(env_manager.get_value("SKIP_AI_ANALYSIS", "false")).lower()
        == "true",
    }


Expand All @@ -218,15 +230,36 @@ async def update_ai_settings(
):
"""更新AI设置"""
updates = {}
current_openai_api_key = env_manager.get_value("OPENAI_API_KEY", "")
current_gemini_api_key = env_manager.get_value("GEMINI_API_KEY", "")

if settings.OPENAI_API_KEY is not None:
updates["OPENAI_API_KEY"] = settings.OPENAI_API_KEY
if settings.GEMINI_API_KEY is not None:
updates["GEMINI_API_KEY"] = settings.GEMINI_API_KEY
if settings.OPENAI_BASE_URL is not None:
updates["OPENAI_BASE_URL"] = settings.OPENAI_BASE_URL
if settings.OPENAI_MODEL_NAME is not None:
updates["OPENAI_MODEL_NAME"] = settings.OPENAI_MODEL_NAME
if settings.SKIP_AI_ANALYSIS is not None:
updates["SKIP_AI_ANALYSIS"] = str(settings.SKIP_AI_ANALYSIS).lower()

submitted_openai_api_key = (
settings.OPENAI_API_KEY if settings.OPENAI_API_KEY is not None else current_openai_api_key
)
submitted_gemini_api_key = (
settings.GEMINI_API_KEY if settings.GEMINI_API_KEY is not None else current_gemini_api_key
)

use_gemini_defaults = bool(submitted_gemini_api_key and not submitted_openai_api_key)
if use_gemini_defaults:
if settings.OPENAI_BASE_URL is None and not env_manager.get_value("OPENAI_BASE_URL", ""):
updates["OPENAI_BASE_URL"] = GEMINI_OPENAI_COMPAT_BASE_URL
if settings.OPENAI_MODEL_NAME is None and not env_manager.get_value(
"OPENAI_MODEL_NAME", ""
):
updates["OPENAI_MODEL_NAME"] = GEMINI_DEFAULT_MODEL_NAME

success = env_manager.update_values(updates)
if success:
_reload_env()
Expand All @@ -244,13 +277,49 @@ async def test_ai_settings(
import httpx

stored_api_key = env_manager.get_value("OPENAI_API_KEY", "")
stored_gemini_api_key = env_manager.get_value("GEMINI_API_KEY", "")
submitted_api_key = settings.get("OPENAI_API_KEY", "")
api_key = submitted_api_key or stored_api_key
submitted_gemini_api_key = settings.get("GEMINI_API_KEY", "")

submitted_base_url = settings.get("OPENAI_BASE_URL", "")
submitted_model_name = settings.get("OPENAI_MODEL_NAME", "")
stored_base_url = env_manager.get_value("OPENAI_BASE_URL", "")
stored_model_name = env_manager.get_value("OPENAI_MODEL_NAME", "")
submitted_or_stored_openai = submitted_api_key or stored_api_key
submitted_or_stored_gemini = submitted_gemini_api_key or stored_gemini_api_key

resolved_base_url = submitted_base_url or stored_base_url
if not resolved_base_url and submitted_or_stored_gemini and not submitted_or_stored_openai:
resolved_base_url = GEMINI_OPENAI_COMPAT_BASE_URL

resolved_model_name = submitted_model_name or stored_model_name
if (
not resolved_model_name
and submitted_or_stored_gemini
and not submitted_or_stored_openai
):
resolved_model_name = GEMINI_DEFAULT_MODEL_NAME

is_gemini_endpoint = "generativelanguage.googleapis.com" in resolved_base_url
if is_gemini_endpoint:
api_key = (
submitted_gemini_api_key
or stored_gemini_api_key
or submitted_api_key
or stored_api_key
)
else:
api_key = (
submitted_api_key
or stored_api_key
or submitted_gemini_api_key
or stored_gemini_api_key
)

# 创建OpenAI客户端
client_params = {
"api_key": api_key,
"base_url": settings.get("OPENAI_BASE_URL", ""),
"base_url": resolved_base_url,
"timeout": httpx.Timeout(30.0),
}

Expand All @@ -259,27 +328,22 @@ async def test_ai_settings(
if proxy_url:
client_params["http_client"] = httpx.Client(proxy=proxy_url)

model_name = settings.get("OPENAI_MODEL_NAME", "")
model_name = resolved_model_name
print(f"AI测试 - BASE_URL: {client_params['base_url']}, MODEL: {model_name}")

client = OpenAI(**client_params)

# 测试连接
response = client.chat.completions.create(
model=model_name,
messages=[
{"role": "user", "content": "Hello, this is a test message."}
],
max_tokens=10
messages=[{"role": "user", "content": "Hello, this is a test message."}],
max_tokens=10,
)

return {
"success": True,
"message": "AI模型连接测试成功!",
"response": response.choices[0].message.content if response.choices else "No response"
"response": response.choices[0].message.content if response.choices else "No response",
}
except Exception as e:
return {
"success": False,
"message": f"AI模型连接测试失败: {str(e)}"
}
return {"success": False, "message": f"AI模型连接测试失败: {str(e)}"}
Loading