mirror of
https://github.com/TheSmallHanCat/sora2api.git
synced 2026-03-14 16:14:44 +08:00
feat: 新增图片上传专用代理,完善轮询与POW调用链路,重构POW与token传递
This commit is contained in:
@@ -40,6 +40,8 @@ auto_disable_on_401 = true
|
|||||||
[proxy]
|
[proxy]
|
||||||
proxy_enabled = false
|
proxy_enabled = false
|
||||||
proxy_url = ""
|
proxy_url = ""
|
||||||
|
image_upload_proxy_enabled = false
|
||||||
|
image_upload_proxy_url = ""
|
||||||
|
|
||||||
[watermark_free]
|
[watermark_free]
|
||||||
watermark_free_enabled = false
|
watermark_free_enabled = false
|
||||||
@@ -63,7 +65,7 @@ timezone_offset = 8
|
|||||||
[pow_service]
|
[pow_service]
|
||||||
# beta测试,目前仍处于测试阶段
|
# beta测试,目前仍处于测试阶段
|
||||||
# POW 计算模式:local(本地计算)或 external(外部服务)
|
# POW 计算模式:local(本地计算)或 external(外部服务)
|
||||||
mode = "external"
|
mode = "local"
|
||||||
# 是否使用对应 token 进行 POW 计算(默认关闭)
|
# 是否使用对应 token 进行 POW 计算(默认关闭)
|
||||||
# local 模式开启后会使用当前轮询 token 获取 POW
|
# local 模式开启后会使用当前轮询 token 获取 POW
|
||||||
# external 模式开启后会向外部服务传递 accesstoken 字段
|
# external 模式开启后会向外部服务传递 accesstoken 字段
|
||||||
|
|||||||
@@ -35,6 +35,8 @@ error_ban_threshold = 3
|
|||||||
[proxy]
|
[proxy]
|
||||||
proxy_enabled = true
|
proxy_enabled = true
|
||||||
proxy_url = "socks5://warp:1080"
|
proxy_url = "socks5://warp:1080"
|
||||||
|
image_upload_proxy_enabled = false
|
||||||
|
image_upload_proxy_url = ""
|
||||||
|
|
||||||
[watermark_free]
|
[watermark_free]
|
||||||
watermark_free_enabled = false
|
watermark_free_enabled = false
|
||||||
|
|||||||
@@ -129,8 +129,10 @@ class UpdateAdminConfigRequest(BaseModel):
|
|||||||
auto_disable_on_401: Optional[bool] = None
|
auto_disable_on_401: Optional[bool] = None
|
||||||
|
|
||||||
class UpdateProxyConfigRequest(BaseModel):
|
class UpdateProxyConfigRequest(BaseModel):
|
||||||
proxy_enabled: bool
|
proxy_enabled: Optional[bool] = None
|
||||||
proxy_url: Optional[str] = None
|
proxy_url: Optional[str] = None
|
||||||
|
image_upload_proxy_enabled: Optional[bool] = None
|
||||||
|
image_upload_proxy_url: Optional[str] = None
|
||||||
|
|
||||||
class TestProxyRequest(BaseModel):
|
class TestProxyRequest(BaseModel):
|
||||||
test_url: Optional[str] = "https://sora.chatgpt.com"
|
test_url: Optional[str] = "https://sora.chatgpt.com"
|
||||||
@@ -166,6 +168,7 @@ class UpdateWatermarkFreeConfigRequest(BaseModel):
|
|||||||
class UpdateCallLogicConfigRequest(BaseModel):
|
class UpdateCallLogicConfigRequest(BaseModel):
|
||||||
call_mode: Optional[str] = None # "default" or "polling"
|
call_mode: Optional[str] = None # "default" or "polling"
|
||||||
polling_mode_enabled: Optional[bool] = None # Legacy support
|
polling_mode_enabled: Optional[bool] = None # Legacy support
|
||||||
|
poll_interval: Optional[float] = None # Progress polling interval (seconds)
|
||||||
|
|
||||||
class UpdatePowProxyConfigRequest(BaseModel):
|
class UpdatePowProxyConfigRequest(BaseModel):
|
||||||
pow_proxy_enabled: bool
|
pow_proxy_enabled: bool
|
||||||
@@ -943,7 +946,9 @@ async def get_proxy_config(token: str = Depends(verify_admin_token)) -> dict:
|
|||||||
config = await proxy_manager.get_proxy_config()
|
config = await proxy_manager.get_proxy_config()
|
||||||
return {
|
return {
|
||||||
"proxy_enabled": config.proxy_enabled,
|
"proxy_enabled": config.proxy_enabled,
|
||||||
"proxy_url": config.proxy_url
|
"proxy_url": config.proxy_url,
|
||||||
|
"image_upload_proxy_enabled": config.image_upload_proxy_enabled,
|
||||||
|
"image_upload_proxy_url": config.image_upload_proxy_url
|
||||||
}
|
}
|
||||||
|
|
||||||
@router.post("/api/proxy/config")
|
@router.post("/api/proxy/config")
|
||||||
@@ -953,7 +958,26 @@ async def update_proxy_config(
|
|||||||
):
|
):
|
||||||
"""Update proxy configuration"""
|
"""Update proxy configuration"""
|
||||||
try:
|
try:
|
||||||
await proxy_manager.update_proxy_config(request.proxy_enabled, request.proxy_url)
|
current_config = await proxy_manager.get_proxy_config()
|
||||||
|
proxy_enabled = current_config.proxy_enabled if request.proxy_enabled is None else request.proxy_enabled
|
||||||
|
proxy_url = current_config.proxy_url if request.proxy_url is None else request.proxy_url
|
||||||
|
image_upload_proxy_enabled = (
|
||||||
|
current_config.image_upload_proxy_enabled
|
||||||
|
if request.image_upload_proxy_enabled is None
|
||||||
|
else request.image_upload_proxy_enabled
|
||||||
|
)
|
||||||
|
image_upload_proxy_url = (
|
||||||
|
current_config.image_upload_proxy_url
|
||||||
|
if request.image_upload_proxy_url is None
|
||||||
|
else request.image_upload_proxy_url
|
||||||
|
)
|
||||||
|
|
||||||
|
await proxy_manager.update_proxy_config(
|
||||||
|
proxy_enabled,
|
||||||
|
proxy_url,
|
||||||
|
image_upload_proxy_enabled,
|
||||||
|
image_upload_proxy_url
|
||||||
|
)
|
||||||
return {"success": True, "message": "Proxy configuration updated"}
|
return {"success": True, "message": "Proxy configuration updated"}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise HTTPException(status_code=400, detail=str(e))
|
raise HTTPException(status_code=400, detail=str(e))
|
||||||
@@ -1350,11 +1374,19 @@ async def get_call_logic_config(token: str = Depends(verify_admin_token)) -> dic
|
|||||||
call_mode = getattr(config_obj, "call_mode", None)
|
call_mode = getattr(config_obj, "call_mode", None)
|
||||||
if call_mode not in ("default", "polling"):
|
if call_mode not in ("default", "polling"):
|
||||||
call_mode = "polling" if config_obj.polling_mode_enabled else "default"
|
call_mode = "polling" if config_obj.polling_mode_enabled else "default"
|
||||||
|
poll_interval = getattr(config_obj, "poll_interval", 2.5)
|
||||||
|
try:
|
||||||
|
poll_interval = float(poll_interval)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
poll_interval = 2.5
|
||||||
|
if poll_interval <= 0:
|
||||||
|
poll_interval = 2.5
|
||||||
return {
|
return {
|
||||||
"success": True,
|
"success": True,
|
||||||
"config": {
|
"config": {
|
||||||
"call_mode": call_mode,
|
"call_mode": call_mode,
|
||||||
"polling_mode_enabled": call_mode == "polling"
|
"polling_mode_enabled": call_mode == "polling",
|
||||||
|
"poll_interval": poll_interval
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1371,13 +1403,26 @@ async def update_call_logic_config(
|
|||||||
if call_mode is None:
|
if call_mode is None:
|
||||||
raise HTTPException(status_code=400, detail="Invalid call_mode")
|
raise HTTPException(status_code=400, detail="Invalid call_mode")
|
||||||
|
|
||||||
await db.update_call_logic_config(call_mode)
|
poll_interval = request.poll_interval
|
||||||
|
if poll_interval is not None:
|
||||||
|
try:
|
||||||
|
poll_interval = float(poll_interval)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
raise HTTPException(status_code=400, detail="poll_interval must be a valid number")
|
||||||
|
if poll_interval <= 0:
|
||||||
|
raise HTTPException(status_code=400, detail="poll_interval must be greater than 0")
|
||||||
|
|
||||||
|
await db.update_call_logic_config(call_mode, poll_interval)
|
||||||
config.set_call_logic_mode(call_mode)
|
config.set_call_logic_mode(call_mode)
|
||||||
|
if poll_interval is not None:
|
||||||
|
config.set_poll_interval(poll_interval)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"success": True,
|
"success": True,
|
||||||
"message": "Call logic configuration updated",
|
"message": "Call logic configuration updated",
|
||||||
"call_mode": call_mode,
|
"call_mode": call_mode,
|
||||||
"polling_mode_enabled": call_mode == "polling"
|
"polling_mode_enabled": call_mode == "polling",
|
||||||
|
"poll_interval": config.poll_interval
|
||||||
}
|
}
|
||||||
except HTTPException:
|
except HTTPException:
|
||||||
raise
|
raise
|
||||||
|
|||||||
@@ -56,6 +56,12 @@ class Config:
|
|||||||
@property
|
@property
|
||||||
def poll_interval(self) -> float:
|
def poll_interval(self) -> float:
|
||||||
return self._config["sora"]["poll_interval"]
|
return self._config["sora"]["poll_interval"]
|
||||||
|
|
||||||
|
def set_poll_interval(self, interval: float):
|
||||||
|
"""Set task progress polling interval in seconds"""
|
||||||
|
if "sora" not in self._config:
|
||||||
|
self._config["sora"] = {}
|
||||||
|
self._config["sora"]["poll_interval"] = float(interval)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def max_poll_attempts(self) -> int:
|
def max_poll_attempts(self) -> int:
|
||||||
|
|||||||
@@ -83,18 +83,25 @@ class Database:
|
|||||||
# Get proxy config from config_dict if provided, otherwise use defaults
|
# Get proxy config from config_dict if provided, otherwise use defaults
|
||||||
proxy_enabled = False
|
proxy_enabled = False
|
||||||
proxy_url = None
|
proxy_url = None
|
||||||
|
image_upload_proxy_enabled = False
|
||||||
|
image_upload_proxy_url = None
|
||||||
|
|
||||||
if config_dict:
|
if config_dict:
|
||||||
proxy_config = config_dict.get("proxy", {})
|
proxy_config = config_dict.get("proxy", {})
|
||||||
proxy_enabled = proxy_config.get("proxy_enabled", False)
|
proxy_enabled = proxy_config.get("proxy_enabled", False)
|
||||||
proxy_url = proxy_config.get("proxy_url", "")
|
proxy_url = proxy_config.get("proxy_url", "")
|
||||||
|
image_upload_proxy_enabled = proxy_config.get("image_upload_proxy_enabled", False)
|
||||||
|
image_upload_proxy_url = proxy_config.get("image_upload_proxy_url", "")
|
||||||
# Convert empty string to None
|
# Convert empty string to None
|
||||||
proxy_url = proxy_url if proxy_url else None
|
proxy_url = proxy_url if proxy_url else None
|
||||||
|
image_upload_proxy_url = image_upload_proxy_url if image_upload_proxy_url else None
|
||||||
|
|
||||||
await db.execute("""
|
await db.execute("""
|
||||||
INSERT INTO proxy_config (id, proxy_enabled, proxy_url)
|
INSERT INTO proxy_config (
|
||||||
VALUES (1, ?, ?)
|
id, proxy_enabled, proxy_url, image_upload_proxy_enabled, image_upload_proxy_url
|
||||||
""", (proxy_enabled, proxy_url))
|
)
|
||||||
|
VALUES (1, ?, ?, ?, ?)
|
||||||
|
""", (proxy_enabled, proxy_url, image_upload_proxy_enabled, image_upload_proxy_url))
|
||||||
|
|
||||||
# Ensure watermark_free_config has a row
|
# Ensure watermark_free_config has a row
|
||||||
cursor = await db.execute("SELECT COUNT(*) FROM watermark_free_config")
|
cursor = await db.execute("SELECT COUNT(*) FROM watermark_free_config")
|
||||||
@@ -187,6 +194,7 @@ class Database:
|
|||||||
# Get call logic config from config_dict if provided, otherwise use defaults
|
# Get call logic config from config_dict if provided, otherwise use defaults
|
||||||
call_mode = "default"
|
call_mode = "default"
|
||||||
polling_mode_enabled = False
|
polling_mode_enabled = False
|
||||||
|
poll_interval = 2.5
|
||||||
|
|
||||||
if config_dict:
|
if config_dict:
|
||||||
call_logic_config = config_dict.get("call_logic", {})
|
call_logic_config = config_dict.get("call_logic", {})
|
||||||
@@ -199,10 +207,22 @@ class Database:
|
|||||||
else:
|
else:
|
||||||
polling_mode_enabled = call_mode == "polling"
|
polling_mode_enabled = call_mode == "polling"
|
||||||
|
|
||||||
|
sora_config = config_dict.get("sora", {})
|
||||||
|
poll_interval = sora_config.get("poll_interval", 2.5)
|
||||||
|
if "poll_interval" in call_logic_config:
|
||||||
|
poll_interval = call_logic_config.get("poll_interval", poll_interval)
|
||||||
|
|
||||||
|
try:
|
||||||
|
poll_interval = float(poll_interval)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
poll_interval = 2.5
|
||||||
|
if poll_interval <= 0:
|
||||||
|
poll_interval = 2.5
|
||||||
|
|
||||||
await db.execute("""
|
await db.execute("""
|
||||||
INSERT INTO call_logic_config (id, call_mode, polling_mode_enabled)
|
INSERT INTO call_logic_config (id, call_mode, polling_mode_enabled, poll_interval)
|
||||||
VALUES (1, ?, ?)
|
VALUES (1, ?, ?, ?)
|
||||||
""", (call_mode, polling_mode_enabled))
|
""", (call_mode, polling_mode_enabled, poll_interval))
|
||||||
|
|
||||||
# Ensure pow_proxy_config has a row
|
# Ensure pow_proxy_config has a row
|
||||||
cursor = await db.execute("SELECT COUNT(*) FROM pow_proxy_config")
|
cursor = await db.execute("SELECT COUNT(*) FROM pow_proxy_config")
|
||||||
@@ -321,6 +341,42 @@ class Database:
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f" ✗ Failed to add column '{col_name}': {e}")
|
print(f" ✗ Failed to add column '{col_name}': {e}")
|
||||||
|
|
||||||
|
# Check and add missing columns to proxy_config table
|
||||||
|
if await self._table_exists(db, "proxy_config"):
|
||||||
|
added_image_upload_proxy_enabled_column = False
|
||||||
|
added_image_upload_proxy_url_column = False
|
||||||
|
columns_to_add = [
|
||||||
|
("image_upload_proxy_enabled", "BOOLEAN DEFAULT 0"),
|
||||||
|
("image_upload_proxy_url", "TEXT"),
|
||||||
|
]
|
||||||
|
|
||||||
|
for col_name, col_type in columns_to_add:
|
||||||
|
if not await self._column_exists(db, "proxy_config", col_name):
|
||||||
|
try:
|
||||||
|
await db.execute(f"ALTER TABLE proxy_config ADD COLUMN {col_name} {col_type}")
|
||||||
|
print(f" ✓ Added column '{col_name}' to proxy_config table")
|
||||||
|
if col_name == "image_upload_proxy_enabled":
|
||||||
|
added_image_upload_proxy_enabled_column = True
|
||||||
|
if col_name == "image_upload_proxy_url":
|
||||||
|
added_image_upload_proxy_url_column = True
|
||||||
|
except Exception as e:
|
||||||
|
print(f" ✗ Failed to add column '{col_name}': {e}")
|
||||||
|
|
||||||
|
# On upgrade, initialize value from setting.toml only when columns are newly added
|
||||||
|
if config_dict and (added_image_upload_proxy_enabled_column or added_image_upload_proxy_url_column):
|
||||||
|
try:
|
||||||
|
proxy_config = config_dict.get("proxy", {})
|
||||||
|
image_upload_proxy_enabled = proxy_config.get("image_upload_proxy_enabled", False)
|
||||||
|
image_upload_proxy_url = proxy_config.get("image_upload_proxy_url", "")
|
||||||
|
image_upload_proxy_url = image_upload_proxy_url if image_upload_proxy_url else None
|
||||||
|
await db.execute("""
|
||||||
|
UPDATE proxy_config
|
||||||
|
SET image_upload_proxy_enabled = ?, image_upload_proxy_url = ?
|
||||||
|
WHERE id = 1
|
||||||
|
""", (image_upload_proxy_enabled, image_upload_proxy_url))
|
||||||
|
except Exception as e:
|
||||||
|
print(f" ✗ Failed to initialize image upload proxy config from config: {e}")
|
||||||
|
|
||||||
# Check and add missing columns to pow_service_config table
|
# Check and add missing columns to pow_service_config table
|
||||||
if await self._table_exists(db, "pow_service_config"):
|
if await self._table_exists(db, "pow_service_config"):
|
||||||
added_use_token_for_pow_column = False
|
added_use_token_for_pow_column = False
|
||||||
@@ -350,6 +406,38 @@ class Database:
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f" ✗ Failed to initialize use_token_for_pow from config: {e}")
|
print(f" ✗ Failed to initialize use_token_for_pow from config: {e}")
|
||||||
|
|
||||||
|
# Check and add missing columns to call_logic_config table
|
||||||
|
if await self._table_exists(db, "call_logic_config"):
|
||||||
|
added_poll_interval_column = False
|
||||||
|
columns_to_add = [
|
||||||
|
("poll_interval", "REAL DEFAULT 2.5"),
|
||||||
|
]
|
||||||
|
|
||||||
|
for col_name, col_type in columns_to_add:
|
||||||
|
if not await self._column_exists(db, "call_logic_config", col_name):
|
||||||
|
try:
|
||||||
|
await db.execute(f"ALTER TABLE call_logic_config ADD COLUMN {col_name} {col_type}")
|
||||||
|
print(f" ✓ Added column '{col_name}' to call_logic_config table")
|
||||||
|
if col_name == "poll_interval":
|
||||||
|
added_poll_interval_column = True
|
||||||
|
except Exception as e:
|
||||||
|
print(f" ✗ Failed to add column '{col_name}': {e}")
|
||||||
|
|
||||||
|
# On upgrade, initialize value from setting.toml only when this column is newly added
|
||||||
|
if config_dict and added_poll_interval_column:
|
||||||
|
try:
|
||||||
|
poll_interval = config_dict.get("sora", {}).get("poll_interval", 2.5)
|
||||||
|
poll_interval = float(poll_interval)
|
||||||
|
if poll_interval <= 0:
|
||||||
|
poll_interval = 2.5
|
||||||
|
await db.execute("""
|
||||||
|
UPDATE call_logic_config
|
||||||
|
SET poll_interval = ?
|
||||||
|
WHERE id = 1
|
||||||
|
""", (poll_interval,))
|
||||||
|
except Exception as e:
|
||||||
|
print(f" ✗ Failed to initialize poll_interval from config: {e}")
|
||||||
|
|
||||||
# Check and add missing columns to watermark_free_config table
|
# Check and add missing columns to watermark_free_config table
|
||||||
if await self._table_exists(db, "watermark_free_config"):
|
if await self._table_exists(db, "watermark_free_config"):
|
||||||
columns_to_add = [
|
columns_to_add = [
|
||||||
@@ -389,8 +477,13 @@ class Database:
|
|||||||
await db.commit()
|
await db.commit()
|
||||||
print("Database migration check completed.")
|
print("Database migration check completed.")
|
||||||
|
|
||||||
async def init_db(self):
|
async def init_db(self, config_dict: dict = None):
|
||||||
"""Initialize database tables - creates all tables and ensures data integrity"""
|
"""Initialize database tables - creates all tables and ensures data integrity
|
||||||
|
|
||||||
|
Args:
|
||||||
|
config_dict: Configuration dictionary from setting.toml (optional).
|
||||||
|
Used to initialize newly-added proxy columns during migration.
|
||||||
|
"""
|
||||||
async with aiosqlite.connect(self.db_path) as db:
|
async with aiosqlite.connect(self.db_path) as db:
|
||||||
# Tokens table
|
# Tokens table
|
||||||
await db.execute("""
|
await db.execute("""
|
||||||
@@ -503,6 +596,8 @@ class Database:
|
|||||||
id INTEGER PRIMARY KEY DEFAULT 1,
|
id INTEGER PRIMARY KEY DEFAULT 1,
|
||||||
proxy_enabled BOOLEAN DEFAULT 0,
|
proxy_enabled BOOLEAN DEFAULT 0,
|
||||||
proxy_url TEXT,
|
proxy_url TEXT,
|
||||||
|
image_upload_proxy_enabled BOOLEAN DEFAULT 0,
|
||||||
|
image_upload_proxy_url TEXT,
|
||||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
)
|
)
|
||||||
@@ -561,6 +656,7 @@ class Database:
|
|||||||
id INTEGER PRIMARY KEY DEFAULT 1,
|
id INTEGER PRIMARY KEY DEFAULT 1,
|
||||||
call_mode TEXT DEFAULT 'default',
|
call_mode TEXT DEFAULT 'default',
|
||||||
polling_mode_enabled BOOLEAN DEFAULT 0,
|
polling_mode_enabled BOOLEAN DEFAULT 0,
|
||||||
|
poll_interval REAL DEFAULT 2.5,
|
||||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
)
|
)
|
||||||
@@ -619,6 +715,28 @@ class Database:
|
|||||||
if not await self._column_exists(db, "admin_config", "auto_disable_on_401"):
|
if not await self._column_exists(db, "admin_config", "auto_disable_on_401"):
|
||||||
await db.execute("ALTER TABLE admin_config ADD COLUMN auto_disable_on_401 BOOLEAN DEFAULT 1")
|
await db.execute("ALTER TABLE admin_config ADD COLUMN auto_disable_on_401 BOOLEAN DEFAULT 1")
|
||||||
|
|
||||||
|
# Migration: Add image upload proxy columns to proxy_config table if they don't exist
|
||||||
|
added_image_upload_proxy_enabled_column = False
|
||||||
|
added_image_upload_proxy_url_column = False
|
||||||
|
if not await self._column_exists(db, "proxy_config", "image_upload_proxy_enabled"):
|
||||||
|
await db.execute("ALTER TABLE proxy_config ADD COLUMN image_upload_proxy_enabled BOOLEAN DEFAULT 0")
|
||||||
|
added_image_upload_proxy_enabled_column = True
|
||||||
|
if not await self._column_exists(db, "proxy_config", "image_upload_proxy_url"):
|
||||||
|
await db.execute("ALTER TABLE proxy_config ADD COLUMN image_upload_proxy_url TEXT")
|
||||||
|
added_image_upload_proxy_url_column = True
|
||||||
|
|
||||||
|
# If migration added image upload proxy columns, initialize them from setting.toml defaults
|
||||||
|
if config_dict and (added_image_upload_proxy_enabled_column or added_image_upload_proxy_url_column):
|
||||||
|
proxy_config = config_dict.get("proxy", {})
|
||||||
|
image_upload_proxy_enabled = proxy_config.get("image_upload_proxy_enabled", False)
|
||||||
|
image_upload_proxy_url = proxy_config.get("image_upload_proxy_url", "")
|
||||||
|
image_upload_proxy_url = image_upload_proxy_url if image_upload_proxy_url else None
|
||||||
|
await db.execute("""
|
||||||
|
UPDATE proxy_config
|
||||||
|
SET image_upload_proxy_enabled = ?, image_upload_proxy_url = ?
|
||||||
|
WHERE id = 1
|
||||||
|
""", (image_upload_proxy_enabled, image_upload_proxy_url))
|
||||||
|
|
||||||
# Migration: Add disabled_reason column to tokens table if it doesn't exist
|
# Migration: Add disabled_reason column to tokens table if it doesn't exist
|
||||||
if not await self._column_exists(db, "tokens", "disabled_reason"):
|
if not await self._column_exists(db, "tokens", "disabled_reason"):
|
||||||
await db.execute("ALTER TABLE tokens ADD COLUMN disabled_reason TEXT")
|
await db.execute("ALTER TABLE tokens ADD COLUMN disabled_reason TEXT")
|
||||||
@@ -1188,14 +1306,26 @@ class Database:
|
|||||||
# This should not happen in normal operation as _ensure_config_rows should create it
|
# This should not happen in normal operation as _ensure_config_rows should create it
|
||||||
return ProxyConfig(proxy_enabled=False)
|
return ProxyConfig(proxy_enabled=False)
|
||||||
|
|
||||||
async def update_proxy_config(self, enabled: bool, proxy_url: Optional[str]):
|
async def update_proxy_config(
|
||||||
|
self,
|
||||||
|
enabled: bool,
|
||||||
|
proxy_url: Optional[str],
|
||||||
|
image_upload_proxy_enabled: bool = False,
|
||||||
|
image_upload_proxy_url: Optional[str] = None
|
||||||
|
):
|
||||||
"""Update proxy configuration"""
|
"""Update proxy configuration"""
|
||||||
|
proxy_url = proxy_url if proxy_url else None
|
||||||
|
image_upload_proxy_url = image_upload_proxy_url if image_upload_proxy_url else None
|
||||||
async with aiosqlite.connect(self.db_path) as db:
|
async with aiosqlite.connect(self.db_path) as db:
|
||||||
await db.execute("""
|
await db.execute("""
|
||||||
UPDATE proxy_config
|
UPDATE proxy_config
|
||||||
SET proxy_enabled = ?, proxy_url = ?, updated_at = CURRENT_TIMESTAMP
|
SET proxy_enabled = ?,
|
||||||
|
proxy_url = ?,
|
||||||
|
image_upload_proxy_enabled = ?,
|
||||||
|
image_upload_proxy_url = ?,
|
||||||
|
updated_at = CURRENT_TIMESTAMP
|
||||||
WHERE id = 1
|
WHERE id = 1
|
||||||
""", (enabled, proxy_url))
|
""", (enabled, proxy_url, image_upload_proxy_enabled, image_upload_proxy_url))
|
||||||
await db.commit()
|
await db.commit()
|
||||||
|
|
||||||
# Watermark-free config operations
|
# Watermark-free config operations
|
||||||
@@ -1348,19 +1478,46 @@ class Database:
|
|||||||
row_dict = dict(row)
|
row_dict = dict(row)
|
||||||
if not row_dict.get("call_mode"):
|
if not row_dict.get("call_mode"):
|
||||||
row_dict["call_mode"] = "polling" if row_dict.get("polling_mode_enabled") else "default"
|
row_dict["call_mode"] = "polling" if row_dict.get("polling_mode_enabled") else "default"
|
||||||
|
poll_interval = row_dict.get("poll_interval", 2.5)
|
||||||
|
try:
|
||||||
|
poll_interval = float(poll_interval)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
poll_interval = 2.5
|
||||||
|
if poll_interval <= 0:
|
||||||
|
poll_interval = 2.5
|
||||||
|
row_dict["poll_interval"] = poll_interval
|
||||||
return CallLogicConfig(**row_dict)
|
return CallLogicConfig(**row_dict)
|
||||||
return CallLogicConfig(call_mode="default", polling_mode_enabled=False)
|
return CallLogicConfig(call_mode="default", polling_mode_enabled=False, poll_interval=2.5)
|
||||||
|
|
||||||
async def update_call_logic_config(self, call_mode: str):
|
async def update_call_logic_config(self, call_mode: str, poll_interval: Optional[float] = None):
|
||||||
"""Update call logic configuration"""
|
"""Update call logic configuration"""
|
||||||
normalized = "polling" if call_mode == "polling" else "default"
|
normalized = "polling" if call_mode == "polling" else "default"
|
||||||
polling_mode_enabled = normalized == "polling"
|
polling_mode_enabled = normalized == "polling"
|
||||||
async with aiosqlite.connect(self.db_path) as db:
|
async with aiosqlite.connect(self.db_path) as db:
|
||||||
|
effective_poll_interval = 2.5
|
||||||
|
cursor = await db.execute("SELECT poll_interval FROM call_logic_config WHERE id = 1")
|
||||||
|
row = await cursor.fetchone()
|
||||||
|
if row and row[0] is not None:
|
||||||
|
try:
|
||||||
|
effective_poll_interval = float(row[0])
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
effective_poll_interval = 2.5
|
||||||
|
if effective_poll_interval <= 0:
|
||||||
|
effective_poll_interval = 2.5
|
||||||
|
|
||||||
|
if poll_interval is not None:
|
||||||
|
try:
|
||||||
|
effective_poll_interval = float(poll_interval)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
effective_poll_interval = 2.5
|
||||||
|
if effective_poll_interval <= 0:
|
||||||
|
effective_poll_interval = 2.5
|
||||||
|
|
||||||
# Use INSERT OR REPLACE to ensure the row exists
|
# Use INSERT OR REPLACE to ensure the row exists
|
||||||
await db.execute("""
|
await db.execute("""
|
||||||
INSERT OR REPLACE INTO call_logic_config (id, call_mode, polling_mode_enabled, updated_at)
|
INSERT OR REPLACE INTO call_logic_config (id, call_mode, polling_mode_enabled, poll_interval, updated_at)
|
||||||
VALUES (1, ?, ?, CURRENT_TIMESTAMP)
|
VALUES (1, ?, ?, ?, CURRENT_TIMESTAMP)
|
||||||
""", (normalized, polling_mode_enabled))
|
""", (normalized, polling_mode_enabled, effective_poll_interval))
|
||||||
await db.commit()
|
await db.commit()
|
||||||
|
|
||||||
# POW proxy config operations
|
# POW proxy config operations
|
||||||
|
|||||||
@@ -102,6 +102,8 @@ class ProxyConfig(BaseModel):
|
|||||||
id: int = 1
|
id: int = 1
|
||||||
proxy_enabled: bool # Read from database, initialized from setting.toml on first startup
|
proxy_enabled: bool # Read from database, initialized from setting.toml on first startup
|
||||||
proxy_url: Optional[str] = None # Read from database, initialized from setting.toml on first startup
|
proxy_url: Optional[str] = None # Read from database, initialized from setting.toml on first startup
|
||||||
|
image_upload_proxy_enabled: bool = False # Image upload proxy enabled
|
||||||
|
image_upload_proxy_url: Optional[str] = None # Image upload proxy URL
|
||||||
created_at: Optional[datetime] = None
|
created_at: Optional[datetime] = None
|
||||||
updated_at: Optional[datetime] = None
|
updated_at: Optional[datetime] = None
|
||||||
|
|
||||||
@@ -145,6 +147,7 @@ class CallLogicConfig(BaseModel):
|
|||||||
id: int = 1
|
id: int = 1
|
||||||
call_mode: str = "default" # "default" or "polling"
|
call_mode: str = "default" # "default" or "polling"
|
||||||
polling_mode_enabled: bool = False # Read from database, initialized from setting.toml on first startup
|
polling_mode_enabled: bool = False # Read from database, initialized from setting.toml on first startup
|
||||||
|
poll_interval: float = 2.5 # Progress polling interval in seconds
|
||||||
created_at: Optional[datetime] = None
|
created_at: Optional[datetime] = None
|
||||||
updated_at: Optional[datetime] = None
|
updated_at: Optional[datetime] = None
|
||||||
|
|
||||||
|
|||||||
@@ -103,7 +103,7 @@ async def startup_event():
|
|||||||
is_first_startup = not db.db_exists()
|
is_first_startup = not db.db_exists()
|
||||||
|
|
||||||
# Initialize database tables
|
# Initialize database tables
|
||||||
await db.init_db()
|
await db.init_db(config_dict)
|
||||||
|
|
||||||
# Handle database initialization based on startup type
|
# Handle database initialization based on startup type
|
||||||
if is_first_startup:
|
if is_first_startup:
|
||||||
@@ -142,7 +142,8 @@ async def startup_event():
|
|||||||
# Load call logic configuration from database
|
# Load call logic configuration from database
|
||||||
call_logic_config = await db.get_call_logic_config()
|
call_logic_config = await db.get_call_logic_config()
|
||||||
config.set_call_logic_mode(call_logic_config.call_mode)
|
config.set_call_logic_mode(call_logic_config.call_mode)
|
||||||
print(f"✓ Call logic mode: {call_logic_config.call_mode}")
|
config.set_poll_interval(call_logic_config.poll_interval)
|
||||||
|
print(f"✓ Call logic mode: {call_logic_config.call_mode}, poll_interval: {call_logic_config.poll_interval}s")
|
||||||
|
|
||||||
# Load POW service configuration from database
|
# Load POW service configuration from database
|
||||||
pow_service_config = await db.get_pow_service_config()
|
pow_service_config = await db.get_pow_service_config()
|
||||||
|
|||||||
@@ -611,7 +611,11 @@ class GenerationHandler:
|
|||||||
is_first_chunk = False
|
is_first_chunk = False
|
||||||
|
|
||||||
image_data = self._decode_base64_image(image)
|
image_data = self._decode_base64_image(image)
|
||||||
media_id = await self.sora_client.upload_image(image_data, token_obj.token)
|
media_id = await self.sora_client.upload_image(
|
||||||
|
image_data,
|
||||||
|
token_obj.token,
|
||||||
|
token_id=token_obj.id
|
||||||
|
)
|
||||||
|
|
||||||
if stream:
|
if stream:
|
||||||
yield self._format_stream_chunk(
|
yield self._format_stream_chunk(
|
||||||
@@ -1712,7 +1716,11 @@ class GenerationHandler:
|
|||||||
yield self._format_stream_chunk(
|
yield self._format_stream_chunk(
|
||||||
reasoning_content="Uploading character avatar...\n"
|
reasoning_content="Uploading character avatar...\n"
|
||||||
)
|
)
|
||||||
asset_pointer = await self.sora_client.upload_character_image(avatar_data, token_obj.token)
|
asset_pointer = await self.sora_client.upload_character_image(
|
||||||
|
avatar_data,
|
||||||
|
token_obj.token,
|
||||||
|
token_id=token_obj.id
|
||||||
|
)
|
||||||
debug_logger.log_info(f"Avatar uploaded, asset_pointer: {asset_pointer}")
|
debug_logger.log_info(f"Avatar uploaded, asset_pointer: {asset_pointer}")
|
||||||
|
|
||||||
# Step 5: Finalize character
|
# Step 5: Finalize character
|
||||||
@@ -1896,7 +1904,11 @@ class GenerationHandler:
|
|||||||
yield self._format_stream_chunk(
|
yield self._format_stream_chunk(
|
||||||
reasoning_content="Uploading character avatar...\n"
|
reasoning_content="Uploading character avatar...\n"
|
||||||
)
|
)
|
||||||
asset_pointer = await self.sora_client.upload_character_image(avatar_data, token_obj.token)
|
asset_pointer = await self.sora_client.upload_character_image(
|
||||||
|
avatar_data,
|
||||||
|
token_obj.token,
|
||||||
|
token_id=token_obj.id
|
||||||
|
)
|
||||||
debug_logger.log_info(f"Avatar uploaded, asset_pointer: {asset_pointer}")
|
debug_logger.log_info(f"Avatar uploaded, asset_pointer: {asset_pointer}")
|
||||||
|
|
||||||
# Step 5: Finalize character
|
# Step 5: Finalize character
|
||||||
@@ -2096,7 +2108,11 @@ class GenerationHandler:
|
|||||||
yield self._format_stream_chunk(
|
yield self._format_stream_chunk(
|
||||||
reasoning_content="Uploading character avatar...\n"
|
reasoning_content="Uploading character avatar...\n"
|
||||||
)
|
)
|
||||||
asset_pointer = await self.sora_client.upload_character_image(avatar_data, token_obj.token)
|
asset_pointer = await self.sora_client.upload_character_image(
|
||||||
|
avatar_data,
|
||||||
|
token_obj.token,
|
||||||
|
token_id=token_obj.id
|
||||||
|
)
|
||||||
debug_logger.log_info(f"Avatar uploaded, asset_pointer: {asset_pointer}")
|
debug_logger.log_info(f"Avatar uploaded, asset_pointer: {asset_pointer}")
|
||||||
|
|
||||||
# Step 5: Finalize character
|
# Step 5: Finalize character
|
||||||
|
|||||||
@@ -1,29 +1,44 @@
|
|||||||
"""POW Service Client - External POW service integration"""
|
"""POW Service Client - External POW service integration (POST /api/v1/sora/sentinel-token)"""
|
||||||
import json
|
from typing import NamedTuple, Optional
|
||||||
from typing import Optional, Tuple
|
|
||||||
from curl_cffi.requests import AsyncSession
|
from curl_cffi.requests import AsyncSession
|
||||||
|
|
||||||
from ..core.config import config
|
from ..core.config import config
|
||||||
from ..core.logger import debug_logger
|
from ..core.logger import debug_logger
|
||||||
|
|
||||||
|
|
||||||
class POWServiceClient:
|
class SentinelResult(NamedTuple):
|
||||||
"""Client for external POW service API"""
|
"""Result from external sentinel-token API."""
|
||||||
|
|
||||||
async def get_sentinel_token(self, access_token: Optional[str] = None) -> Optional[Tuple[str, str, str]]:
|
sentinel_token: str
|
||||||
"""Get sentinel token from external POW service
|
device_id: Optional[str]
|
||||||
|
user_agent: Optional[str]
|
||||||
|
cookie_header: Optional[str]
|
||||||
|
|
||||||
|
|
||||||
|
class POWServiceClient:
|
||||||
|
"""Client for external POW service API."""
|
||||||
|
|
||||||
|
async def get_sentinel_token(
|
||||||
|
self,
|
||||||
|
access_token: Optional[str] = None,
|
||||||
|
session_token: Optional[str] = None,
|
||||||
|
proxy_url: Optional[str] = None,
|
||||||
|
device_type: str = "ios",
|
||||||
|
) -> Optional[SentinelResult]:
|
||||||
|
"""Get sentinel token from external POW service.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
access_token: Optional access token to send to POW service
|
access_token: Sora access token (optional).
|
||||||
|
session_token: Sora session token (optional).
|
||||||
|
proxy_url: Proxy URL for upstream solver (optional).
|
||||||
|
device_type: Device type hint for upstream solver.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Tuple of (sentinel_token, device_id, user_agent) or None on failure
|
SentinelResult or None on failure.
|
||||||
"""
|
"""
|
||||||
# Read configuration dynamically on each call
|
|
||||||
server_url = config.pow_service_server_url
|
server_url = config.pow_service_server_url
|
||||||
api_key = config.pow_service_api_key
|
api_key = config.pow_service_api_key
|
||||||
proxy_enabled = config.pow_service_proxy_enabled
|
request_proxy = config.pow_service_proxy_url if config.pow_service_proxy_enabled else None
|
||||||
proxy_url = config.pow_service_proxy_url if proxy_enabled else None
|
|
||||||
|
|
||||||
if not server_url or not api_key:
|
if not server_url or not api_key:
|
||||||
debug_logger.log_error(
|
debug_logger.log_error(
|
||||||
@@ -34,8 +49,7 @@ class POWServiceClient:
|
|||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# Construct API endpoint
|
api_url = f"{server_url.rstrip('/')}/api/v1/sora/sentinel-token"
|
||||||
api_url = f"{server_url.rstrip('/')}/api/pow/token"
|
|
||||||
|
|
||||||
headers = {
|
headers = {
|
||||||
"Authorization": f"Bearer {api_key}",
|
"Authorization": f"Bearer {api_key}",
|
||||||
@@ -43,10 +57,15 @@ class POWServiceClient:
|
|||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
}
|
}
|
||||||
|
|
||||||
# Controlled by config switch: whether to pass current token to POW service
|
payload = {"device_type": device_type}
|
||||||
send_access_token = bool(config.pow_service_use_token_for_pow and access_token)
|
if access_token:
|
||||||
|
payload["access_token"] = access_token
|
||||||
|
if session_token:
|
||||||
|
payload["session_token"] = session_token
|
||||||
|
if proxy_url:
|
||||||
|
payload["proxy_url"] = proxy_url
|
||||||
|
|
||||||
def _mask_token(token_value: Optional[str]) -> str:
|
def _mask(token_value: Optional[str]) -> str:
|
||||||
if not token_value:
|
if not token_value:
|
||||||
return "none"
|
return "none"
|
||||||
if len(token_value) <= 10:
|
if len(token_value) <= 10:
|
||||||
@@ -54,69 +73,33 @@ class POWServiceClient:
|
|||||||
return f"{token_value[:6]}...{token_value[-4:]}"
|
return f"{token_value[:6]}...{token_value[-4:]}"
|
||||||
|
|
||||||
debug_logger.log_info(
|
debug_logger.log_info(
|
||||||
f"[POW Service] use_token_for_pow={config.pow_service_use_token_for_pow}, access_token={_mask_token(access_token)}"
|
f"[POW Service] POST {api_url} access_token={_mask(access_token)} proxy_url={proxy_url or 'none'}"
|
||||||
)
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
debug_logger.log_info(f"[POW Service] Requesting token from {api_url}")
|
|
||||||
|
|
||||||
async with AsyncSession(impersonate="chrome131") as session:
|
async with AsyncSession(impersonate="chrome131") as session:
|
||||||
# Preferred protocol: POST + JSON body
|
|
||||||
payload = {"flow": "sora_init"}
|
|
||||||
if send_access_token:
|
|
||||||
payload["accesstoken"] = access_token
|
|
||||||
|
|
||||||
response = await session.post(
|
response = await session.post(
|
||||||
api_url,
|
api_url,
|
||||||
headers=headers,
|
headers=headers,
|
||||||
json=payload,
|
json=payload,
|
||||||
proxy=proxy_url,
|
proxy=request_proxy,
|
||||||
timeout=30
|
timeout=30,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Backward compatibility: older services may only support GET + X-Access-Token
|
|
||||||
if response.status_code in (404, 405, 415):
|
|
||||||
fallback_headers = {
|
|
||||||
"Authorization": f"Bearer {api_key}",
|
|
||||||
"Accept": "application/json"
|
|
||||||
}
|
|
||||||
if send_access_token:
|
|
||||||
fallback_headers["X-Access-Token"] = access_token
|
|
||||||
debug_logger.log_info(
|
|
||||||
f"[POW Service] POST unsupported ({response.status_code}), fallback to GET compatibility mode"
|
|
||||||
)
|
|
||||||
response = await session.get(
|
|
||||||
api_url,
|
|
||||||
headers=fallback_headers,
|
|
||||||
proxy=proxy_url,
|
|
||||||
timeout=30
|
|
||||||
)
|
|
||||||
|
|
||||||
if response.status_code != 200:
|
if response.status_code != 200:
|
||||||
error_msg = f"POW service request failed: {response.status_code}"
|
|
||||||
debug_logger.log_error(
|
debug_logger.log_error(
|
||||||
error_message=error_msg,
|
error_message=f"POW service request failed: {response.status_code}",
|
||||||
status_code=response.status_code,
|
status_code=response.status_code,
|
||||||
response_text=response.text,
|
response_text=response.text,
|
||||||
source="POWServiceClient"
|
source="POWServiceClient",
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
data = response.json()
|
data = response.json()
|
||||||
|
token = data.get("sentinel_token")
|
||||||
if not data.get("success"):
|
|
||||||
debug_logger.log_error(
|
|
||||||
error_message="POW service returned success=false",
|
|
||||||
status_code=response.status_code,
|
|
||||||
response_text=response.text,
|
|
||||||
source="POWServiceClient"
|
|
||||||
)
|
|
||||||
return None
|
|
||||||
|
|
||||||
token = data.get("token")
|
|
||||||
device_id = data.get("device_id")
|
device_id = data.get("device_id")
|
||||||
user_agent = data.get("user_agent")
|
user_agent = data.get("user_agent")
|
||||||
cached = data.get("cached", False)
|
cookie_header = data.get("cookie_header")
|
||||||
|
|
||||||
if not token:
|
if not token:
|
||||||
debug_logger.log_error(
|
debug_logger.log_error(
|
||||||
@@ -127,42 +110,16 @@ class POWServiceClient:
|
|||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# Parse token to extract device_id if not provided
|
debug_logger.log_info(
|
||||||
token_data = None
|
f"[POW Service] sentinel_token len={len(token)} device_id={device_id} "
|
||||||
if not device_id:
|
f"ua={bool(user_agent)} cookie_header={bool(cookie_header)}"
|
||||||
try:
|
)
|
||||||
token_data = json.loads(token)
|
return SentinelResult(
|
||||||
device_id = token_data.get("id")
|
sentinel_token=token,
|
||||||
except:
|
device_id=device_id,
|
||||||
pass
|
user_agent=user_agent,
|
||||||
|
cookie_header=cookie_header,
|
||||||
# 记录详细的 token 信息
|
)
|
||||||
cache_status = "cached" if cached else "fresh"
|
|
||||||
debug_logger.log_info("=" * 100)
|
|
||||||
debug_logger.log_info(f"[POW Service] Token obtained successfully ({cache_status})")
|
|
||||||
debug_logger.log_info(f"[POW Service] Token length: {len(token)}")
|
|
||||||
debug_logger.log_info(f"[POW Service] Device ID: {device_id}")
|
|
||||||
debug_logger.log_info(f"[POW Service] User Agent: {user_agent}")
|
|
||||||
|
|
||||||
# 解析并显示 token 结构
|
|
||||||
if not token_data:
|
|
||||||
try:
|
|
||||||
token_data = json.loads(token)
|
|
||||||
except:
|
|
||||||
debug_logger.log_info(f"[POW Service] Token is not valid JSON")
|
|
||||||
token_data = None
|
|
||||||
|
|
||||||
if token_data:
|
|
||||||
debug_logger.log_info(f"[POW Service] Token structure keys: {list(token_data.keys())}")
|
|
||||||
for key, value in token_data.items():
|
|
||||||
if isinstance(value, str) and len(value) > 100:
|
|
||||||
debug_logger.log_info(f"[POW Service] Token[{key}]: <string, length={len(value)}>")
|
|
||||||
else:
|
|
||||||
debug_logger.log_info(f"[POW Service] Token[{key}]: {value}")
|
|
||||||
|
|
||||||
debug_logger.log_info("=" * 100)
|
|
||||||
|
|
||||||
return token, device_id, user_agent
|
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
debug_logger.log_error(
|
debug_logger.log_error(
|
||||||
@@ -174,5 +131,4 @@ class POWServiceClient:
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
# Global instance
|
|
||||||
pow_service_client = POWServiceClient()
|
pow_service_client = POWServiceClient()
|
||||||
|
|||||||
@@ -36,9 +36,42 @@ class ProxyManager:
|
|||||||
return config.proxy_url
|
return config.proxy_url
|
||||||
return None
|
return None
|
||||||
|
|
||||||
async def update_proxy_config(self, enabled: bool, proxy_url: Optional[str]):
|
async def get_image_upload_proxy_url(self, token_id: Optional[int] = None) -> Optional[str]:
|
||||||
|
"""Get proxy URL specifically for image uploads
|
||||||
|
|
||||||
|
Priority:
|
||||||
|
1. Image upload proxy (if enabled in config)
|
||||||
|
2. Token-specific proxy (if token_id provided)
|
||||||
|
3. Global proxy (fallback)
|
||||||
|
4. None (no proxy)
|
||||||
|
|
||||||
|
Args:
|
||||||
|
token_id: Token ID (optional). Used for fallback to token-specific proxy.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Proxy URL string or None
|
||||||
|
"""
|
||||||
|
config = await self.db.get_proxy_config()
|
||||||
|
if config.image_upload_proxy_enabled and config.image_upload_proxy_url:
|
||||||
|
return config.image_upload_proxy_url
|
||||||
|
|
||||||
|
# Fallback to standard proxy resolution
|
||||||
|
return await self.get_proxy_url(token_id=token_id)
|
||||||
|
|
||||||
|
async def update_proxy_config(
|
||||||
|
self,
|
||||||
|
enabled: bool,
|
||||||
|
proxy_url: Optional[str],
|
||||||
|
image_upload_proxy_enabled: bool = False,
|
||||||
|
image_upload_proxy_url: Optional[str] = None
|
||||||
|
):
|
||||||
"""Update proxy configuration"""
|
"""Update proxy configuration"""
|
||||||
await self.db.update_proxy_config(enabled, proxy_url)
|
await self.db.update_proxy_config(
|
||||||
|
enabled,
|
||||||
|
proxy_url,
|
||||||
|
image_upload_proxy_enabled,
|
||||||
|
image_upload_proxy_url
|
||||||
|
)
|
||||||
|
|
||||||
async def get_proxy_config(self) -> ProxyConfig:
|
async def get_proxy_config(self) -> ProxyConfig:
|
||||||
"""Get proxy configuration"""
|
"""Get proxy configuration"""
|
||||||
|
|||||||
@@ -36,6 +36,54 @@ _cached_sentinel_token_map = {}
|
|||||||
_cached_device_id = None
|
_cached_device_id = None
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_device_id_from_sentinel(sentinel_token: Optional[str]) -> Optional[str]:
|
||||||
|
"""Extract device id from sentinel token JSON."""
|
||||||
|
if not sentinel_token:
|
||||||
|
return None
|
||||||
|
try:
|
||||||
|
data = json.loads(sentinel_token)
|
||||||
|
if isinstance(data, dict):
|
||||||
|
value = data.get("id")
|
||||||
|
return str(value) if value else None
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _build_session_cookie_header(session_token: str) -> str:
|
||||||
|
"""Build session cookie header used by ChatGPT/Sora requests."""
|
||||||
|
return f"__Secure-next-auth.session-token={session_token}"
|
||||||
|
|
||||||
|
|
||||||
|
async def _resolve_session_token(
|
||||||
|
access_token: Optional[str] = None,
|
||||||
|
token_id: Optional[int] = None,
|
||||||
|
) -> Optional[str]:
|
||||||
|
"""Resolve session token (st) from token_id or access token."""
|
||||||
|
if not token_id and not access_token:
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ..core.database import Database
|
||||||
|
|
||||||
|
db = Database()
|
||||||
|
token_obj = None
|
||||||
|
|
||||||
|
if token_id:
|
||||||
|
token_obj = await db.get_token(token_id)
|
||||||
|
|
||||||
|
# Fallback by access token if token_id is unavailable or has no st
|
||||||
|
if (not token_obj or not token_obj.st) and access_token:
|
||||||
|
token_obj = await db.get_token_by_value(access_token)
|
||||||
|
|
||||||
|
if token_obj and token_obj.st:
|
||||||
|
return token_obj.st
|
||||||
|
except Exception as e:
|
||||||
|
debug_logger.log_warning(f"[Sentinel] Failed to resolve session token: {e}")
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
async def _get_browser(proxy_url: str = None):
|
async def _get_browser(proxy_url: str = None):
|
||||||
"""Get or create browser instance (reuses existing browser)"""
|
"""Get or create browser instance (reuses existing browser)"""
|
||||||
global _browser, _playwright, _current_proxy
|
global _browser, _playwright, _current_proxy
|
||||||
@@ -81,7 +129,12 @@ async def _close_browser():
|
|||||||
_playwright = None
|
_playwright = None
|
||||||
|
|
||||||
|
|
||||||
async def _fetch_oai_did(proxy_url: str = None, max_retries: int = 3) -> str:
|
async def _fetch_oai_did(
|
||||||
|
proxy_url: str = None,
|
||||||
|
max_retries: int = 3,
|
||||||
|
session_token: Optional[str] = None,
|
||||||
|
cookie_header: Optional[str] = None,
|
||||||
|
) -> str:
|
||||||
"""Fetch oai-did using curl_cffi (lightweight approach)
|
"""Fetch oai-did using curl_cffi (lightweight approach)
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
@@ -92,8 +145,15 @@ async def _fetch_oai_did(proxy_url: str = None, max_retries: int = 3) -> str:
|
|||||||
for attempt in range(max_retries):
|
for attempt in range(max_retries):
|
||||||
try:
|
try:
|
||||||
async with AsyncSession(impersonate="chrome120") as session:
|
async with AsyncSession(impersonate="chrome120") as session:
|
||||||
|
headers = None
|
||||||
|
if cookie_header:
|
||||||
|
headers = {"Cookie": cookie_header}
|
||||||
|
elif session_token:
|
||||||
|
headers = {"Cookie": _build_session_cookie_header(session_token)}
|
||||||
|
|
||||||
response = await session.get(
|
response = await session.get(
|
||||||
"https://chatgpt.com/",
|
"https://chatgpt.com/",
|
||||||
|
headers=headers,
|
||||||
proxy=proxy_url,
|
proxy=proxy_url,
|
||||||
timeout=30,
|
timeout=30,
|
||||||
allow_redirects=True
|
allow_redirects=True
|
||||||
@@ -130,7 +190,11 @@ async def _fetch_oai_did(proxy_url: str = None, max_retries: int = 3) -> str:
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
async def _generate_sentinel_token_lightweight(proxy_url: str = None, device_id: str = None) -> str:
|
async def _generate_sentinel_token_lightweight(
|
||||||
|
proxy_url: str = None,
|
||||||
|
device_id: str = None,
|
||||||
|
session_token: Optional[str] = None,
|
||||||
|
) -> str:
|
||||||
"""Generate sentinel token using lightweight Playwright approach
|
"""Generate sentinel token using lightweight Playwright approach
|
||||||
|
|
||||||
Uses route interception and SDK injection for minimal resource usage.
|
Uses route interception and SDK injection for minimal resource usage.
|
||||||
@@ -154,7 +218,7 @@ async def _generate_sentinel_token_lightweight(proxy_url: str = None, device_id:
|
|||||||
|
|
||||||
# Get oai-did
|
# Get oai-did
|
||||||
if not device_id:
|
if not device_id:
|
||||||
device_id = await _fetch_oai_did(proxy_url)
|
device_id = await _fetch_oai_did(proxy_url, session_token=session_token)
|
||||||
|
|
||||||
if not device_id:
|
if not device_id:
|
||||||
debug_logger.log_info("[Sentinel] Failed to get oai-did")
|
debug_logger.log_info("[Sentinel] Failed to get oai-did")
|
||||||
@@ -171,13 +235,24 @@ async def _generate_sentinel_token_lightweight(proxy_url: str = None, device_id:
|
|||||||
bypass_csp=True
|
bypass_csp=True
|
||||||
)
|
)
|
||||||
|
|
||||||
# Set cookie
|
# Set oai-did cookie (+ session cookie when token-aware POW is enabled)
|
||||||
await context.add_cookies([{
|
cookies_to_set = [{
|
||||||
'name': 'oai-did',
|
'name': 'oai-did',
|
||||||
'value': device_id,
|
'value': device_id,
|
||||||
'domain': 'sora.chatgpt.com',
|
'domain': 'sora.chatgpt.com',
|
||||||
'path': '/'
|
'path': '/'
|
||||||
}])
|
}]
|
||||||
|
if session_token:
|
||||||
|
cookies_to_set.append({
|
||||||
|
'name': '__Secure-next-auth.session-token',
|
||||||
|
'value': session_token,
|
||||||
|
'domain': '.chatgpt.com',
|
||||||
|
'path': '/',
|
||||||
|
'secure': True,
|
||||||
|
'httpOnly': True,
|
||||||
|
'sameSite': 'None',
|
||||||
|
})
|
||||||
|
await context.add_cookies(cookies_to_set)
|
||||||
|
|
||||||
page = await context.new_page()
|
page = await context.new_page()
|
||||||
|
|
||||||
@@ -231,16 +306,22 @@ async def _generate_sentinel_token_lightweight(proxy_url: str = None, device_id:
|
|||||||
await context.close()
|
await context.close()
|
||||||
|
|
||||||
|
|
||||||
async def _get_cached_sentinel_token(proxy_url: str = None, force_refresh: bool = False, access_token: Optional[str] = None) -> str:
|
async def _get_cached_sentinel_token(
|
||||||
|
proxy_url: str = None,
|
||||||
|
force_refresh: bool = False,
|
||||||
|
access_token: Optional[str] = None,
|
||||||
|
token_id: Optional[int] = None,
|
||||||
|
) -> Optional[Dict[str, Optional[str]]]:
|
||||||
"""Get sentinel token with caching support
|
"""Get sentinel token with caching support
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
proxy_url: Optional proxy URL
|
proxy_url: Optional proxy URL
|
||||||
force_refresh: Force refresh token (e.g., after 400 error)
|
force_refresh: Force refresh token (e.g., after 400 error)
|
||||||
access_token: Optional access token to send to external POW service
|
access_token: Optional access token to send to external POW service
|
||||||
|
token_id: Optional token id to resolve session token for local POW
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Sentinel token string or None
|
Dict with sentinel_token/device_id/user_agent/cookie_header or None
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
Exception: If 403/429 when fetching oai-did
|
Exception: If 403/429 when fetching oai-did
|
||||||
@@ -248,43 +329,82 @@ async def _get_cached_sentinel_token(proxy_url: str = None, force_refresh: bool
|
|||||||
global _cached_sentinel_token_map
|
global _cached_sentinel_token_map
|
||||||
|
|
||||||
# Whether current request should be token-aware for POW
|
# Whether current request should be token-aware for POW
|
||||||
use_token_for_pow = bool(config.pow_service_use_token_for_pow and access_token)
|
use_token_for_pow = bool(config.pow_service_use_token_for_pow and (access_token or token_id))
|
||||||
cache_key = access_token if use_token_for_pow else "__default__"
|
disable_cache_for_local_token_pow = bool(use_token_for_pow and config.pow_service_mode == "local")
|
||||||
|
if use_token_for_pow and access_token:
|
||||||
|
cache_key = access_token
|
||||||
|
elif use_token_for_pow and token_id:
|
||||||
|
cache_key = f"token_id:{token_id}"
|
||||||
|
else:
|
||||||
|
cache_key = "__default__"
|
||||||
|
session_token = await _resolve_session_token(access_token=access_token, token_id=token_id) if use_token_for_pow else None
|
||||||
|
|
||||||
# Check if external POW service is configured
|
# Check if external POW service is configured
|
||||||
if config.pow_service_mode == "external":
|
if config.pow_service_mode == "external":
|
||||||
debug_logger.log_info("[POW] Using external POW service (cached sentinel)")
|
debug_logger.log_info("[POW] Using external POW service (cached sentinel)")
|
||||||
from .pow_service_client import pow_service_client
|
|
||||||
result = await pow_service_client.get_sentinel_token(
|
result = await pow_service_client.get_sentinel_token(
|
||||||
access_token=access_token if use_token_for_pow else None
|
access_token=access_token if use_token_for_pow else None,
|
||||||
|
session_token=session_token if use_token_for_pow else None,
|
||||||
|
proxy_url=proxy_url,
|
||||||
)
|
)
|
||||||
|
|
||||||
if result:
|
if result:
|
||||||
sentinel_token, device_id, service_user_agent = result
|
sentinel_data = {
|
||||||
|
"sentinel_token": result.sentinel_token,
|
||||||
|
"device_id": result.device_id or _extract_device_id_from_sentinel(result.sentinel_token),
|
||||||
|
"user_agent": result.user_agent,
|
||||||
|
"cookie_header": result.cookie_header,
|
||||||
|
}
|
||||||
debug_logger.log_info("[POW] External service returned sentinel token successfully")
|
debug_logger.log_info("[POW] External service returned sentinel token successfully")
|
||||||
return sentinel_token
|
return sentinel_data
|
||||||
else:
|
else:
|
||||||
# Fallback to local mode if external service fails
|
# Fallback to local mode if external service fails
|
||||||
debug_logger.log_info("[POW] External service failed, falling back to local mode")
|
debug_logger.log_info("[POW] External service failed, falling back to local mode")
|
||||||
|
|
||||||
# Local mode (original logic)
|
# Local mode
|
||||||
# Return cached token if available and not forcing refresh
|
# local + token-aware POW: do not use cache (compute each time)
|
||||||
if not force_refresh and cache_key in _cached_sentinel_token_map:
|
if disable_cache_for_local_token_pow:
|
||||||
|
debug_logger.log_info("[Sentinel] Local token-aware POW enabled, cache bypassed")
|
||||||
|
# Otherwise keep legacy cache behavior
|
||||||
|
elif not force_refresh and cache_key in _cached_sentinel_token_map:
|
||||||
if use_token_for_pow:
|
if use_token_for_pow:
|
||||||
debug_logger.log_info("[Sentinel] Using token-scoped cached token")
|
debug_logger.log_info("[Sentinel] Using token-scoped cached token")
|
||||||
else:
|
else:
|
||||||
debug_logger.log_info("[Sentinel] Using shared cached token")
|
debug_logger.log_info("[Sentinel] Using shared cached token")
|
||||||
return _cached_sentinel_token_map[cache_key]
|
cached_value = _cached_sentinel_token_map[cache_key]
|
||||||
|
# Backward compatibility: migrate legacy string cache to structured cache.
|
||||||
|
if isinstance(cached_value, str):
|
||||||
|
cached_value = {
|
||||||
|
"sentinel_token": cached_value,
|
||||||
|
"device_id": _extract_device_id_from_sentinel(cached_value),
|
||||||
|
"user_agent": None,
|
||||||
|
"cookie_header": None,
|
||||||
|
}
|
||||||
|
_cached_sentinel_token_map[cache_key] = cached_value
|
||||||
|
return cached_value
|
||||||
|
|
||||||
# Generate new token
|
# Generate new token
|
||||||
debug_logger.log_info("[Sentinel] Generating new token...")
|
debug_logger.log_info("[Sentinel] Generating new token...")
|
||||||
token = await _generate_sentinel_token_lightweight(proxy_url)
|
token = await _generate_sentinel_token_lightweight(
|
||||||
|
proxy_url=proxy_url,
|
||||||
|
session_token=session_token if use_token_for_pow else None,
|
||||||
|
)
|
||||||
|
|
||||||
if token:
|
if token:
|
||||||
_cached_sentinel_token_map[cache_key] = token
|
sentinel_data = {
|
||||||
debug_logger.log_info("[Sentinel] Token cached successfully")
|
"sentinel_token": token,
|
||||||
|
"device_id": _extract_device_id_from_sentinel(token),
|
||||||
|
"user_agent": None,
|
||||||
|
"cookie_header": None,
|
||||||
|
}
|
||||||
|
if not disable_cache_for_local_token_pow:
|
||||||
|
_cached_sentinel_token_map[cache_key] = sentinel_data
|
||||||
|
debug_logger.log_info("[Sentinel] Token cached successfully")
|
||||||
|
else:
|
||||||
|
debug_logger.log_info("[Sentinel] Local token-aware POW generated (not cached)")
|
||||||
|
return sentinel_data
|
||||||
|
|
||||||
return token
|
return None
|
||||||
|
|
||||||
|
|
||||||
def _invalidate_sentinel_cache(access_token: Optional[str] = None):
|
def _invalidate_sentinel_cache(access_token: Optional[str] = None):
|
||||||
@@ -632,9 +752,17 @@ class SoraClient:
|
|||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
async def _nf_create_urllib(self, token: str, payload: dict, sentinel_token: str,
|
async def _nf_create_urllib(
|
||||||
proxy_url: Optional[str], token_id: Optional[int] = None,
|
self,
|
||||||
user_agent: Optional[str] = None) -> Dict[str, Any]:
|
token: str,
|
||||||
|
payload: dict,
|
||||||
|
sentinel_token: str,
|
||||||
|
proxy_url: Optional[str],
|
||||||
|
token_id: Optional[int] = None,
|
||||||
|
user_agent: Optional[str] = None,
|
||||||
|
device_id: Optional[str] = None,
|
||||||
|
cookie_header: Optional[str] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
"""Make nf/create request
|
"""Make nf/create request
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
@@ -647,9 +775,17 @@ class SoraClient:
|
|||||||
if not user_agent:
|
if not user_agent:
|
||||||
user_agent = random.choice(DESKTOP_USER_AGENTS)
|
user_agent = random.choice(DESKTOP_USER_AGENTS)
|
||||||
|
|
||||||
import json as json_mod
|
sentinel_data = {}
|
||||||
sentinel_data = json_mod.loads(sentinel_token)
|
if not device_id:
|
||||||
device_id = sentinel_data.get("id", str(uuid4()))
|
device_id = _extract_device_id_from_sentinel(sentinel_token)
|
||||||
|
if not device_id:
|
||||||
|
device_id = str(uuid4())
|
||||||
|
try:
|
||||||
|
parsed_data = json.loads(sentinel_token)
|
||||||
|
if isinstance(parsed_data, dict):
|
||||||
|
sentinel_data = parsed_data
|
||||||
|
except Exception:
|
||||||
|
sentinel_data = {}
|
||||||
|
|
||||||
headers = {
|
headers = {
|
||||||
"Authorization": f"Bearer {token}",
|
"Authorization": f"Bearer {token}",
|
||||||
@@ -674,7 +810,10 @@ class SoraClient:
|
|||||||
}
|
}
|
||||||
|
|
||||||
# 添加 Cookie 头(关键修复)
|
# 添加 Cookie 头(关键修复)
|
||||||
if token_id:
|
if cookie_header:
|
||||||
|
headers["Cookie"] = cookie_header
|
||||||
|
debug_logger.log_info(f"[nf/create] Using cookie header from POW service (length: {len(cookie_header)})")
|
||||||
|
elif token_id:
|
||||||
try:
|
try:
|
||||||
from src.core.database import Database
|
from src.core.database import Database
|
||||||
db = Database()
|
db = Database()
|
||||||
@@ -762,31 +901,46 @@ class SoraClient:
|
|||||||
except URLError as exc:
|
except URLError as exc:
|
||||||
raise Exception(f"URL Error: {exc}") from exc
|
raise Exception(f"URL Error: {exc}") from exc
|
||||||
|
|
||||||
async def _generate_sentinel_token(self, token: Optional[str] = None, user_agent: Optional[str] = None) -> Tuple[str, str]:
|
async def _generate_sentinel_token(
|
||||||
|
self,
|
||||||
|
token: Optional[str] = None,
|
||||||
|
user_agent: Optional[str] = None,
|
||||||
|
pow_proxy_url: Optional[str] = None,
|
||||||
|
token_id: Optional[int] = None,
|
||||||
|
) -> Dict[str, Optional[str]]:
|
||||||
"""Generate openai-sentinel-token by calling /backend-api/sentinel/req and solving PoW
|
"""Generate openai-sentinel-token by calling /backend-api/sentinel/req and solving PoW
|
||||||
|
|
||||||
Supports two modes:
|
Supports two modes:
|
||||||
- external: Get complete sentinel token from external POW service
|
- external: Get complete sentinel token from external POW service
|
||||||
- local: Generate POW locally and call sentinel/req endpoint
|
- local: Generate POW locally and call sentinel/req endpoint
|
||||||
"""
|
"""
|
||||||
|
use_token_for_pow = bool(config.pow_service_use_token_for_pow and (token or token_id))
|
||||||
|
session_token = await _resolve_session_token(access_token=token, token_id=token_id) if use_token_for_pow else None
|
||||||
|
|
||||||
# Check if external POW service is configured
|
# Check if external POW service is configured
|
||||||
if config.pow_service_mode == "external":
|
if config.pow_service_mode == "external":
|
||||||
debug_logger.log_info("[Sentinel] Using external POW service...")
|
debug_logger.log_info("[Sentinel] Using external POW service...")
|
||||||
result = await pow_service_client.get_sentinel_token(
|
result = await pow_service_client.get_sentinel_token(
|
||||||
access_token=token if config.pow_service_use_token_for_pow else None
|
access_token=token if use_token_for_pow else None,
|
||||||
|
session_token=session_token if use_token_for_pow else None,
|
||||||
|
proxy_url=pow_proxy_url,
|
||||||
)
|
)
|
||||||
|
|
||||||
if result:
|
if result:
|
||||||
sentinel_token, device_id, service_user_agent = result
|
|
||||||
# Use service user agent if provided, otherwise use default
|
# Use service user agent if provided, otherwise use default
|
||||||
final_user_agent = service_user_agent if service_user_agent else (
|
final_user_agent = result.user_agent if result.user_agent else (
|
||||||
user_agent if user_agent else
|
user_agent if user_agent else
|
||||||
"Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Mobile Safari/537.36"
|
"Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Mobile Safari/537.36"
|
||||||
)
|
)
|
||||||
|
|
||||||
debug_logger.log_info(f"[Sentinel] Got token from external service")
|
debug_logger.log_info(f"[Sentinel] Got token from external service")
|
||||||
debug_logger.log_info(f"[Sentinel] Token cached successfully (external)")
|
debug_logger.log_info(f"[Sentinel] Token cached successfully (external)")
|
||||||
return sentinel_token, final_user_agent
|
return {
|
||||||
|
"sentinel_token": result.sentinel_token,
|
||||||
|
"user_agent": final_user_agent,
|
||||||
|
"device_id": result.device_id or _extract_device_id_from_sentinel(result.sentinel_token),
|
||||||
|
"cookie_header": result.cookie_header,
|
||||||
|
}
|
||||||
else:
|
else:
|
||||||
# Fallback to local mode if external service fails
|
# Fallback to local mode if external service fails
|
||||||
debug_logger.log_info("[Sentinel] External service failed, falling back to local mode")
|
debug_logger.log_info("[Sentinel] External service failed, falling back to local mode")
|
||||||
@@ -806,7 +960,7 @@ class SoraClient:
|
|||||||
}
|
}
|
||||||
ua_with_pow = f"{user_agent} {json.dumps(init_payload, separators=(',', ':'))}"
|
ua_with_pow = f"{user_agent} {json.dumps(init_payload, separators=(',', ':'))}"
|
||||||
|
|
||||||
proxy_url = await self.proxy_manager.get_proxy_url()
|
proxy_url = pow_proxy_url or await self.proxy_manager.get_proxy_url(token_id)
|
||||||
|
|
||||||
# Request sentinel/req endpoint
|
# Request sentinel/req endpoint
|
||||||
url = f"{self.CHATGPT_BASE_URL}/backend-api/sentinel/req"
|
url = f"{self.CHATGPT_BASE_URL}/backend-api/sentinel/req"
|
||||||
@@ -827,6 +981,9 @@ class SoraClient:
|
|||||||
"sec-ch-ua-mobile": "?1",
|
"sec-ch-ua-mobile": "?1",
|
||||||
"sec-ch-ua-platform": '"Android"',
|
"sec-ch-ua-platform": '"Android"',
|
||||||
}
|
}
|
||||||
|
if use_token_for_pow and session_token:
|
||||||
|
headers["Cookie"] = _build_session_cookie_header(session_token)
|
||||||
|
debug_logger.log_info("[Sentinel] Local mode enabled token-aware cookie for sentinel/req")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
async with AsyncSession(impersonate="chrome131") as session:
|
async with AsyncSession(impersonate="chrome131") as session:
|
||||||
@@ -860,7 +1017,12 @@ class SoraClient:
|
|||||||
parsed = json.loads(sentinel_token)
|
parsed = json.loads(sentinel_token)
|
||||||
debug_logger.log_info(f"Final sentinel: p_prefix={parsed['p'][:10]}, p_suffix={parsed['p'][-5:]}, t_len={len(parsed['t'])}, c_len={len(parsed['c'])}, flow={parsed['flow']}")
|
debug_logger.log_info(f"Final sentinel: p_prefix={parsed['p'][:10]}, p_suffix={parsed['p'][-5:]}, t_len={len(parsed['t'])}, c_len={len(parsed['c'])}, flow={parsed['flow']}")
|
||||||
|
|
||||||
return sentinel_token, user_agent
|
return {
|
||||||
|
"sentinel_token": sentinel_token,
|
||||||
|
"user_agent": user_agent,
|
||||||
|
"device_id": _extract_device_id_from_sentinel(sentinel_token),
|
||||||
|
"cookie_header": None,
|
||||||
|
}
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def is_storyboard_prompt(prompt: str) -> bool:
|
def is_storyboard_prompt(prompt: str) -> bool:
|
||||||
@@ -928,7 +1090,8 @@ class SoraClient:
|
|||||||
json_data: Optional[Dict] = None,
|
json_data: Optional[Dict] = None,
|
||||||
multipart: Optional[Dict] = None,
|
multipart: Optional[Dict] = None,
|
||||||
add_sentinel_token: bool = False,
|
add_sentinel_token: bool = False,
|
||||||
token_id: Optional[int] = None) -> Dict[str, Any]:
|
token_id: Optional[int] = None,
|
||||||
|
use_image_upload_proxy: bool = False) -> Dict[str, Any]:
|
||||||
"""Make HTTP request with proxy support
|
"""Make HTTP request with proxy support
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -939,8 +1102,12 @@ class SoraClient:
|
|||||||
multipart: Multipart form data (for file uploads)
|
multipart: Multipart form data (for file uploads)
|
||||||
add_sentinel_token: Whether to add openai-sentinel-token header (only for generation requests)
|
add_sentinel_token: Whether to add openai-sentinel-token header (only for generation requests)
|
||||||
token_id: Token ID for getting token-specific proxy (optional)
|
token_id: Token ID for getting token-specific proxy (optional)
|
||||||
|
use_image_upload_proxy: Whether to use dedicated image upload proxy selection
|
||||||
"""
|
"""
|
||||||
proxy_url = await self.proxy_manager.get_proxy_url(token_id)
|
if use_image_upload_proxy:
|
||||||
|
proxy_url = await self.proxy_manager.get_image_upload_proxy_url(token_id)
|
||||||
|
else:
|
||||||
|
proxy_url = await self.proxy_manager.get_proxy_url(token_id)
|
||||||
|
|
||||||
headers = {
|
headers = {
|
||||||
"Authorization": f"Bearer {token}",
|
"Authorization": f"Bearer {token}",
|
||||||
@@ -949,9 +1116,14 @@ class SoraClient:
|
|||||||
|
|
||||||
# 只在生成请求时添加 sentinel token
|
# 只在生成请求时添加 sentinel token
|
||||||
if add_sentinel_token:
|
if add_sentinel_token:
|
||||||
sentinel_token, ua = await self._generate_sentinel_token(token)
|
sentinel_context = await self._generate_sentinel_token(token, token_id=token_id)
|
||||||
headers["openai-sentinel-token"] = sentinel_token
|
headers["openai-sentinel-token"] = sentinel_context["sentinel_token"]
|
||||||
headers["User-Agent"] = ua
|
if sentinel_context.get("user_agent"):
|
||||||
|
headers["User-Agent"] = sentinel_context["user_agent"]
|
||||||
|
if sentinel_context.get("device_id"):
|
||||||
|
headers["oai-device-id"] = sentinel_context["device_id"]
|
||||||
|
if sentinel_context.get("cookie_header"):
|
||||||
|
headers["Cookie"] = sentinel_context["cookie_header"]
|
||||||
|
|
||||||
if not multipart:
|
if not multipart:
|
||||||
headers["Content-Type"] = "application/json"
|
headers["Content-Type"] = "application/json"
|
||||||
@@ -1055,7 +1227,13 @@ class SoraClient:
|
|||||||
"""Get user information"""
|
"""Get user information"""
|
||||||
return await self._make_request("GET", "/me", token)
|
return await self._make_request("GET", "/me", token)
|
||||||
|
|
||||||
async def upload_image(self, image_data: bytes, token: str, filename: str = "image.png") -> str:
|
async def upload_image(
|
||||||
|
self,
|
||||||
|
image_data: bytes,
|
||||||
|
token: str,
|
||||||
|
filename: str = "image.png",
|
||||||
|
token_id: Optional[int] = None
|
||||||
|
) -> str:
|
||||||
"""Upload image and return media_id
|
"""Upload image and return media_id
|
||||||
|
|
||||||
使用 CurlMime 对象上传文件(curl_cffi 的正确方式)
|
使用 CurlMime 对象上传文件(curl_cffi 的正确方式)
|
||||||
@@ -1085,7 +1263,14 @@ class SoraClient:
|
|||||||
data=filename.encode('utf-8')
|
data=filename.encode('utf-8')
|
||||||
)
|
)
|
||||||
|
|
||||||
result = await self._make_request("POST", "/uploads", token, multipart=mp)
|
result = await self._make_request(
|
||||||
|
"POST",
|
||||||
|
"/uploads",
|
||||||
|
token,
|
||||||
|
multipart=mp,
|
||||||
|
token_id=token_id,
|
||||||
|
use_image_upload_proxy=True
|
||||||
|
)
|
||||||
return result["id"]
|
return result["id"]
|
||||||
|
|
||||||
async def generate_image(self, prompt: str, token: str, width: int = 360,
|
async def generate_image(self, prompt: str, token: str, width: int = 360,
|
||||||
@@ -1161,7 +1346,12 @@ class SoraClient:
|
|||||||
|
|
||||||
# Try to get cached sentinel token first (using lightweight Playwright approach)
|
# Try to get cached sentinel token first (using lightweight Playwright approach)
|
||||||
try:
|
try:
|
||||||
sentinel_token = await _get_cached_sentinel_token(pow_proxy_url, force_refresh=False, access_token=token)
|
sentinel_context = await _get_cached_sentinel_token(
|
||||||
|
pow_proxy_url,
|
||||||
|
force_refresh=False,
|
||||||
|
access_token=token,
|
||||||
|
token_id=token_id,
|
||||||
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
# 403/429 errors from oai-did fetch - don't retry, just fail
|
# 403/429 errors from oai-did fetch - don't retry, just fail
|
||||||
error_str = str(e)
|
error_str = str(e)
|
||||||
@@ -1173,16 +1363,30 @@ class SoraClient:
|
|||||||
source="Server"
|
source="Server"
|
||||||
)
|
)
|
||||||
raise
|
raise
|
||||||
sentinel_token = None
|
sentinel_context = None
|
||||||
|
|
||||||
if not sentinel_token:
|
if not sentinel_context:
|
||||||
# Fallback to manual POW if lightweight approach fails
|
# Fallback to manual POW if lightweight approach fails
|
||||||
debug_logger.log_info("[Warning] Lightweight sentinel token failed, falling back to manual POW")
|
debug_logger.log_info("[Warning] Lightweight sentinel token failed, falling back to manual POW")
|
||||||
sentinel_token, user_agent = await self._generate_sentinel_token(token)
|
sentinel_context = await self._generate_sentinel_token(
|
||||||
|
token,
|
||||||
|
user_agent=user_agent,
|
||||||
|
pow_proxy_url=pow_proxy_url,
|
||||||
|
token_id=token_id,
|
||||||
|
)
|
||||||
|
|
||||||
# First attempt with cached/generated token
|
# First attempt with cached/generated token
|
||||||
try:
|
try:
|
||||||
result = await self._nf_create_urllib(token, json_data, sentinel_token, proxy_url, token_id, user_agent)
|
result = await self._nf_create_urllib(
|
||||||
|
token,
|
||||||
|
json_data,
|
||||||
|
sentinel_context["sentinel_token"],
|
||||||
|
proxy_url,
|
||||||
|
token_id,
|
||||||
|
sentinel_context.get("user_agent") or user_agent,
|
||||||
|
sentinel_context.get("device_id"),
|
||||||
|
sentinel_context.get("cookie_header"),
|
||||||
|
)
|
||||||
return result["id"]
|
return result["id"]
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
error_str = str(e)
|
error_str = str(e)
|
||||||
@@ -1195,21 +1399,40 @@ class SoraClient:
|
|||||||
_invalidate_sentinel_cache(token)
|
_invalidate_sentinel_cache(token)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
sentinel_token = await _get_cached_sentinel_token(pow_proxy_url, force_refresh=True, access_token=token)
|
sentinel_context = await _get_cached_sentinel_token(
|
||||||
|
pow_proxy_url,
|
||||||
|
force_refresh=True,
|
||||||
|
access_token=token,
|
||||||
|
token_id=token_id,
|
||||||
|
)
|
||||||
except Exception as refresh_e:
|
except Exception as refresh_e:
|
||||||
# 403/429 errors - don't continue
|
# 403/429 errors - don't continue
|
||||||
error_str = str(refresh_e)
|
error_str = str(refresh_e)
|
||||||
if "403" in error_str or "429" in error_str:
|
if "403" in error_str or "429" in error_str:
|
||||||
raise refresh_e
|
raise refresh_e
|
||||||
sentinel_token = None
|
sentinel_context = None
|
||||||
|
|
||||||
if not sentinel_token:
|
if not sentinel_context:
|
||||||
# Fallback to manual POW
|
# Fallback to manual POW
|
||||||
debug_logger.log_info("[Warning] Refresh failed, falling back to manual POW")
|
debug_logger.log_info("[Warning] Refresh failed, falling back to manual POW")
|
||||||
sentinel_token, user_agent = await self._generate_sentinel_token(token)
|
sentinel_context = await self._generate_sentinel_token(
|
||||||
|
token,
|
||||||
|
user_agent=user_agent,
|
||||||
|
pow_proxy_url=pow_proxy_url,
|
||||||
|
token_id=token_id,
|
||||||
|
)
|
||||||
|
|
||||||
# Retry with fresh token
|
# Retry with fresh token
|
||||||
result = await self._nf_create_urllib(token, json_data, sentinel_token, proxy_url, token_id, user_agent)
|
result = await self._nf_create_urllib(
|
||||||
|
token,
|
||||||
|
json_data,
|
||||||
|
sentinel_context["sentinel_token"],
|
||||||
|
proxy_url,
|
||||||
|
token_id,
|
||||||
|
sentinel_context.get("user_agent") or user_agent,
|
||||||
|
sentinel_context.get("device_id"),
|
||||||
|
sentinel_context.get("cookie_header"),
|
||||||
|
)
|
||||||
return result["id"]
|
return result["id"]
|
||||||
|
|
||||||
# For other errors, just re-raise
|
# For other errors, just re-raise
|
||||||
@@ -1560,7 +1783,12 @@ class SoraClient:
|
|||||||
await self._make_request("POST", f"/project_y/cameos/by_id/{cameo_id}/update_v2", token, json_data=json_data)
|
await self._make_request("POST", f"/project_y/cameos/by_id/{cameo_id}/update_v2", token, json_data=json_data)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
async def upload_character_image(self, image_data: bytes, token: str) -> str:
|
async def upload_character_image(
|
||||||
|
self,
|
||||||
|
image_data: bytes,
|
||||||
|
token: str,
|
||||||
|
token_id: Optional[int] = None
|
||||||
|
) -> str:
|
||||||
"""Upload character image and return asset_pointer
|
"""Upload character image and return asset_pointer
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -1582,7 +1810,14 @@ class SoraClient:
|
|||||||
data=b"profile"
|
data=b"profile"
|
||||||
)
|
)
|
||||||
|
|
||||||
result = await self._make_request("POST", "/project_y/file/upload", token, multipart=mp)
|
result = await self._make_request(
|
||||||
|
"POST",
|
||||||
|
"/project_y/file/upload",
|
||||||
|
token,
|
||||||
|
multipart=mp,
|
||||||
|
token_id=token_id,
|
||||||
|
use_image_upload_proxy=True
|
||||||
|
)
|
||||||
return result.get("asset_pointer")
|
return result.get("asset_pointer")
|
||||||
|
|
||||||
async def delete_character(self, character_id: str, token: str) -> bool:
|
async def delete_character(self, character_id: str, token: str) -> bool:
|
||||||
@@ -1648,8 +1883,16 @@ class SoraClient:
|
|||||||
|
|
||||||
# Generate sentinel token and call /nf/create using urllib
|
# Generate sentinel token and call /nf/create using urllib
|
||||||
proxy_url = await self.proxy_manager.get_proxy_url()
|
proxy_url = await self.proxy_manager.get_proxy_url()
|
||||||
sentinel_token, user_agent = await self._generate_sentinel_token(token)
|
sentinel_context = await self._generate_sentinel_token(token)
|
||||||
result = await self._nf_create_urllib(token, json_data, sentinel_token, proxy_url, user_agent=user_agent)
|
result = await self._nf_create_urllib(
|
||||||
|
token,
|
||||||
|
json_data,
|
||||||
|
sentinel_context["sentinel_token"],
|
||||||
|
proxy_url,
|
||||||
|
user_agent=sentinel_context.get("user_agent"),
|
||||||
|
device_id=sentinel_context.get("device_id"),
|
||||||
|
cookie_header=sentinel_context.get("cookie_header"),
|
||||||
|
)
|
||||||
return result.get("id")
|
return result.get("id")
|
||||||
|
|
||||||
async def extend_video(self, generation_id: str, prompt: str, extension_duration_s: int,
|
async def extend_video(self, generation_id: str, prompt: str, extension_duration_s: int,
|
||||||
|
|||||||
@@ -374,6 +374,28 @@
|
|||||||
<input id="cfgProxyUrl" class="flex h-9 w-full rounded-md border border-input bg-background px-3 py-2 text-sm" placeholder="http://127.0.0.1:7890 或 socks5://127.0.0.1:1080">
|
<input id="cfgProxyUrl" class="flex h-9 w-full rounded-md border border-input bg-background px-3 py-2 text-sm" placeholder="http://127.0.0.1:7890 或 socks5://127.0.0.1:1080">
|
||||||
<p class="text-xs text-muted-foreground mt-1">支持 HTTP 和 SOCKS5 代理</p>
|
<p class="text-xs text-muted-foreground mt-1">支持 HTTP 和 SOCKS5 代理</p>
|
||||||
</div>
|
</div>
|
||||||
|
<div class="space-y-4 p-4 rounded-md bg-blue-50/50 dark:bg-blue-950/20 border border-blue-200 dark:border-blue-800">
|
||||||
|
<div class="flex items-center gap-2">
|
||||||
|
<svg class="h-4 w-4 text-blue-600" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
|
||||||
|
<rect x="3" y="3" width="18" height="18" rx="2" ry="2"/>
|
||||||
|
<circle cx="8.5" cy="8.5" r="1.5"/>
|
||||||
|
<polyline points="21 15 16 10 5 21"/>
|
||||||
|
</svg>
|
||||||
|
<h4 class="text-sm font-semibold text-blue-900 dark:text-blue-100">图片上传专用代理</h4>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<label class="inline-flex items-center gap-2 cursor-pointer">
|
||||||
|
<input type="checkbox" id="cfgImageUploadProxyEnabled" class="h-4 w-4 rounded border-input">
|
||||||
|
<span class="text-sm font-medium">启用图片上传专用代理</span>
|
||||||
|
</label>
|
||||||
|
<p class="text-xs text-muted-foreground mt-1">启用后,图片上传将使用下方设置的专用代理</p>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<label class="text-sm font-medium mb-2 block">图片上传代理地址</label>
|
||||||
|
<input id="cfgImageUploadProxyUrl" class="flex h-9 w-full rounded-md border border-input bg-background px-3 py-2 text-sm" placeholder="http://127.0.0.1:8888 或 socks5://127.0.0.1:1080">
|
||||||
|
<p class="text-xs text-muted-foreground mt-1">仅用于图片上传操作,未启用时将使用全局代理</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
<div>
|
<div>
|
||||||
<label class="text-sm font-medium mb-2 block">测试域名</label>
|
<label class="text-sm font-medium mb-2 block">测试域名</label>
|
||||||
<input id="cfgProxyTestUrl" class="flex h-9 w-full rounded-md border border-input bg-background px-3 py-2 text-sm" placeholder="https://sora.chatgpt.com" value="https://sora.chatgpt.com">
|
<input id="cfgProxyTestUrl" class="flex h-9 w-full rounded-md border border-input bg-background px-3 py-2 text-sm" placeholder="https://sora.chatgpt.com" value="https://sora.chatgpt.com">
|
||||||
@@ -585,6 +607,11 @@
|
|||||||
</select>
|
</select>
|
||||||
<p class="text-xs text-muted-foreground mt-2">随机轮询:随机选择可用账号;逐个轮询:每个活跃账号只使用一次,全部使用过后再开始下一轮</p>
|
<p class="text-xs text-muted-foreground mt-2">随机轮询:随机选择可用账号;逐个轮询:每个活跃账号只使用一次,全部使用过后再开始下一轮</p>
|
||||||
</div>
|
</div>
|
||||||
|
<div>
|
||||||
|
<label class="text-sm font-medium block">进度轮询间隔(秒)</label>
|
||||||
|
<input id="cfgCallLogicPollInterval" type="number" step="0.1" min="0.1" class="flex h-9 w-full rounded-md border border-input bg-background px-3 py-2 text-sm" placeholder="2.5">
|
||||||
|
<p class="text-xs text-muted-foreground mt-2">控制生成任务进度查询的时间间隔,保存后立即热更新生效</p>
|
||||||
|
</div>
|
||||||
<button onclick="saveCallLogicConfig()" class="inline-flex items-center justify-center rounded-md bg-primary text-primary-foreground hover:bg-primary/90 h-9 px-4 w-full">保存配置</button>
|
<button onclick="saveCallLogicConfig()" class="inline-flex items-center justify-center rounded-md bg-primary text-primary-foreground hover:bg-primary/90 h-9 px-4 w-full">保存配置</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -1122,10 +1149,10 @@
|
|||||||
updateAPIKey=async()=>{const newKey=$('cfgNewAPIKey').value.trim();if(!newKey)return showToast('请输入新的 API Key','error');if(newKey.length<6)return showToast('API Key 至少6个字符','error');if(!confirm('确定要更新 API Key 吗?更新后需要通知所有客户端使用新密钥。'))return;try{const r=await apiRequest('/api/admin/apikey',{method:'POST',body:JSON.stringify({new_api_key:newKey})});if(!r)return;const d=await r.json();if(d.success){showToast('API Key 更新成功','success');$('cfgCurrentAPIKey').value=newKey;$('cfgNewAPIKey').value=''}else{showToast('更新失败: '+(d.detail||'未知错误'),'error')}}catch(e){showToast('更新失败: '+e.message,'error')}},
|
updateAPIKey=async()=>{const newKey=$('cfgNewAPIKey').value.trim();if(!newKey)return showToast('请输入新的 API Key','error');if(newKey.length<6)return showToast('API Key 至少6个字符','error');if(!confirm('确定要更新 API Key 吗?更新后需要通知所有客户端使用新密钥。'))return;try{const r=await apiRequest('/api/admin/apikey',{method:'POST',body:JSON.stringify({new_api_key:newKey})});if(!r)return;const d=await r.json();if(d.success){showToast('API Key 更新成功','success');$('cfgCurrentAPIKey').value=newKey;$('cfgNewAPIKey').value=''}else{showToast('更新失败: '+(d.detail||'未知错误'),'error')}}catch(e){showToast('更新失败: '+e.message,'error')}},
|
||||||
toggleDebugMode=async()=>{const enabled=$('cfgDebugEnabled').checked;try{const r=await apiRequest('/api/admin/debug',{method:'POST',body:JSON.stringify({enabled:enabled})});if(!r)return;const d=await r.json();if(d.success){showToast(enabled?'调试模式已开启':'调试模式已关闭','success')}else{showToast('操作失败: '+(d.detail||'未知错误'),'error');$('cfgDebugEnabled').checked=!enabled}}catch(e){showToast('操作失败: '+e.message,'error');$('cfgDebugEnabled').checked=!enabled}},
|
toggleDebugMode=async()=>{const enabled=$('cfgDebugEnabled').checked;try{const r=await apiRequest('/api/admin/debug',{method:'POST',body:JSON.stringify({enabled:enabled})});if(!r)return;const d=await r.json();if(d.success){showToast(enabled?'调试模式已开启':'调试模式已关闭','success')}else{showToast('操作失败: '+(d.detail||'未知错误'),'error');$('cfgDebugEnabled').checked=!enabled}}catch(e){showToast('操作失败: '+e.message,'error');$('cfgDebugEnabled').checked=!enabled}},
|
||||||
downloadDebugLogs=async()=>{try{const token=localStorage.getItem('adminToken');if(!token){showToast('未登录','error');return}const r=await fetch('/api/admin/logs/download',{headers:{Authorization:`Bearer ${token}`}});if(!r.ok){if(r.status===404){showToast('日志文件不存在','error')}else{showToast('下载失败','error')}return}const blob=await r.blob();const url=URL.createObjectURL(blob);const link=document.createElement('a');link.href=url;link.download=`logs_${new Date().toISOString().split('T')[0]}.txt`;document.body.appendChild(link);link.click();document.body.removeChild(link);URL.revokeObjectURL(url);showToast('日志文件下载成功','success')}catch(e){showToast('下载失败: '+e.message,'error')}},
|
downloadDebugLogs=async()=>{try{const token=localStorage.getItem('adminToken');if(!token){showToast('未登录','error');return}const r=await fetch('/api/admin/logs/download',{headers:{Authorization:`Bearer ${token}`}});if(!r.ok){if(r.status===404){showToast('日志文件不存在','error')}else{showToast('下载失败','error')}return}const blob=await r.blob();const url=URL.createObjectURL(blob);const link=document.createElement('a');link.href=url;link.download=`logs_${new Date().toISOString().split('T')[0]}.txt`;document.body.appendChild(link);link.click();document.body.removeChild(link);URL.revokeObjectURL(url);showToast('日志文件下载成功','success')}catch(e){showToast('下载失败: '+e.message,'error')}},
|
||||||
loadProxyConfig=async()=>{try{const r=await apiRequest('/api/proxy/config');if(!r)return;const d=await r.json();$('cfgProxyEnabled').checked=d.proxy_enabled||false;$('cfgProxyUrl').value=d.proxy_url||''}catch(e){console.error('加载代理配置失败:',e)}},
|
loadProxyConfig=async()=>{try{const r=await apiRequest('/api/proxy/config');if(!r)return;const d=await r.json();$('cfgProxyEnabled').checked=d.proxy_enabled||false;$('cfgProxyUrl').value=d.proxy_url||'';$('cfgImageUploadProxyEnabled').checked=d.image_upload_proxy_enabled||false;$('cfgImageUploadProxyUrl').value=d.image_upload_proxy_url||''}catch(e){console.error('加载代理配置失败:',e)}},
|
||||||
setProxyStatus=(msg,type='muted')=>{const el=$('proxyStatusMessage');if(!el)return;if(!msg){el.textContent='';el.classList.add('hidden');return}el.textContent=msg;el.classList.remove('hidden','text-muted-foreground','text-green-600','text-red-600');if(type==='success')el.classList.add('text-green-600');else if(type==='error')el.classList.add('text-red-600');else el.classList.add('text-muted-foreground')},
|
setProxyStatus=(msg,type='muted')=>{const el=$('proxyStatusMessage');if(!el)return;if(!msg){el.textContent='';el.classList.add('hidden');return}el.textContent=msg;el.classList.remove('hidden','text-muted-foreground','text-green-600','text-red-600');if(type==='success')el.classList.add('text-green-600');else if(type==='error')el.classList.add('text-red-600');else el.classList.add('text-muted-foreground')},
|
||||||
testProxyConfig=async()=>{const enabled=$('cfgProxyEnabled').checked;const url=$('cfgProxyUrl').value.trim();const testUrl=$('cfgProxyTestUrl').value.trim()||'https://sora.chatgpt.com';if(!enabled||!url){setProxyStatus('代理未启用或地址为空','error');return}try{setProxyStatus('正在测试代理连接...','muted');const r=await apiRequest('/api/proxy/test',{method:'POST',body:JSON.stringify({test_url:testUrl})});if(!r)return;const d=await r.json();if(d.success){setProxyStatus(`✓ ${d.message||'代理可用'} - 测试域名: ${d.test_url||testUrl}`,'success')}else{setProxyStatus(`✗ ${d.message||'代理不可用'} - 测试域名: ${d.test_url||testUrl}`,'error')}}catch(e){setProxyStatus('代理测试失败: '+e.message,'error')}},
|
testProxyConfig=async()=>{const enabled=$('cfgProxyEnabled').checked;const url=$('cfgProxyUrl').value.trim();const testUrl=$('cfgProxyTestUrl').value.trim()||'https://sora.chatgpt.com';if(!enabled||!url){setProxyStatus('代理未启用或地址为空','error');return}try{setProxyStatus('正在测试代理连接...','muted');const r=await apiRequest('/api/proxy/test',{method:'POST',body:JSON.stringify({test_url:testUrl})});if(!r)return;const d=await r.json();if(d.success){setProxyStatus(`✓ ${d.message||'代理可用'} - 测试域名: ${d.test_url||testUrl}`,'success')}else{setProxyStatus(`✗ ${d.message||'代理不可用'} - 测试域名: ${d.test_url||testUrl}`,'error')}}catch(e){setProxyStatus('代理测试失败: '+e.message,'error')}},
|
||||||
saveProxyConfig=async()=>{try{const r=await apiRequest('/api/proxy/config',{method:'POST',body:JSON.stringify({proxy_enabled:$('cfgProxyEnabled').checked,proxy_url:$('cfgProxyUrl').value.trim()})});if(!r)return;const d=await r.json();d.success?showToast('代理配置保存成功','success'):showToast('保存失败','error')}catch(e){showToast('保存失败: '+e.message,'error')}},
|
saveProxyConfig=async()=>{try{const r=await apiRequest('/api/proxy/config',{method:'POST',body:JSON.stringify({proxy_enabled:$('cfgProxyEnabled').checked,proxy_url:$('cfgProxyUrl').value.trim(),image_upload_proxy_enabled:$('cfgImageUploadProxyEnabled').checked,image_upload_proxy_url:$('cfgImageUploadProxyUrl').value.trim()})});if(!r)return;const d=await r.json();d.success?showToast('代理配置保存成功','success'):showToast('保存失败','error')}catch(e){showToast('保存失败: '+e.message,'error')}},
|
||||||
loadWatermarkFreeConfig=async()=>{try{const r=await apiRequest('/api/watermark-free/config');if(!r)return;const d=await r.json();$('cfgWatermarkFreeEnabled').checked=d.watermark_free_enabled||false;$('cfgParseMethod').value=d.parse_method||'third_party';$('cfgCustomParseUrl').value=d.custom_parse_url||'';$('cfgCustomParseToken').value=d.custom_parse_token||'';$('cfgFallbackOnFailure').checked=d.fallback_on_failure!==false;toggleWatermarkFreeOptions();toggleCustomParseOptions()}catch(e){console.error('加载无水印模式配置失败:',e)}},
|
loadWatermarkFreeConfig=async()=>{try{const r=await apiRequest('/api/watermark-free/config');if(!r)return;const d=await r.json();$('cfgWatermarkFreeEnabled').checked=d.watermark_free_enabled||false;$('cfgParseMethod').value=d.parse_method||'third_party';$('cfgCustomParseUrl').value=d.custom_parse_url||'';$('cfgCustomParseToken').value=d.custom_parse_token||'';$('cfgFallbackOnFailure').checked=d.fallback_on_failure!==false;toggleWatermarkFreeOptions();toggleCustomParseOptions()}catch(e){console.error('加载无水印模式配置失败:',e)}},
|
||||||
saveWatermarkFreeConfig=async()=>{try{const enabled=$('cfgWatermarkFreeEnabled').checked,parseMethod=$('cfgParseMethod').value,customUrl=$('cfgCustomParseUrl').value.trim(),customToken=$('cfgCustomParseToken').value.trim(),fallbackOnFailure=$('cfgFallbackOnFailure').checked;if(enabled&&parseMethod==='custom'){if(!customUrl)return showToast('请输入解析服务器地址','error');if(!customToken)return showToast('请输入访问密钥','error')}const r=await apiRequest('/api/watermark-free/config',{method:'POST',body:JSON.stringify({watermark_free_enabled:enabled,parse_method:parseMethod,custom_parse_url:customUrl||null,custom_parse_token:customToken||null,fallback_on_failure:fallbackOnFailure})});if(!r)return;const d=await r.json();d.success?showToast('无水印模式配置保存成功','success'):showToast('保存失败','error')}catch(e){showToast('保存失败: '+e.message,'error')}},
|
saveWatermarkFreeConfig=async()=>{try{const enabled=$('cfgWatermarkFreeEnabled').checked,parseMethod=$('cfgParseMethod').value,customUrl=$('cfgCustomParseUrl').value.trim(),customToken=$('cfgCustomParseToken').value.trim(),fallbackOnFailure=$('cfgFallbackOnFailure').checked;if(enabled&&parseMethod==='custom'){if(!customUrl)return showToast('请输入解析服务器地址','error');if(!customToken)return showToast('请输入访问密钥','error')}const r=await apiRequest('/api/watermark-free/config',{method:'POST',body:JSON.stringify({watermark_free_enabled:enabled,parse_method:parseMethod,custom_parse_url:customUrl||null,custom_parse_token:customToken||null,fallback_on_failure:fallbackOnFailure})});if(!r)return;const d=await r.json();d.success?showToast('无水印模式配置保存成功','success'):showToast('保存失败','error')}catch(e){showToast('保存失败: '+e.message,'error')}},
|
||||||
toggleWatermarkFreeOptions=()=>{const enabled=$('cfgWatermarkFreeEnabled').checked;$('watermarkFreeOptions').style.display=enabled?'block':'none'},
|
toggleWatermarkFreeOptions=()=>{const enabled=$('cfgWatermarkFreeEnabled').checked;$('watermarkFreeOptions').style.display=enabled?'block':'none'},
|
||||||
@@ -1147,8 +1174,8 @@
|
|||||||
logout=()=>{if(!confirm('确定要退出登录吗?'))return;localStorage.removeItem('adminToken');location.href='/login'},
|
logout=()=>{if(!confirm('确定要退出登录吗?'))return;localStorage.removeItem('adminToken');location.href='/login'},
|
||||||
loadCharacters=async()=>{try{const r=await apiRequest('/api/characters');if(!r)return;const d=await r.json();const g=$('charactersGrid');if(!d||d.length===0){g.innerHTML='<div class="col-span-full text-center py-8 text-muted-foreground">暂无角色卡</div>';return}g.innerHTML=d.map(c=>`<div class="rounded-lg border border-border bg-background p-4"><div class="flex items-start gap-3"><img src="${c.avatar_path||'/static/favicon.ico'}" class="h-14 w-14 rounded-lg object-cover" onerror="this.src='/static/favicon.ico'"/><div class="flex-1 min-w-0"><div class="font-semibold truncate">${c.display_name||c.username}</div><div class="text-xs text-muted-foreground truncate">@${c.username}</div>${c.description?`<div class="text-xs text-muted-foreground mt-1 line-clamp-2">${c.description}</div>`:''}</div></div><div class="mt-3 flex gap-2"><button onclick="deleteCharacter(${c.id})" class="flex-1 inline-flex items-center justify-center rounded-md border border-destructive text-destructive hover:bg-destructive hover:text-white h-8 px-3 text-sm transition-colors">删除</button></div></div>`).join('')}catch(e){showToast('加载失败: '+e.message,'error')}},
|
loadCharacters=async()=>{try{const r=await apiRequest('/api/characters');if(!r)return;const d=await r.json();const g=$('charactersGrid');if(!d||d.length===0){g.innerHTML='<div class="col-span-full text-center py-8 text-muted-foreground">暂无角色卡</div>';return}g.innerHTML=d.map(c=>`<div class="rounded-lg border border-border bg-background p-4"><div class="flex items-start gap-3"><img src="${c.avatar_path||'/static/favicon.ico'}" class="h-14 w-14 rounded-lg object-cover" onerror="this.src='/static/favicon.ico'"/><div class="flex-1 min-w-0"><div class="font-semibold truncate">${c.display_name||c.username}</div><div class="text-xs text-muted-foreground truncate">@${c.username}</div>${c.description?`<div class="text-xs text-muted-foreground mt-1 line-clamp-2">${c.description}</div>`:''}</div></div><div class="mt-3 flex gap-2"><button onclick="deleteCharacter(${c.id})" class="flex-1 inline-flex items-center justify-center rounded-md border border-destructive text-destructive hover:bg-destructive hover:text-white h-8 px-3 text-sm transition-colors">删除</button></div></div>`).join('')}catch(e){showToast('加载失败: '+e.message,'error')}},
|
||||||
deleteCharacter=async(id)=>{if(!confirm('确定要删除这个角色卡吗?'))return;try{const r=await apiRequest(`/api/characters/${id}`,{method:'DELETE'});if(!r)return;const d=await r.json();if(d.success){showToast('删除成功','success');await loadCharacters()}else{showToast('删除失败','error')}}catch(e){showToast('删除失败: '+e.message,'error')}},
|
deleteCharacter=async(id)=>{if(!confirm('确定要删除这个角色卡吗?'))return;try{const r=await apiRequest(`/api/characters/${id}`,{method:'DELETE'});if(!r)return;const d=await r.json();if(d.success){showToast('删除成功','success');await loadCharacters()}else{showToast('删除失败','error')}}catch(e){showToast('删除失败: '+e.message,'error')}},
|
||||||
loadCallLogicConfig=async()=>{try{const r=await apiRequest('/api/call-logic/config');if(!r)return;const d=await r.json();if(d.success&&d.config){const mode=d.config.call_mode||((d.config.polling_mode_enabled||false)?'polling':'default');$('cfgCallLogicMode').value=mode}else{console.error('调用逻辑配置数据格式错误:',d)}}catch(e){console.error('加载调用逻辑配置失败:',e)}},
|
loadCallLogicConfig=async()=>{try{const r=await apiRequest('/api/call-logic/config');if(!r)return;const d=await r.json();if(d.success&&d.config){const mode=d.config.call_mode||((d.config.polling_mode_enabled||false)?'polling':'default');const pollInterval=Number(d.config.poll_interval||2.5);$('cfgCallLogicMode').value=mode;$('cfgCallLogicPollInterval').value=Number.isFinite(pollInterval)&&pollInterval>0?pollInterval:2.5}else{console.error('调用逻辑配置数据格式错误:',d)}}catch(e){console.error('加载调用逻辑配置失败:',e)}},
|
||||||
saveCallLogicConfig=async()=>{try{const mode=$('cfgCallLogicMode').value||'default';const r=await apiRequest('/api/call-logic/config',{method:'POST',body:JSON.stringify({call_mode:mode})});if(!r)return;const d=await r.json();if(d.success){showToast('调用逻辑配置保存成功','success')}else{showToast('保存失败','error')}}catch(e){showToast('保存失败: '+e.message,'error')}},
|
saveCallLogicConfig=async()=>{try{const mode=$('cfgCallLogicMode').value||'default';const pollInterval=parseFloat($('cfgCallLogicPollInterval').value||'2.5');if(!Number.isFinite(pollInterval)||pollInterval<=0)return showToast('进度轮询间隔必须大于0','error');const r=await apiRequest('/api/call-logic/config',{method:'POST',body:JSON.stringify({call_mode:mode,poll_interval:pollInterval})});if(!r)return;const d=await r.json();if(d.success){showToast('调用逻辑配置保存成功(已立即生效)','success');await loadCallLogicConfig()}else{showToast('保存失败','error')}}catch(e){showToast('保存失败: '+e.message,'error')}},
|
||||||
loadPowConfig=async()=>{try{const r=await apiRequest('/api/pow/config');if(!r)return;const d=await r.json();if(d.success&&d.config){$('cfgPowMode').value=d.config.mode||'local';$('cfgPowUseTokenForPow').checked=d.config.use_token_for_pow||false;$('cfgPowServerUrl').value=d.config.server_url||'';$('cfgPowApiKey').value=d.config.api_key||'';$('cfgPowProxyEnabled').checked=d.config.proxy_enabled||false;$('cfgPowProxyUrl').value=d.config.proxy_url||'';togglePowFields();togglePowProxyFields()}else{console.error('POW配置数据格式错误:',d)}}catch(e){console.error('加载POW配置失败:',e)}},
|
loadPowConfig=async()=>{try{const r=await apiRequest('/api/pow/config');if(!r)return;const d=await r.json();if(d.success&&d.config){$('cfgPowMode').value=d.config.mode||'local';$('cfgPowUseTokenForPow').checked=d.config.use_token_for_pow||false;$('cfgPowServerUrl').value=d.config.server_url||'';$('cfgPowApiKey').value=d.config.api_key||'';$('cfgPowProxyEnabled').checked=d.config.proxy_enabled||false;$('cfgPowProxyUrl').value=d.config.proxy_url||'';togglePowFields();togglePowProxyFields()}else{console.error('POW配置数据格式错误:',d)}}catch(e){console.error('加载POW配置失败:',e)}},
|
||||||
savePowConfig=async()=>{try{const mode=$('cfgPowMode').value;const useTokenForPow=$('cfgPowUseTokenForPow').checked;const serverUrl=$('cfgPowServerUrl').value.trim();const apiKey=$('cfgPowApiKey').value.trim();const proxyEnabled=$('cfgPowProxyEnabled').checked;const proxyUrl=$('cfgPowProxyUrl').value.trim();if(mode==='external'){if(!serverUrl)return showToast('请输入服务器地址','error');if(!apiKey)return showToast('请输入API密钥','error')}const r=await apiRequest('/api/pow/config',{method:'POST',body:JSON.stringify({mode:mode,use_token_for_pow:useTokenForPow,server_url:serverUrl||null,api_key:apiKey||null,proxy_enabled:proxyEnabled,proxy_url:proxyUrl||null})});if(!r)return;const d=await r.json();if(d.success){showToast('POW配置保存成功','success')}else{showToast('保存失败','error')}}catch(e){showToast('保存失败: '+e.message,'error')}},
|
savePowConfig=async()=>{try{const mode=$('cfgPowMode').value;const useTokenForPow=$('cfgPowUseTokenForPow').checked;const serverUrl=$('cfgPowServerUrl').value.trim();const apiKey=$('cfgPowApiKey').value.trim();const proxyEnabled=$('cfgPowProxyEnabled').checked;const proxyUrl=$('cfgPowProxyUrl').value.trim();if(mode==='external'){if(!serverUrl)return showToast('请输入服务器地址','error');if(!apiKey)return showToast('请输入API密钥','error')}const r=await apiRequest('/api/pow/config',{method:'POST',body:JSON.stringify({mode:mode,use_token_for_pow:useTokenForPow,server_url:serverUrl||null,api_key:apiKey||null,proxy_enabled:proxyEnabled,proxy_url:proxyUrl||null})});if(!r)return;const d=await r.json();if(d.success){showToast('POW配置保存成功','success')}else{showToast('保存失败','error')}}catch(e){showToast('保存失败: '+e.message,'error')}},
|
||||||
// Backwards-compatible aliases: older UI hooks still call the pow-proxy / pow-service names.
loadPowProxyConfig=loadPowConfig,savePowProxyConfig=savePowConfig,loadPowServiceConfig=loadPowConfig,savePowServiceConfig=savePowConfig,
|
// Backwards-compatible aliases: older UI hooks still call the pow-proxy / pow-service names.
loadPowProxyConfig=loadPowConfig,savePowProxyConfig=savePowConfig,loadPowServiceConfig=loadPowConfig,savePowServiceConfig=savePowConfig,
|
||||||
|
|||||||
Reference in New Issue
Block a user