Mirror of https://github.com/TheSmallHanCat/sora2api.git, synced 2026-02-15 02:26:11 +08:00

feat: add detailed request logging with hot-reload support; stop counting overload errors toward the error-ban threshold; show full request/response details in the request log
fix: update the progress query endpoint; refactor: remove public-video handling from the watermark-removal logic; Close #38
@@ -701,9 +701,20 @@ async def get_logs(limit: int = 100, token: str = Depends(verify_admin_token)):
         "operation": log.get("operation"),
         "status_code": log.get("status_code"),
         "duration": log.get("duration"),
-        "created_at": log.get("created_at")
+        "created_at": log.get("created_at"),
+        "request_body": log.get("request_body"),
+        "response_body": log.get("response_body")
     } for log in logs]

+@router.delete("/api/logs")
+async def clear_logs(token: str = Depends(verify_admin_token)):
+    """Clear all logs"""
+    try:
+        await db.clear_all_logs()
+        return {"success": True, "message": "所有日志已清空"}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
 # Cache config endpoints
 @router.post("/api/cache/config")
 async def update_cache_timeout(
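For illustration, a minimal sketch of exercising the two admin log endpoints touched above. The base URL and the way the admin token is passed are assumptions; the diff only shows `Depends(verify_admin_token)`, not the transport.

import asyncio
import httpx

async def main():
    # Hypothetical deployment URL and token transport; adjust to your setup.
    base, params = "http://localhost:8000", {"token": "my-admin-token"}
    async with httpx.AsyncClient() as client:
        # Fetch recent logs, which now include request/response bodies.
        logs = await client.get(f"{base}/api/logs", params={**params, "limit": 10})
        print(logs.json())
        # Wipe all request logs via the new DELETE endpoint.
        cleared = await client.delete(f"{base}/api/logs", params=params)
        print(cleared.json())

asyncio.run(main())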
@@ -741,8 +741,13 @@ class Database:
             """, (today, token_id))
             await db.commit()

-    async def increment_error_count(self, token_id: int):
-        """Increment error count (both total and consecutive)"""
+    async def increment_error_count(self, token_id: int, increment_consecutive: bool = True):
+        """Increment error count
+
+        Args:
+            token_id: Token ID
+            increment_consecutive: Whether to increment consecutive error count (False for overload errors)
+        """
        from datetime import date
        async with aiosqlite.connect(self.db_path) as db:
            today = str(date.today())
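Callers that classify a failure as an overload can now skip the consecutive counter; the two call shapes, directly from this signature (`db` being an instance of this Database class):

# Normal error: bumps total, consecutive, and today's counters.
await db.increment_error_count(token_id)
# Overload error: bumps total and today's counters only.
await db.increment_error_count(token_id, increment_consecutive=False)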
@@ -752,26 +757,46 @@ class Database:

             # If date changed, reset today's error count
             if row and row[0] != today:
-                await db.execute("""
-                    UPDATE token_stats
-                    SET error_count = error_count + 1,
-                        consecutive_error_count = consecutive_error_count + 1,
-                        today_error_count = 1,
-                        today_date = ?,
-                        last_error_at = CURRENT_TIMESTAMP
-                    WHERE token_id = ?
-                """, (today, token_id))
+                if increment_consecutive:
+                    await db.execute("""
+                        UPDATE token_stats
+                        SET error_count = error_count + 1,
+                            consecutive_error_count = consecutive_error_count + 1,
+                            today_error_count = 1,
+                            today_date = ?,
+                            last_error_at = CURRENT_TIMESTAMP
+                        WHERE token_id = ?
+                    """, (today, token_id))
+                else:
+                    await db.execute("""
+                        UPDATE token_stats
+                        SET error_count = error_count + 1,
+                            today_error_count = 1,
+                            today_date = ?,
+                            last_error_at = CURRENT_TIMESTAMP
+                        WHERE token_id = ?
+                    """, (today, token_id))
             else:
-                # Same day, just increment all counters
-                await db.execute("""
-                    UPDATE token_stats
-                    SET error_count = error_count + 1,
-                        consecutive_error_count = consecutive_error_count + 1,
-                        today_error_count = today_error_count + 1,
-                        today_date = ?,
-                        last_error_at = CURRENT_TIMESTAMP
-                    WHERE token_id = ?
-                """, (today, token_id))
+                # Same day, just increment counters
+                if increment_consecutive:
+                    await db.execute("""
+                        UPDATE token_stats
+                        SET error_count = error_count + 1,
+                            consecutive_error_count = consecutive_error_count + 1,
+                            today_error_count = today_error_count + 1,
+                            today_date = ?,
+                            last_error_at = CURRENT_TIMESTAMP
+                        WHERE token_id = ?
+                    """, (today, token_id))
+                else:
+                    await db.execute("""
+                        UPDATE token_stats
+                        SET error_count = error_count + 1,
+                            today_error_count = today_error_count + 1,
+                            today_date = ?,
+                            last_error_at = CURRENT_TIMESTAMP
+                        WHERE token_id = ?
+                    """, (today, token_id))
             await db.commit()

     async def reset_error_count(self, token_id: int):
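The four UPDATE statements above differ only in whether the consecutive counter is bumped and whether today's counter resets. A more compact, equivalent formulation (a refactoring sketch, not what the commit does) would build the SET clause once; the clause fragments are our own literals, so the f-string is injection-safe:

# Sketch: one statement instead of four, assuming the same token_stats schema.
reset_today = bool(row and row[0] != today)
set_clauses = [
    "error_count = error_count + 1",
    "today_error_count = 1" if reset_today else "today_error_count = today_error_count + 1",
    "today_date = ?",
    "last_error_at = CURRENT_TIMESTAMP",
]
if increment_consecutive:
    set_clauses.insert(1, "consecutive_error_count = consecutive_error_count + 1")
await db.execute(
    f"UPDATE token_stats SET {', '.join(set_clauses)} WHERE token_id = ?",
    (today, token_id),
)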
@@ -848,7 +873,13 @@ class Database:
             """, (limit,))
             rows = await cursor.fetchall()
             return [dict(row) for row in rows]

+    async def clear_all_logs(self):
+        """Clear all request logs"""
+        async with aiosqlite.connect(self.db_path) as db:
+            await db.execute("DELETE FROM request_logs")
+            await db.commit()
+
     # Admin config operations
     async def get_admin_config(self) -> AdminConfig:
         """Get admin configuration"""
@@ -67,16 +67,20 @@ class DebugLogger:
         proxy: Optional[str] = None
     ):
         """Log API request details to log.txt"""

+        # Check if debug mode is enabled
+        if not config.debug_enabled:
+            return
+
         try:
             self._write_separator()
             self.logger.info(f"🔵 [REQUEST] {self._format_timestamp()}")
             self._write_separator("-")

             # Basic info
             self.logger.info(f"Method: {method}")
             self.logger.info(f"URL: {url}")

             # Headers
             self.logger.info("\n📋 Headers:")
             masked_headers = dict(headers)
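Checking `config.debug_enabled` inside every log call, rather than once at startup, is what makes the commit's hot-reload claim work: flipping the flag on the live `config` object takes effect on the very next request, with no restart. A minimal standalone sketch of the idea, with a hypothetical `Config` class standing in for the project's real config module:

# Sketch: why a per-call check hot-reloads, assuming config is a shared
# mutable object (the actual reload mechanism isn't shown in this diff).
class Config:
    debug_enabled: bool = False

config = Config()

def log_request(message: str):
    if not config.debug_enabled:   # re-evaluated on every call...
        return
    print(message)

log_request("hidden")              # no output: debug is off
config.debug_enabled = True        # e.g. toggled from an admin endpoint
log_request("visible")             # ...so the change applies immediately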
@@ -85,10 +89,10 @@ class DebugLogger:
             if auth_value.startswith("Bearer "):
                 token = auth_value[7:]
                 masked_headers["Authorization"] = f"Bearer {self._mask_token(token)}"

             for key, value in masked_headers.items():
                 self.logger.info(f"  {key}: {value}")

             # Body
             if body is not None:
                 self.logger.info("\n📦 Request Body:")
@@ -97,7 +101,7 @@ class DebugLogger:
                     self.logger.info(body_str)
                 else:
                     self.logger.info(str(body))

             # Files
             if files:
                 self.logger.info("\n📎 Files:")
@@ -112,14 +116,14 @@ class DebugLogger:
                 except (AttributeError, TypeError):
                     # Fallback for objects that don't support iteration
                     self.logger.info("  <binary file data>")

             # Proxy
             if proxy:
                 self.logger.info(f"\n🌐 Proxy: {proxy}")

             self._write_separator()
             self.logger.info("")  # Empty line

         except Exception as e:
             self.logger.error(f"Error logging request: {e}")
@@ -131,25 +135,29 @@ class DebugLogger:
         duration_ms: Optional[float] = None
     ):
         """Log API response details to log.txt"""

+        # Check if debug mode is enabled
+        if not config.debug_enabled:
+            return
+
         try:
             self._write_separator()
             self.logger.info(f"🟢 [RESPONSE] {self._format_timestamp()}")
             self._write_separator("-")

             # Status
             status_emoji = "✅" if 200 <= status_code < 300 else "❌"
             self.logger.info(f"Status: {status_code} {status_emoji}")

             # Duration
             if duration_ms is not None:
                 self.logger.info(f"Duration: {duration_ms:.2f}ms")

             # Headers
             self.logger.info("\n📋 Response Headers:")
             for key, value in headers.items():
                 self.logger.info(f"  {key}: {value}")

             # Body
             self.logger.info("\n📦 Response Body:")
             if isinstance(body, (dict, list)):
@@ -169,7 +177,7 @@ class DebugLogger:
                 self.logger.info(body)
             else:
                 self.logger.info(str(body))

             self._write_separator()
             self.logger.info("")  # Empty line
@@ -183,17 +191,21 @@ class DebugLogger:
         response_text: Optional[str] = None
     ):
         """Log API error details to log.txt"""

+        # Check if debug mode is enabled
+        if not config.debug_enabled:
+            return
+
         try:
             self._write_separator()
             self.logger.info(f"🔴 [ERROR] {self._format_timestamp()}")
             self._write_separator("-")

             if status_code:
                 self.logger.info(f"Status Code: {status_code}")

             self.logger.info(f"Error Message: {error_message}")

             if response_text:
                 self.logger.info("\n📦 Error Response:")
                 # Try to parse as JSON
@@ -207,15 +219,20 @@ class DebugLogger:
                     self.logger.info(f"{response_text[:2000]}... (truncated)")
                 else:
                     self.logger.info(response_text)

             self._write_separator()
             self.logger.info("")  # Empty line

         except Exception as e:
             self.logger.error(f"Error logging error: {e}")

     def log_info(self, message: str):
         """Log general info message to log.txt"""
+
+        # Check if debug mode is enabled
+        if not config.debug_enabled:
+            return
+
         try:
             self.logger.info(f"ℹ️ [{self._format_timestamp()}] {message}")
         except Exception as e:
@@ -404,13 +404,31 @@ class GenerationHandler:
             if is_video and self.concurrency_manager:
                 await self.concurrency_manager.release_video(token_obj.id)

-            # Log successful request
+            # Log successful request with complete task info
             duration = time.time() - start_time
+
+            # Get complete task info from database
+            task_info = await self.db.get_task(task_id)
+            response_data = {
+                "task_id": task_id,
+                "status": "success",
+                "prompt": prompt,
+                "model": model
+            }
+
+            # Add result_urls if available
+            if task_info and task_info.result_urls:
+                try:
+                    result_urls = json.loads(task_info.result_urls)
+                    response_data["result_urls"] = result_urls
+                except:
+                    response_data["result_urls"] = task_info.result_urls
+
             await self._log_request(
                 token_obj.id,
                 f"generate_{model_config['type']}",
                 {"model": model, "prompt": prompt, "has_image": image is not None},
-                {"task_id": task_id, "status": "success"},
+                response_data,
                 200,
                 duration
             )
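With this change, a successful generation is logged with the task's final output rather than a bare status stub. A plausible resulting log row as returned by GET /api/logs — illustrative only, every value below is made up, not captured from a real run:

# Hypothetical /api/logs entry after this commit.
{
    "operation": "generate_video",
    "status_code": 200,
    "duration": 42.7,
    "created_at": "2026-02-14 18:26:11",
    "request_body": {"model": "sora-2", "prompt": "a cat surfing", "has_image": False},
    "response_body": {
        "task_id": "task_abc123",
        "status": "success",
        "prompt": "a cat surfing",
        "model": "sora-2",
        "result_urls": ["https://example.com/video.mp4"]
    }
}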
@@ -427,9 +445,11 @@ class GenerationHandler:
             if is_video and token_obj and self.concurrency_manager:
                 await self.concurrency_manager.release_video(token_obj.id)

-            # Record error
+            # Record error (check if it's an overload error)
             if token_obj:
-                await self.token_manager.record_error(token_obj.id)
+                error_str = str(e).lower()
+                is_overload = "heavy_load" in error_str or "under heavy load" in error_str
+                await self.token_manager.record_error(token_obj.id, is_overload=is_overload)

             # Log failed request
             duration = time.time() - start_time
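The same three-line overload check recurs in the two handlers below; a small helper (hypothetical, not part of the commit) would keep the heuristic in one place:

def is_overload_error(exc: Exception) -> bool:
    """Heuristic from this commit: match Sora's heavy-load error text."""
    error_str = str(exc).lower()
    return "heavy_load" in error_str or "under heavy load" in error_str

# At each call site:
#     await self.token_manager.record_error(token_obj.id, is_overload=is_overload_error(e))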
@@ -1254,9 +1274,11 @@ class GenerationHandler:
                 await self.token_manager.record_success(token_obj.id, is_video=True)

         except Exception as e:
-            # Record error
+            # Record error (check if it's an overload error)
             if token_obj:
-                await self.token_manager.record_error(token_obj.id)
+                error_str = str(e).lower()
+                is_overload = "heavy_load" in error_str or "under heavy load" in error_str
+                await self.token_manager.record_error(token_obj.id, is_overload=is_overload)
             debug_logger.log_error(
                 error_message=f"Character and video generation failed: {str(e)}",
                 status_code=500,
@@ -1341,9 +1363,11 @@ class GenerationHandler:
                 await self.token_manager.record_success(token_obj.id, is_video=True)

         except Exception as e:
-            # Record error
+            # Record error (check if it's an overload error)
             if token_obj:
-                await self.token_manager.record_error(token_obj.id)
+                error_str = str(e).lower()
+                is_overload = "heavy_load" in error_str or "under heavy load" in error_str
+                await self.token_manager.record_error(token_obj.id, is_overload=is_overload)
             debug_logger.log_error(
                 error_message=f"Remix generation failed: {str(e)}",
                 status_code=500,
@@ -291,7 +291,7 @@ class SoraClient:
         Returns:
             List of pending tasks with progress information
         """
-        result = await self._make_request("GET", "/nf/pending", token)
+        result = await self._make_request("GET", "/nf/pending/v2", token)
         # The API returns a list directly
         return result if isinstance(result, list) else []
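The progress-query fix is a one-line swap to the v2 pending endpoint. A usage sketch — the enclosing method name `get_pending_tasks` is an assumption, since the diff does not show the method signature:

# Hypothetical polling of the patched method; per its docstring, each
# entry is a pending task carrying progress information.
pending = await sora_client.get_pending_tasks(token)
for task in pending:
    print(task)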
@@ -982,16 +982,22 @@ class TokenManager:
         else:
             await self.db.increment_image_count(token_id)

-    async def record_error(self, token_id: int):
-        """Record token error"""
-        await self.db.increment_error_count(token_id)
-
-        # Check if should ban
-        stats = await self.db.get_token_stats(token_id)
-        admin_config = await self.db.get_admin_config()
-
-        if stats and stats.consecutive_error_count >= admin_config.error_ban_threshold:
-            await self.db.update_token_status(token_id, False)
+    async def record_error(self, token_id: int, is_overload: bool = False):
+        """Record token error
+
+        Args:
+            token_id: Token ID
+            is_overload: Whether this is an overload error (heavy_load). If True, only increment total error count.
+        """
+        await self.db.increment_error_count(token_id, increment_consecutive=not is_overload)
+
+        # Check if should ban (only if not overload error)
+        if not is_overload:
+            stats = await self.db.get_token_stats(token_id)
+            admin_config = await self.db.get_admin_config()
+
+            if stats and stats.consecutive_error_count >= admin_config.error_ban_threshold:
+                await self.db.update_token_status(token_id, False)

     async def record_success(self, token_id: int, is_video: bool = False):
         """Record successful request (reset error count)"""
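Net effect of the threshold change as a worked example, with an assumed `error_ban_threshold` of 3 and `tm` a TokenManager instance: ordinary errors advance the streak toward a ban, while heavy-load errors bump the total error count but neither advance nor reset the streak.

# Illustrative sequence, assuming error_ban_threshold = 3.
await tm.record_error(token_id)                     # consecutive = 1
await tm.record_error(token_id, is_overload=True)   # consecutive still 1
await tm.record_error(token_id, is_overload=True)   # consecutive still 1
await tm.record_error(token_id)                     # consecutive = 2
await tm.record_error(token_id)                     # consecutive = 3 -> token disabled
# A success in between resets the streak (see record_success's docstring).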