Mirror of https://github.com/TheSmallHanCat/sora2api.git, synced 2026-02-13 17:34:42 +08:00
feat: add resource cache switch and automatic AT refresh; fix: invite code copying, mobile scrolling, and image/video output formatting
@@ -229,6 +229,8 @@ class GenerationHandler:
         start_time = time.time()
         last_heartbeat_time = start_time  # Track last heartbeat for image generation
         heartbeat_interval = 10  # Send heartbeat every 10 seconds for image generation
+        last_status_output_time = start_time  # Track last status output time for video generation
+        video_status_interval = 30  # Output status every 30 seconds for video generation
 
         debug_logger.log_info(f"Starting task polling: task_id={task_id}, is_video={is_video}, timeout={timeout}s, max_attempts={max_attempts}")
 
@@ -275,16 +277,18 @@ class GenerationHandler:
                     else:
                         progress_pct = int(progress_pct * 100)
 
-                    # Only yield progress update if it changed
-                    if progress_pct != last_progress:
-                        last_progress = progress_pct
-                        status = task.get("status", "processing")
-                        debug_logger.log_info(f"Task {task_id} progress: {progress_pct}% (status: {status})")
+                    # Update last_progress for tracking
+                    last_progress = progress_pct
+                    status = task.get("status", "processing")
 
-                        if stream:
-                            yield self._format_stream_chunk(
-                                reasoning_content=f"**Video Generation Progress**: {progress_pct}% ({status})\n"
-                            )
+                    # Output status every 30 seconds (not just when progress changes)
+                    current_time = time.time()
+                    if stream and (current_time - last_status_output_time >= video_status_interval):
+                        last_status_output_time = current_time
+                        debug_logger.log_info(f"Task {task_id} progress: {progress_pct}% (status: {status})")
+                        yield self._format_stream_chunk(
+                            reasoning_content=f"**Video Generation Progress**: {progress_pct}% ({status})\n"
+                        )
                     break
 
             # If task not found in pending tasks, it's completed - fetch from drafts
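The hunk above replaces change-triggered progress output with time-gated output: the old code stayed silent whenever the reported percentage did not move, while the new code emits a status line at most once every `video_status_interval` seconds. A minimal, self-contained sketch of this gating pattern (the `poll_status` and `emit` callables are illustrative stand-ins, not names from the repo):

```python
import time

def stream_progress(poll_status, emit, interval=30, poll_delay=5):
    """Emit a status line at most once per `interval` seconds,
    even if the reported progress has not changed."""
    last_output = time.time()
    while True:
        progress, status = poll_status()  # e.g. (42, "processing")
        now = time.time()
        if now - last_output >= interval:
            last_output = now
            emit(f"**Video Generation Progress**: {progress}% ({status})\n")
        if status in ("completed", "failed"):
            return status
        time.sleep(poll_delay)
```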
@@ -356,43 +360,51 @@ class GenerationHandler:
 
                         if stream:
                             yield self._format_stream_chunk(
-                                reasoning_content=f"Video published successfully. Post ID: {post_id}\nNow caching watermark-free video...\n"
+                                reasoning_content=f"Video published successfully. Post ID: {post_id}\nNow {'caching' if config.cache_enabled else 'preparing'} watermark-free video...\n"
                             )
 
-                        # Cache watermark-free video
-                        try:
-                            cached_filename = await self.file_cache.download_and_cache(watermark_free_url, "video")
-                            local_url = f"{self._get_base_url()}/tmp/{cached_filename}"
-                            if stream:
-                                yield self._format_stream_chunk(
-                                    reasoning_content="Watermark-free video cached successfully. Preparing final response...\n"
-                                )
-
-                            # Delete the published post after caching
-                            try:
-                                debug_logger.log_info(f"Deleting published post: {post_id}")
-                                await self.sora_client.delete_post(post_id, token)
-                                debug_logger.log_info(f"Published post deleted successfully: {post_id}")
-                                if stream:
-                                    yield self._format_stream_chunk(
-                                        reasoning_content="Published post deleted successfully.\n"
-                                    )
-                            except Exception as delete_error:
-                                debug_logger.log_error(
-                                    error_message=f"Failed to delete published post {post_id}: {str(delete_error)}",
-                                    status_code=500,
-                                    response_text=str(delete_error)
-                                )
-                                if stream:
-                                    yield self._format_stream_chunk(
-                                        reasoning_content=f"Warning: Failed to delete published post - {str(delete_error)}\n"
-                                    )
-                        except Exception as cache_error:
-                            # Fallback to watermark-free URL if caching fails
-                            local_url = watermark_free_url
-                            if stream:
-                                yield self._format_stream_chunk(
-                                    reasoning_content=f"Warning: Failed to cache file - {str(cache_error)}\nUsing original watermark-free URL instead...\n"
-                                )
+                        # Cache watermark-free video (if cache enabled)
+                        if config.cache_enabled:
+                            try:
+                                cached_filename = await self.file_cache.download_and_cache(watermark_free_url, "video")
+                                local_url = f"{self._get_base_url()}/tmp/{cached_filename}"
+                                if stream:
+                                    yield self._format_stream_chunk(
+                                        reasoning_content="Watermark-free video cached successfully. Preparing final response...\n"
+                                    )
+                            except Exception as cache_error:
+                                # Fallback to watermark-free URL if caching fails
+                                local_url = watermark_free_url
+                                if stream:
+                                    yield self._format_stream_chunk(
+                                        reasoning_content=f"Warning: Failed to cache file - {str(cache_error)}\nUsing original watermark-free URL instead...\n"
+                                    )
+                        else:
+                            # Cache disabled: use watermark-free URL directly
+                            local_url = watermark_free_url
+                            if stream:
+                                yield self._format_stream_chunk(
+                                    reasoning_content="Cache is disabled. Using watermark-free URL directly...\n"
+                                )
+
+                        # Delete the published post after caching
+                        try:
+                            debug_logger.log_info(f"Deleting published post: {post_id}")
+                            await self.sora_client.delete_post(post_id, token)
+                            debug_logger.log_info(f"Published post deleted successfully: {post_id}")
+                            if stream:
+                                yield self._format_stream_chunk(
+                                    reasoning_content="Published post deleted successfully.\n"
+                                )
+                        except Exception as delete_error:
+                            debug_logger.log_error(
+                                error_message=f"Failed to delete published post {post_id}: {str(delete_error)}",
+                                status_code=500,
+                                response_text=str(delete_error)
+                            )
+                            if stream:
+                                yield self._format_stream_chunk(
+                                    reasoning_content=f"Warning: Failed to delete published post - {str(delete_error)}\n"
+                                )
 
                     except Exception as publish_error:
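Two things change in this hunk: the caching attempt is wrapped in an `if config.cache_enabled:` gate with an explicit disabled branch, and the post deletion is hoisted out of the cache `try` block so the published post is cleaned up whether or not caching runs. The resulting three-way URL resolution (cache succeeds, cache fails, cache disabled) recurs throughout this commit; a reduced sketch of it, with the repo's `FileCache` and config passed in as plain parameters:

```python
async def resolve_media_url(url: str, media_type: str, cache_enabled: bool,
                            file_cache, base_url: str) -> str:
    """Return a locally cached URL when possible, else the original remote URL."""
    if not cache_enabled:
        # Cache disabled: hand back the remote URL untouched
        return url
    try:
        filename = await file_cache.download_and_cache(url, media_type)
        return f"{base_url}/tmp/{filename}"
    except Exception:
        # Caching failed: fall back to the remote URL
        return url
```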
@@ -410,34 +422,45 @@ class GenerationHandler:
                             url = item.get("downloadable_url") or item.get("url")
                             if not url:
                                 raise Exception("Video URL not found")
-                            try:
-                                cached_filename = await self.file_cache.download_and_cache(url, "video")
-                                local_url = f"{self._get_base_url()}/tmp/{cached_filename}"
-                            except Exception as cache_error:
-                                local_url = url
+                            if config.cache_enabled:
+                                try:
+                                    cached_filename = await self.file_cache.download_and_cache(url, "video")
+                                    local_url = f"{self._get_base_url()}/tmp/{cached_filename}"
+                                except Exception as cache_error:
+                                    local_url = url
+                            else:
+                                local_url = url
                         else:
                             # Normal mode: use downloadable_url instead of url
                             url = item.get("downloadable_url") or item.get("url")
                             if url:
-                                # Cache video file
-                                if stream:
-                                    yield self._format_stream_chunk(
-                                        reasoning_content="**Video Generation Completed**\n\nVideo generation successful. Now caching the video file...\n"
-                                    )
-
-                                try:
-                                    cached_filename = await self.file_cache.download_and_cache(url, "video")
-                                    local_url = f"{self._get_base_url()}/tmp/{cached_filename}"
-                                    if stream:
-                                        yield self._format_stream_chunk(
-                                            reasoning_content="Video file cached successfully. Preparing final response...\n"
-                                        )
-                                except Exception as cache_error:
-                                    # Fallback to original URL if caching fails
-                                    local_url = url
-                                    if stream:
-                                        yield self._format_stream_chunk(
-                                            reasoning_content=f"Warning: Failed to cache file - {str(cache_error)}\nUsing original URL instead...\n"
-                                        )
+                                # Cache video file (if cache enabled)
+                                if config.cache_enabled:
+                                    if stream:
+                                        yield self._format_stream_chunk(
+                                            reasoning_content="**Video Generation Completed**\n\nVideo generation successful. Now caching the video file...\n"
+                                        )
+
+                                    try:
+                                        cached_filename = await self.file_cache.download_and_cache(url, "video")
+                                        local_url = f"{self._get_base_url()}/tmp/{cached_filename}"
+                                        if stream:
+                                            yield self._format_stream_chunk(
+                                                reasoning_content="Video file cached successfully. Preparing final response...\n"
+                                            )
+                                    except Exception as cache_error:
+                                        # Fallback to original URL if caching fails
+                                        local_url = url
+                                        if stream:
+                                            yield self._format_stream_chunk(
+                                                reasoning_content=f"Warning: Failed to cache file - {str(cache_error)}\nUsing original URL instead...\n"
+                                            )
+                                else:
+                                    # Cache disabled: use original URL directly
+                                    local_url = url
+                                    if stream:
+                                        yield self._format_stream_chunk(
+                                            reasoning_content="**Video Generation Completed**\n\nCache is disabled. Using original URL directly...\n"
+                                        )
 
                             # Task completed
@@ -482,27 +505,37 @@ class GenerationHandler:
 
         base_url = self._get_base_url()
         local_urls = []
-        for idx, url in enumerate(urls):
-            try:
-                cached_filename = await self.file_cache.download_and_cache(url, "image")
-                local_url = f"{base_url}/tmp/{cached_filename}"
-                local_urls.append(local_url)
-                if stream and len(urls) > 1:
-                    yield self._format_stream_chunk(
-                        reasoning_content=f"Cached image {idx + 1}/{len(urls)}...\n"
-                    )
-            except Exception as cache_error:
-                # Fallback to original URL if caching fails
-                local_urls.append(url)
-                if stream:
-                    yield self._format_stream_chunk(
-                        reasoning_content=f"Warning: Failed to cache image {idx + 1} - {str(cache_error)}\nUsing original URL instead...\n"
-                    )
-
-        if stream and all(u.startswith(base_url) for u in local_urls):
-            yield self._format_stream_chunk(
-                reasoning_content="All images cached successfully. Preparing final response...\n"
-            )
+        # Check if cache is enabled
+        if config.cache_enabled:
+            for idx, url in enumerate(urls):
+                try:
+                    cached_filename = await self.file_cache.download_and_cache(url, "image")
+                    local_url = f"{base_url}/tmp/{cached_filename}"
+                    local_urls.append(local_url)
+                    if stream and len(urls) > 1:
+                        yield self._format_stream_chunk(
+                            reasoning_content=f"Cached image {idx + 1}/{len(urls)}...\n"
+                        )
+                except Exception as cache_error:
+                    # Fallback to original URL if caching fails
+                    local_urls.append(url)
+                    if stream:
+                        yield self._format_stream_chunk(
+                            reasoning_content=f"Warning: Failed to cache image {idx + 1} - {str(cache_error)}\nUsing original URL instead...\n"
+                        )
+
+            if stream and all(u.startswith(base_url) for u in local_urls):
+                yield self._format_stream_chunk(
+                    reasoning_content="All images cached successfully. Preparing final response...\n"
+                )
+        else:
+            # Cache disabled: use original URLs directly
+            local_urls = urls
+            if stream:
+                yield self._format_stream_chunk(
+                    reasoning_content="Cache is disabled. Using original URLs directly...\n"
+                )
 
         await self.db.update_task(
             task_id, "completed", 100.0,
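Every new branch in this commit keys off `config.cache_enabled`, and the load-balancer hunk further down keys off `config.at_auto_refresh_enabled`; the diff never shows the config module itself. A hypothetical sketch of how such switches are commonly wired from environment variables (the field names match the diff, but the env variable names and defaults are assumptions):

```python
import os
from dataclasses import dataclass, field

def _env_bool(name: str, default: bool) -> bool:
    """Parse a boolean environment variable leniently."""
    return os.getenv(name, str(default)).strip().lower() in ("1", "true", "yes", "on")

@dataclass
class Config:
    # Toggle caching of generated media under /tmp (hypothetical env name)
    cache_enabled: bool = field(default_factory=lambda: _env_bool("CACHE_ENABLED", True))
    # Toggle auto-refresh of access tokens near expiry (hypothetical env name)
    at_auto_refresh_enabled: bool = field(default_factory=lambda: _env_bool("AT_AUTO_REFRESH_ENABLED", False))

config = Config()
```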
@@ -510,10 +543,10 @@ class GenerationHandler:
         )
 
         if stream:
-            # Final response with content
-            content_html = "".join([f"<img src='{url}' />" for url in local_urls])
+            # Final response with content (Markdown format)
+            content_markdown = "\n".join([f"![image]({url})" for url in local_urls])
             yield self._format_stream_chunk(
-                content=content_html,
+                content=content_markdown,
                 finish_reason="STOP"
             )
             yield "data: [DONE]\n\n"
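Note that the Markdown image expression inside the f-string was stripped by the page extraction; `![image]({url})` in the reconstruction above is an assumed alt text, though the `"\n".join(...)` and the "(Markdown format)" comment make the overall shape certain. The behavioral difference between the old and new join:

```python
local_urls = [
    "http://localhost:8000/tmp/a.png",
    "http://localhost:8000/tmp/b.png",
]

# Old: one line of raw HTML tags, which many chat clients escape or ignore
content_html = "".join(f"<img src='{url}' />" for url in local_urls)

# New: one Markdown image per line, which chat UIs render natively
content_markdown = "\n".join(f"![image]({url})" for url in local_urls)

print(content_markdown)
# ![image](http://localhost:8000/tmp/a.png)
# ![image](http://localhost:8000/tmp/b.png)
```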
@@ -544,7 +577,7 @@ class GenerationHandler:
                         last_heartbeat_time = current_time
                         elapsed = int(current_time - start_time)
                         yield self._format_stream_chunk(
-                            reasoning_content=f"**Generating**\n\nImage generation in progress... ({elapsed}s elapsed)\n"
+                            reasoning_content=f"Image generation in progress... ({elapsed}s elapsed)\n"
                         )
 
             # If task not found in response, send heartbeat for image generation
@@ -554,7 +587,7 @@ class GenerationHandler:
                 last_heartbeat_time = current_time
                 elapsed = int(current_time - start_time)
                 yield self._format_stream_chunk(
-                    reasoning_content=f"**Generating**\n\nImage generation in progress... ({elapsed}s elapsed)\n"
+                    reasoning_content=f"Image generation in progress... ({elapsed}s elapsed)\n"
                 )
 
         # Progress update for stream mode (fallback if no status from API)
@@ -638,7 +671,7 @@ class GenerationHandler:
         if media_type == "video":
             content = f"```html\n<video src='{url}' controls></video>\n```"
         else:
-            content = f"<img src='{url}' />"
+            content = f"![image]({url})"
 
         response = {
             "id": f"chatcmpl-{datetime.now().timestamp()}",
@@ -25,6 +25,18 @@ class LoadBalancer:
         Returns:
             Selected token or None if no available tokens
         """
+        # Try to auto-refresh tokens expiring within 24 hours if enabled
+        if config.at_auto_refresh_enabled:
+            all_tokens = await self.token_manager.get_all_tokens()
+            for token in all_tokens:
+                if token.is_active and token.expiry_time:
+                    from datetime import datetime
+                    time_until_expiry = token.expiry_time - datetime.now()
+                    hours_until_expiry = time_until_expiry.total_seconds() / 3600
+                    # Refresh if expiry is within 24 hours
+                    if hours_until_expiry <= 24:
+                        await self.token_manager.auto_refresh_expiring_token(token.id)
+
         active_tokens = await self.token_manager.get_active_tokens()
 
         if not active_tokens:
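The expiry check here is plain `datetime` arithmetic; a negative hour count means the token is already past its expiry, which `auto_refresh_expiring_token` (added below) still treats as refreshable. The same computation in isolation:

```python
from datetime import datetime, timedelta

def hours_until(expiry_time: datetime) -> float:
    """Hours from now until expiry; negative if already expired."""
    return (expiry_time - datetime.now()).total_seconds() / 3600

# Expires in 6 hours -> inside the 24-hour refresh window
assert hours_until(datetime.now() + timedelta(hours=6)) <= 24

# Expired an hour ago -> negative, but a refresh is still attempted
assert hours_until(datetime.now() - timedelta(hours=1)) < 0
```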
@@ -871,3 +871,77 @@ class TokenManager:
                 print(f"Failed to refresh Sora2 remaining count: {e}")
         except Exception as e:
             print(f"Error in refresh_sora2_remaining_if_cooldown_expired: {e}")
+
+    async def auto_refresh_expiring_token(self, token_id: int) -> bool:
+        """
+        Auto-refresh a token whose expiry time is within 24 hours, using ST or RT
+
+        Returns:
+            True if refresh successful, False otherwise
+        """
+        try:
+            token_data = await self.db.get_token(token_id)
+            if not token_data:
+                return False
+
+            # Check if token is expiring within 24 hours
+            if not token_data.expiry_time:
+                return False  # No expiry time set
+
+            time_until_expiry = token_data.expiry_time - datetime.now()
+            hours_until_expiry = time_until_expiry.total_seconds() / 3600
+
+            # Only refresh if expiry is within 24 hours (1440 minutes)
+            if hours_until_expiry > 24:
+                return False  # Token not expiring soon
+
+            if hours_until_expiry < 0:
+                # Token already expired, still try to refresh
+                print(f"🔄 Token {token_id} has expired, attempting auto-refresh...")
+            else:
+                print(f"🔄 Token {token_id} expires in {hours_until_expiry:.1f} hours, attempting auto-refresh...")
+
+            # Priority: ST > RT
+            new_at = None
+            new_st = None
+            new_rt = None
+
+            if token_data.st:
+                # Try to refresh using ST
+                try:
+                    print(f"📝 Refreshing Token {token_id} via ST...")
+                    result = await self.st_to_at(token_data.st)
+                    new_at = result.get("access_token")
+                    # ST refresh doesn't return a new ST, so keep the old one
+                    new_st = token_data.st
+                    print(f"✅ Refresh via ST succeeded")
+                except Exception as e:
+                    print(f"❌ Refresh via ST failed: {e}")
+                    new_at = None
+
+            if not new_at and token_data.rt:
+                # Try to refresh using RT
+                try:
+                    print(f"📝 Refreshing Token {token_id} via RT...")
+                    result = await self.rt_to_at(token_data.rt)
+                    new_at = result.get("access_token")
+                    new_rt = result.get("refresh_token", token_data.rt)  # RT might be updated
+                    print(f"✅ Refresh via RT succeeded")
+                except Exception as e:
+                    print(f"❌ Refresh via RT failed: {e}")
+                    new_at = None
+
+            if new_at:
+                # Update token with new AT
+                await self.update_token(token_id, token=new_at, st=new_st, rt=new_rt)
+                print(f"✅ Token {token_id} auto-refreshed")
+                return True
+            else:
+                # No ST or RT available, disable the token
+                print(f"⚠️ Token {token_id} cannot be refreshed (no ST or RT); disabled")
+                await self.disable_token(token_id)
+                return False
+
+        except Exception as e:
+            print(f"❌ Auto-refresh of Token {token_id} failed: {e}")
+            return False
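The ST-then-RT logic above boils down to a priority chain: try each stored credential in order, keep the first access token that comes back, and let the caller disable the token when every attempt fails. A compressed sketch of that chain (the `st_to_at`/`rt_to_at` coroutines are the repo's own methods; the harness around them is illustrative):

```python
async def refresh_access_token(token_data, st_to_at, rt_to_at):
    """Try ST first, then RT; return a new access token or None."""
    attempts = []
    if token_data.st:
        attempts.append(("ST", st_to_at, token_data.st))
    if token_data.rt:
        attempts.append(("RT", rt_to_at, token_data.rt))

    for label, refresh, credential in attempts:
        try:
            result = await refresh(credential)
            access_token = result.get("access_token")
            if access_token:
                return access_token
        except Exception as e:
            print(f"Refresh via {label} failed: {e}")  # fall through to the next credential
    return None  # caller should disable the token
```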