import json
import time
from datetime import datetime
from typing import Dict, Optional

# In-memory fallback storage (used when Redis is unavailable)
_memory_queue = []
_memory_processing = {}
_memory_results = {}


def get_redis_client():
    """Return the shared Redis client, or None if it is unavailable."""
    try:
        from app import redis_client
        return redis_client
    except Exception:
        return None


class ParseQueue:
    """Parse-queue manager backed by Redis, with an in-memory fallback."""

    QUEUE_KEY = "parse_queue"
    PROCESSING_KEY = "parse_processing"
    RESULT_KEY_PREFIX = "parse_result:"

    @staticmethod
    def add_task(task_id: str, video_url: str, user_id: Optional[int] = None, ip_address: str = ""):
        """Add a task to the queue."""
        task = {
            'task_id': task_id,
            'video_url': video_url,
            'user_id': user_id,
            'ip_address': ip_address,
            'created_at': datetime.utcnow().isoformat(),
            'status': 'queued'
        }

        redis_client = get_redis_client()
        if redis_client:
            redis_client.rpush(ParseQueue.QUEUE_KEY, json.dumps(task))
        else:
            # Fall back to the in-memory queue
            _memory_queue.append(task)
        return task_id

    @staticmethod
    def get_task() -> Optional[Dict]:
        """Pop the next task from the queue and mark it as processing."""
        redis_client = get_redis_client()
        if redis_client:
            task_json = redis_client.lpop(ParseQueue.QUEUE_KEY)
            if task_json:
                task = json.loads(task_json)
                redis_client.hset(ParseQueue.PROCESSING_KEY, task['task_id'], json.dumps(task))
                return task
        else:
            # Fall back to the in-memory queue
            if _memory_queue:
                task = _memory_queue.pop(0)
                _memory_processing[task['task_id']] = task
                return task
        return None

    @staticmethod
    def complete_task(task_id: str, result: Dict):
        """Mark a task as finished and store its result (kept for one hour)."""
        redis_client = get_redis_client()
        if redis_client:
            redis_client.hdel(ParseQueue.PROCESSING_KEY, task_id)
            redis_client.setex(
                f"{ParseQueue.RESULT_KEY_PREFIX}{task_id}",
                3600,
                json.dumps(result)
            )
        else:
            # Fall back to in-memory storage
            _memory_processing.pop(task_id, None)
            _memory_results[task_id] = result

    @staticmethod
    def get_result(task_id: str) -> Optional[Dict]:
        """Return the stored result for a task, or None if it is not available."""
        redis_client = get_redis_client()
        if redis_client:
            result_json = redis_client.get(f"{ParseQueue.RESULT_KEY_PREFIX}{task_id}")
            if result_json:
                return json.loads(result_json)
        else:
            # Fall back to in-memory storage
            return _memory_results.get(task_id)
        return None

    @staticmethod
    def get_queue_length() -> int:
        """Return the number of queued tasks."""
        redis_client = get_redis_client()
        if redis_client:
            return redis_client.llen(ParseQueue.QUEUE_KEY)
        else:
            return len(_memory_queue)

    @staticmethod
    def get_processing_count() -> int:
        """Return the number of tasks currently being processed."""
        redis_client = get_redis_client()
        if redis_client:
            return redis_client.hlen(ParseQueue.PROCESSING_KEY)
        else:
            return len(_memory_processing)

    @staticmethod
    def get_queue_status() -> Dict:
        """Return the queued and processing counts."""
        return {
            'queued': ParseQueue.get_queue_length(),
            'processing': ParseQueue.get_processing_count()
        }


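# Illustrative producer-side sketch (not part of the original module): how a
# caller might enqueue a URL with ParseQueue.add_task and then poll
# ParseQueue.get_result until a worker has finished. The uuid-based task id,
# the helper name, and the 0.5s poll interval are assumptions for this example.
def _example_submit_and_poll(video_url: str, poll_timeout: int = 30) -> Optional[Dict]:
    import uuid

    task_id = str(uuid.uuid4())
    ParseQueue.add_task(task_id, video_url)

    deadline = time.time() + poll_timeout
    while time.time() < deadline:
        result = ParseQueue.get_result(task_id)
        if result is not None:
            return result
        time.sleep(0.5)
    return None

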
class ConcurrencyController:
    """Concurrency controller that caps the number of tasks processed at once."""

    @staticmethod
    def can_process() -> bool:
        """Check whether a new task may start processing."""
        from models import SiteConfig
        config = SiteConfig.query.filter_by(config_key='max_concurrent').first()
        max_concurrent = int(config.config_value) if config else 3

        processing_count = ParseQueue.get_processing_count()
        return processing_count < max_concurrent

    @staticmethod
    def wait_for_slot(timeout: int = 60) -> bool:
        """Wait up to `timeout` seconds for a free processing slot."""
        start_time = time.time()
        while time.time() - start_time < timeout:
            if ConcurrencyController.can_process():
                return True
            time.sleep(0.5)
        return False
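

# Illustrative worker-loop sketch (not part of the original module): how a
# background worker might combine ConcurrencyController and ParseQueue. The
# parse_video callable is a hypothetical placeholder for the real parser, and
# the result shape ({'status': ..., ...}) is an assumption for this example.
def _example_worker_loop(parse_video) -> None:
    while True:
        # Wait until the concurrency limit allows another task; retry on timeout.
        if not ConcurrencyController.wait_for_slot(timeout=60):
            continue
        task = ParseQueue.get_task()
        if task is None:
            time.sleep(0.5)  # queue is empty; back off briefly
            continue
        try:
            data = parse_video(task['video_url'])
            ParseQueue.complete_task(task['task_id'], {'status': 'ok', 'data': data})
        except Exception as exc:
            ParseQueue.complete_task(task['task_id'], {'status': 'error', 'error': str(exc)})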