feat: add video recognition
@@ -2,6 +2,11 @@
AutoReply plugin - intelligent auto-reply based on a dual-LLM architecture

A small model judges whether a reply is needed; once it passes, the AIChat plugin is triggered to generate the reply

v2.0 improvements:
- Background asynchronous judging that does not block message handling
- Stale-message detection to avoid replying to messages that have already scrolled past
- A smarter cooldown mechanism
"""

import json
@@ -12,7 +17,7 @@ import aiohttp
from pathlib import Path
from datetime import datetime, date
from dataclasses import dataclass, field
from typing import Dict, Optional
from typing import Dict, Optional, List
from loguru import logger
from utils.plugin_base import PluginBase
from utils.decorators import on_text_message
@@ -45,6 +50,17 @@ class ChatState:
    last_reset_date: str = ""
    total_messages: int = 0
    total_replies: int = 0
    message_count_at_trigger: int = 0  # message count at the time judging was triggered


@dataclass
class PendingJudge:
    """A pending judge task"""
    chat_id: str
    from_wxid: str
    content: str
    trigger_time: float
    message_count: int  # message count at trigger time


class AutoReply(PluginBase):
@@ -52,7 +68,7 @@ class AutoReply(PluginBase):

    description = "Intelligent auto-reply plugin based on a dual-LLM architecture"
    author = "ShiHao"
    version = "1.1.0"
    version = "2.0.0"

    def __init__(self):
        super().__init__()
@@ -60,9 +76,12 @@ class AutoReply(PluginBase):
        self.chat_states: Dict[str, ChatState] = {}
        self.weights = {}
        self.last_judge_time: Dict[str, float] = {}
        self.last_reply_time: Dict[str, float] = {}  # time of the last reply (used for cooldown)
        self.judging: Dict[str, bool] = {}
        self.bot_wxid: str = ""
        self.bot_nickname: str = ""
        self.pending_tasks: Dict[str, asyncio.Task] = {}  # background judge tasks
        self.message_counters: Dict[str, int] = {}  # per-group message counters

    async def async_init(self):
        """Asynchronous initialization"""
@@ -145,7 +164,7 @@ class AutoReply(PluginBase):

    @on_text_message(priority=90)
    async def handle_message(self, bot, message: dict):
        """Handle a message"""
        """Handle a message - background async judging mode"""
        try:
            # Check whether the plugin is enabled
            if not self.config or not self.config["basic"]["enabled"]:
@@ -179,42 +198,50 @@ class AutoReply(PluginBase):
            chat_id = self._normalize_chat_id(from_wxid)
            current_time = time.time()

            # Update the message counter (every message is counted)
            self.message_counters[chat_id] = self.message_counters.get(chat_id, 0) + 1
            current_msg_count = self.message_counters[chat_id]

            # Rate limit: skip if a judge is already in progress
            if self.judging.get(chat_id, False):
                logger.debug(f"[AutoReply] Group {from_wxid[:15]}... is already being judged, skipping")
                return True

            # Rate limit: check the time interval
            # Rate limit: check the judge interval
            min_interval = self.config.get("rate_limit", {}).get("min_interval", 10)
            last_time = self.last_judge_time.get(chat_id, 0)
            if current_time - last_time < min_interval:
                logger.debug(f"[AutoReply] Only {current_time - last_time:.1f}s since the last judge, skipping")
            last_judge = self.last_judge_time.get(chat_id, 0)
            if current_time - last_judge < min_interval:
                logger.debug(f"[AutoReply] Only {current_time - last_judge:.1f}s since the last judge, skipping")
                return True

            # Cooldown check: cooldown period after the last reply
            reply_cooldown = self.config.get("rate_limit", {}).get("reply_cooldown", 60)
            last_reply = self.last_reply_time.get(chat_id, 0)
            if current_time - last_reply < reply_cooldown:
                logger.debug(f"[AutoReply] Reply cooldown active, {reply_cooldown - (current_time - last_reply):.0f}s remaining")
                return True

            # Mark as judging
            self.judging[chat_id] = True
            self.last_judge_time[chat_id] = current_time

            try:
                # Judge with the small model
                judge_result = await self._judge_with_small_model(from_wxid, content)
            # Start the background judge task (fire-and-forget)
            pending = PendingJudge(
                chat_id=chat_id,
                from_wxid=from_wxid,
                content=content,
                trigger_time=current_time,
                message_count=current_msg_count
            )

                if judge_result.should_reply:
                    logger.info(f"[AutoReply] Reply triggered | group: {from_wxid[:15]}... | score: {judge_result.overall_score:.2f} | {judge_result.reasoning[:30]}")
            task = asyncio.create_task(
                self._background_judge(bot, pending)
            )
            self.pending_tasks[chat_id] = task

                    # Update state
                    self._update_state(chat_id, replied=True)

                    # Set the trigger flag so AIChat handles it
                    message['_auto_reply_triggered'] = True
                else:
                    logger.debug(f"[AutoReply] Not triggered | group: {from_wxid[:15]}... | score: {judge_result.overall_score:.2f}")
                    self._update_state(chat_id, replied=False)

            finally:
                # Clear the judging flag
                self.judging[chat_id] = False
            logger.debug(f"[AutoReply] Background judge started | group: {from_wxid[:15]}... | message #{current_msg_count}")

            # Return immediately without blocking message handling
            return True

        except Exception as e:
@@ -226,6 +253,140 @@ class AutoReply(PluginBase):
            self.judging[chat_id] = False
            return True

    async def _background_judge(self, bot, pending: PendingJudge):
        """Background judge task - triggers the reply directly once judging completes"""
        chat_id = pending.chat_id
        try:
            # Judge with the small model
            judge_result = await self._judge_with_small_model(pending.from_wxid, pending.content)

            if not judge_result.should_reply:
                logger.debug(f"[AutoReply] Not triggered | group: {pending.from_wxid[:15]}... | score: {judge_result.overall_score:.2f}")
                self._update_state(chat_id, replied=False)
                return

            # Check whether the message has gone stale
            current_time = time.time()
            current_msg_count = self.message_counters.get(chat_id, 0)

            elapsed_time = current_time - pending.trigger_time
            new_messages = current_msg_count - pending.message_count

            # Read the staleness thresholds from config
            stale_config = self.config.get("stale_detection", {})
            max_elapsed = stale_config.get("max_elapsed_seconds", 60)
            max_new_messages = stale_config.get("max_new_messages", 15)

            is_stale = elapsed_time > max_elapsed or new_messages > max_new_messages

            if is_stale:
                logger.info(f"[AutoReply] Message is stale, abandoning reply | elapsed: {elapsed_time:.1f}s | new messages: {new_messages}")
                self._update_state(chat_id, replied=False)
                return

            # Trigger the reply
            logger.info(f"[AutoReply] Reply triggered | group: {pending.from_wxid[:15]}... | score: {judge_result.overall_score:.2f} | elapsed: {elapsed_time:.1f}s | {judge_result.reasoning[:30]}")

            # Update state
            self._update_state(chat_id, replied=True)
            self.last_reply_time[chat_id] = current_time

            # Call AIChat directly to generate the reply (based on the latest context)
            await self._trigger_ai_reply(bot, pending.from_wxid)

        except Exception as e:
            logger.error(f"[AutoReply] Background judge error: {e}")
            import traceback
            logger.error(traceback.format_exc())
        finally:
            # Clear the judging flag
            self.judging[chat_id] = False
            # Clean up the task reference
            if chat_id in self.pending_tasks:
                del self.pending_tasks[chat_id]

    async def _trigger_ai_reply(self, bot, from_wxid: str):
        """Trigger AIChat to generate a reply (based on the latest history context)"""
        try:
            from utils.plugin_manager import PluginManager
            aichat_plugin = PluginManager().plugins.get("AIChat")

            if not aichat_plugin:
                logger.warning("[AutoReply] AIChat plugin is not loaded")
                return

            # Use the latest history records as context
            chat_id = self._normalize_chat_id(from_wxid)
            recent_context = await self._get_recent_context_for_reply(chat_id)

            if not recent_context:
                logger.warning("[AutoReply] Unable to get context")
                return

            # Build a virtual message to trigger an AIChat reply
            # A special flag tells AIChat this was triggered by auto-reply
            virtual_message = {
                'FromWxid': from_wxid,
                'SenderWxid': '',  # empty: not a specific user
                'Content': recent_context,
                'IsGroup': True,
                '_auto_reply_triggered': True,
                '_auto_reply_context': True,  # marks this as a context-based trigger
            }

            # Call AIChat's message handler
            await aichat_plugin.handle_message(bot, virtual_message)

        except Exception as e:
            logger.error(f"[AutoReply] Failed to trigger AI reply: {e}")
            import traceback
            logger.error(traceback.format_exc())

    async def _get_recent_context_for_reply(self, chat_id: str) -> str:
        """Get the most recent context for generating a reply"""
        try:
            history = await self._get_history(chat_id)
            if not history:
                return ""

            # Take the last few messages as context hints
            count = self.config.get('context', {}).get('messages_count', 5)
            recent = history[-count:] if len(history) > count else history

            # Build a context summary
            context_lines = []
            for record in recent:
                nickname = record.get('nickname', '未知')
                content = record.get('content', '')
                if isinstance(content, list):
                    # Multimodal content: extract the text part
                    for item in content:
                        if item.get('type') == 'text':
                            content = item.get('text', '')
                            break
                    else:
                        content = '[图片]'
                if len(content) > 50:
                    content = content[:50] + "..."
                context_lines.append(f"{nickname}: {content}")

            # Return the last message as the trigger content (AIChat reads the full history itself)
            if recent:
                last = recent[-1]
                last_content = last.get('content', '')
                if isinstance(last_content, list):
                    for item in last_content:
                        if item.get('type') == 'text':
                            return item.get('text', '')
                    return '[图片]'
                return last_content

            return ""

        except Exception as e:
            logger.error(f"[AutoReply] Failed to get context: {e}")
            return ""

    async def _judge_with_small_model(self, from_wxid: str, content: str) -> JudgeResult:
        """Judge with the small model whether a reply is needed"""
        chat_id = self._normalize_chat_id(from_wxid)
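For reference, the new code paths in this commit read their tuning knobs through self.config. Below is a minimal sketch of a matching configuration structure, assuming the config is loaded into a plain Python dict; the actual file name and on-disk format are not part of this excerpt, and the section names, keys, and values are simply taken from the .get() calls and their in-code defaults shown above.

# Illustrative sketch only: the config sections and keys the new code reads.
# Key names and defaults come from the .get() calls in this diff; the real
# config file name and format are not shown in this excerpt.
EXAMPLE_AUTOREPLY_CONFIG = {
    "basic": {
        "enabled": True,               # checked at the top of handle_message
    },
    "rate_limit": {
        "min_interval": 10,            # seconds between judge attempts per chat
        "reply_cooldown": 60,          # seconds of cooldown after a reply
    },
    "stale_detection": {               # new in v2.0
        "max_elapsed_seconds": 60,     # drop the reply if judging took longer than this
        "max_new_messages": 15,        # drop the reply if this many newer messages arrived
    },
    "context": {
        "messages_count": 5,           # history messages used to build the reply context
    },
}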