vk_chat_bot/vk/handlers/default.py

from typing import Dict, List, Optional
from vkbottle.bot import Message
from vkbottle.framework.labeler import BotLabeler
from vkbottle_types.codegen.objects import GroupsGroup
from openrouter import OpenRouter
from dataclasses import dataclass, field
import utils
import vk.vk_database as database

labeler = BotLabeler()


@dataclass
class ChatContext:
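    """Sliding window of chat messages kept as context for the AI model; the system prompt stays at index 0."""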

    max_messages: int
    messages: List[Dict[str, str]] = field(default_factory=list)

    def add_message(self, role: str, content: str) -> None:
        if len(self.messages) == self.max_messages:
            # Always keep the system message at index 0; drop the oldest chat message instead
            self.messages.pop(1)
        self.messages.append({"role": role, "content": content})

    def get_messages_for_api(self) -> List[Dict[str, str]]:
        return self.messages
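

# Per-chat conversation contexts and the bot's own group profile, cached on first use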
chat_contexts: Dict[int, ChatContext] = {}
bot_user: Optional[GroupsGroup] = None

system_prompt = """
Ты - помощник в групповом чате VK.
Отвечай на вопросы и поддерживай контекст беседы.
Ты не можешь обсуждать политику и религию.
Сообщения пользователей будут приходить в следующем формате: '[Имя]: текст сообщения'
При ответе НЕ нужно указывать пользователя, которому он предназначен.
"""


def get_ai_chat_context(chat_id: int) -> ChatContext:
    """Get or create chat context for a specific chat"""
    if chat_id not in chat_contexts:
        chat_contexts[chat_id] = ChatContext(10)
        chat = database.DB.get_chat(chat_id)
        prompt = system_prompt
        if chat['ai_prompt'] is not None:
            prompt += '\n\n' + chat['ai_prompt']
        chat_contexts[chat_id].add_message(role="system", content=prompt)
    return chat_contexts[chat_id]


async def ai_message_handler(message: Message):
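    """Reply to a message that mentions the bot, using a model served through OpenRouter."""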
    chat_id = message.peer_id

    # Extract user information and message content
    user = await message.ctx_api.users.get(user_ids=[message.from_id])
    if len(user) == 1:
        user_name = "{} {}".format(user[0].first_name, user[0].last_name)
    else:
        user_name = '@id' + str(message.from_id)

    bot_mention = '@' + bot_user.screen_name
    message_text = message.text.replace(bot_mention, bot_user.name)

    context = get_ai_chat_context(chat_id)
    context.add_message(
        role="user",
        content=f"[{user_name}]: {message_text}"
    )

    # noinspection PyUnresolvedReferences
    api_key = message.ctx_api.config['openrouter_token']
    client = OpenRouter(api_key=api_key)
    messages_for_api = context.get_messages_for_api()
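
    # Show a typing indicator while the request to the model is in flight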
    await message.ctx_api.messages.set_activity(peer_id=chat_id, type='typing')
    try:
        # Get response from OpenRouter
        response = await client.chat.send_async(
            model="meta-llama/llama-3.3-70b-instruct:free",
            messages=messages_for_api,
            max_tokens=500,
            temperature=0.7
        )
        # Extract AI response
        ai_response = response.choices[0].message.content
        # Add AI response to context
        context.add_message(role="assistant", content=ai_response)
        # Send response back to chat
        await message.reply(ai_response)
    except Exception as e:
        print(f"Error processing message: {e}")
        await message.reply("Извините, при обработке запроса произошла ошибка.")


# Regular messages (not commands and not service actions)
@labeler.chat_message()
async def any_message_handler(message: Message):
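    """Track activity for every chat message and hand mentions of the bot to the AI handler."""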
    chat_id = message.peer_id
    chat = database.DB.create_chat_if_not_exists(chat_id)
    if chat['active'] == 0:
        return
    # Ignore bots (messages from communities come with negative IDs)
    if message.from_id < 0:
        return
    # Skip service messages (invites, pins, etc.)
    if message.action is not None:
        return
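
    # Record the user's activity for chat statistics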
    user_id = message.from_id
    database.DB.create_user_if_not_exists(chat_id, user_id)
    database.DB.user_set_last_message(chat_id, user_id, utils.posix_time())
    database.DB.user_increment_messages(chat_id, user_id)
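
    # Lazily cache the group's own profile so mentions of the bot can be detected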
    global bot_user
    if bot_user is None:
        bot_user = (await message.ctx_api.groups.get_by_id()).groups[0]
    bot_mention = '@' + bot_user.screen_name
    if message.text is not None and bot_mention in message.text:
        await ai_message_handler(message)