From 3db3f13cdae33e53e050231d8af2b265a88efb74 Mon Sep 17 00:00:00 2001
From: Kirill Kirilenko
Date: Mon, 29 Dec 2025 22:10:36 +0300
Subject: [PATCH] Remove the Telegram mention from the system prompt. On an API
 error, return its description to the user and do not keep the request that
 caused it in the context.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 ai_agent.py | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/ai_agent.py b/ai_agent.py
index a6362a9..ec888c4 100644
--- a/ai_agent.py
+++ b/ai_agent.py
@@ -5,7 +5,7 @@ from openrouter import OpenRouter
 
 
 SYSTEM_PROMPT = """
-Ты - помощник в групповом чате Telegram.
+Ты - помощник в групповом чате.
 Отвечай на вопросы и поддерживай контекст беседы.
 Ты не можешь обсуждать политику и религию.
 Сообщения пользователей будут приходить в следующем формате: '[Имя]: текст сообщения'
@@ -13,7 +13,6 @@ SYSTEM_PROMPT = """
 """
 
 
-@dataclass
 class ChatContext:
     def __init__(self, max_messages: int):
         self.max_messages: int = max_messages
@@ -28,6 +27,9 @@ class ChatContext:
     def get_messages_for_api(self) -> List[Dict[str, str]]:
         return self.messages
 
+    def remove_last_message(self):
+        self.messages.pop()
+
 
 class AiAgent:
     def __init__(self, api_token: str):
@@ -37,13 +39,12 @@ class AiAgent:
     async def get_reply(self, chat_id: int, chat_prompt: str, user_name: str, message: str) -> str:
         context = self._get_chat_context(chat_id, chat_prompt)
         context.add_message(role="user", content=f"[{user_name}]: {message}")
-        messages_for_api = context.get_messages_for_api()
 
         try:
             # Get response from OpenRouter
             response = await self.client.chat.send_async(
                 model="meta-llama/llama-3.3-70b-instruct:free",
-                messages=messages_for_api,
+                messages=context.get_messages_for_api(),
                 max_tokens=500,
                 temperature=0.7
             )
@@ -57,8 +58,9 @@ class AiAgent:
             return ai_response
 
         except Exception as e:
-            print(f"Error processing message: {e}")
-            return "Извините, при обработке запроса произошла ошибка."
+            context.remove_last_message()
+            print(f"Ошибка выполнения запроса к ИИ: {e}")
+            return f"Извините, при обработке запроса произошла ошибка:\n{e}"
 
     def clear_chat_context(self, chat_id: int):
         self.chat_contexts.pop(chat_id, None)
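
The rollback behaviour described in the commit message can be checked in isolation. The sketch below (not part of the patch) shows the intent: the user message that triggered a failed API call is popped from the context, so the next request does not resend it. The ChatContext here only loosely mirrors the class in ai_agent.py; the truncation in add_message, the emptiness guard in remove_last_message, the "[Alice]" message, and the simulated RuntimeError are illustrative assumptions, not code from the repository.

from typing import Dict, List


class ChatContext:
    """Loose stand-in for the ChatContext class from ai_agent.py."""

    def __init__(self, max_messages: int):
        self.max_messages: int = max_messages
        self.messages: List[Dict[str, str]] = []

    def add_message(self, role: str, content: str) -> None:
        self.messages.append({"role": role, "content": content})
        # Assumed behaviour: keep only the newest max_messages entries.
        del self.messages[:-self.max_messages]

    def get_messages_for_api(self) -> List[Dict[str, str]]:
        return self.messages

    def remove_last_message(self) -> None:
        # Guard against an empty context (an assumption; the patched method pops unconditionally).
        if self.messages:
            self.messages.pop()


context = ChatContext(max_messages=10)
context.add_message(role="user", content="[Alice]: привет")

try:
    # Stand-in for self.client.chat.send_async(...) raising inside get_reply.
    raise RuntimeError("simulated API error")
except Exception as e:
    # Roll back the message that caused the failure and surface its description,
    # mirroring the new except branch in the patch.
    context.remove_last_message()
    print(f"Извините, при обработке запроса произошла ошибка:\n{e}")

assert context.get_messages_for_api() == []  # the failed request left no trace

With the pre-patch behaviour, the failed "[Alice]: привет" message would have stayed in context.messages and been resent to the model on the next call.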