background

This commit is contained in:
SimolZimol
2024-12-17 22:30:44 +01:00
parent 9235b1efaa
commit 1be363aef4

84
bot.py
View File

@@ -494,66 +494,58 @@ async def on_message(message):
# Prüfen, ob der Live-Chat aktiv ist # Prüfen, ob der Live-Chat aktiv ist
if channel_id in live_chats and live_chats[channel_id]["active"]: if channel_id in live_chats and live_chats[channel_id]["active"]:
timestamp = int(time.time()) # Unix-Timestamp
message_data = { await live_chat_queue.put((message, message.id, message.author.id, message.author.display_name, message.content))
"timestamp": timestamp,
"message_id": message.id,
"user_id": message.author.id,
"nickname": message.author.display_name,
"content": message.content
}
# Historie aktualisieren
live_chats[channel_id]["messages"].append(message_data)
save_chat_history(channel_id, live_chats[channel_id]["messages"])
# Nachricht zur AI-Warteschlange hinzufügen
await live_chat_queue.put((channel_id, message_data))
await client.process_commands(message) await client.process_commands(message)
async def process_live_chat_queue():
    """Background worker for the live-chat AI.

    Consumes the 5-tuples enqueued by on_message
    (message, message_id, user_id, nickname, content), builds a prompt
    from the live introduction + background data + persisted chat
    history, asks the model for a reply, and posts any non-"::null::"
    answer back to the originating channel.

    Runs until the task is cancelled.
    """
    while True:
        try:
            # Await the queue directly: blocks until an item is available,
            # which removes the busy-spin and the wrong-queue bug (the
            # original tested askmultus_queue but consumed live_chat_queue).
            # Unpack into plain locals -- the original unpacked into
            # message.id / message.author.* which are read-only attributes.
            message, message_id, user_id, nickname, text = await live_chat_queue.get()

            # System prompt: introduction file plus static background data.
            live_introduction = read_file("live_introduction.txt")
            live_background_data = read_file("live_background_data.txt")
            system_prompt = live_introduction + " background data:" + live_background_data

            chat_history = load_chat_history(message.channel.id)

            timestamp = int(time.time())  # Unix timestamp
            # f-string instead of '+' concatenation: the original added an
            # int (timestamp / ids) to str, which raises TypeError.
            content = f"{timestamp}/{message_id}/{user_id}/{nickname}:{text}"
            chat_history.append({"role": "user", "content": content})

            messages = [
                {"role": "system", "content": system_prompt},
                *chat_history,
            ]

            # Run the blocking OpenAI client call off the event loop so the
            # Discord heartbeat and other coroutines keep running.
            ai_answer = await loop.run_in_executor(executor, lambda: openai_instance.chat.completions.create(
                model="model",
                messages=messages,
                temperature=0.8,
                timeout=15,  # limit waiting time for the response
            ))
            ai_message = ai_answer.choices[0].message.content

            chat_history.append({"role": "assistant", "content": ai_message})
            save_chat_history(message.channel.id, chat_history)

            # "::null::" is the model's sentinel for "no reply needed".
            if ai_message.strip() != "::null::":
                channel = message.channel
                if channel:
                    await channel.send(f"**AI:** {ai_message}")
            live_chat_queue.task_done()
        except asyncio.CancelledError:
            break
        except Exception as e:
            logger.error(f"Error processing live chat queue: {e}")
            await asyncio.sleep(5)  # back off before retrying after a failure
async def send_to_ai(introduction, chat_history):
    """Send the chat history, prefixed by the introduction, to the AI.

    Args:
        introduction: system-prompt text (contents of the intro file).
        chat_history: pre-formatted chat transcript string.

    Returns:
        The model's reply text, or the sentinel "::null::" on any error
        (callers treat "::null::" as "do not post a reply").
    """
    try:
        # Run the blocking OpenAI call in the executor instead of directly
        # inside the coroutine -- the original stalled the whole event loop
        # for the duration of the HTTP request. Matches the executor
        # pattern used elsewhere in this file.
        response = await loop.run_in_executor(executor, lambda: openai_instance.chat.completions.create(
            model="gpt-4",
            messages=[
                {"role": "system", "content": introduction},
                {"role": "user", "content": f"Hier ist der bisherige Chat-Verlauf:\n{chat_history}\nAntworte auf die letzte Nachricht."}
            ],
            temperature=0.7,
            max_tokens=150
        ))
        return response.choices[0].message.content
    except Exception as e:
        logger.error(f"AI processing error: {e}")
        return "::null::"
# ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # -----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------