modified: bot.py

Author: SimolZimol
Date: 2024-12-17 21:15:40 +01:00
parent 0c7ec7fcbe
commit 9235b1efaa

bot.py

@@ -435,6 +435,129 @@ async def startgiveaway(ctx, platform: str, prize: str, num_winners: int, title:
    check_giveaway.start(giveaway_id)
# -----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
live_chats = {}
live_chat_queue = asyncio.Queue()


def read_file(filename):
    try:
        with open(filename, "r", encoding="utf-8") as file:
            return file.read()
    except FileNotFoundError:
        # Fallback system prompt ("You are a helpful assistant that answers questions.")
        return "Du bist ein hilfreicher Assistent, der Fragen beantwortet."


def load_chat_history(channel_id):
    """Loads the chat history for a given channel."""
    history_file = os.path.join(CACHE_DIR, f"chat_{channel_id}.json")
    if os.path.exists(history_file):
        with open(history_file, "r", encoding="utf-8") as file:
            return json.load(file)
    return []


def save_chat_history(channel_id, messages):
    """Saves the chat history for a given channel."""
    history_file = os.path.join(CACHE_DIR, f"chat_{channel_id}.json")
    with open(history_file, "w", encoding="utf-8") as file:
        json.dump(messages, file, indent=4)
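
# Illustrative sketch (not taken from the commit): with the helpers above, a cached
# chat_<channel_id>.json holds a list of message_data dicts as built in on_message
# below; the concrete values here are made-up examples.
# [
#     {
#         "timestamp": 1734466540,
#         "message_id": 131860000000000000,
#         "user_id": 123456789012345678,
#         "nickname": "SimolZimol",
#         "content": "Hallo!"
#     }
# ]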
@client.hybrid_command()
async def startlivechat(ctx):
    """Starts the live chat in the current channel."""
    channel_id = ctx.channel.id

    if channel_id in live_chats and live_chats[channel_id]["active"]:
        await ctx.send("Live-Chat ist bereits aktiv.")
        return

    # Load or initialise the chat history
    history = load_chat_history(channel_id)
    live_chats[channel_id] = {"messages": history, "active": True}

    await ctx.send("Live-Chat gestartet. Nachrichten werden verarbeitet.")
@client.hybrid_command()
async def stoplivechat(ctx):
    """Stops the live chat in the current channel."""
    channel_id = ctx.channel.id

    if channel_id in live_chats:
        live_chats[channel_id]["active"] = False
        await ctx.send("Live-Chat wurde beendet.")
    else:
        await ctx.send("Kein aktiver Live-Chat in diesem Kanal.")
@client.event
async def on_message(message):
    if message.author.bot:  # Ignore bots
        return

    channel_id = message.channel.id

    # Check whether the live chat is active in this channel
    if channel_id in live_chats and live_chats[channel_id]["active"]:
        timestamp = int(time.time())  # Unix timestamp
        message_data = {
            "timestamp": timestamp,
            "message_id": message.id,
            "user_id": message.author.id,
            "nickname": message.author.display_name,
            "content": message.content
        }

        # Update the history
        live_chats[channel_id]["messages"].append(message_data)
        save_chat_history(channel_id, live_chats[channel_id]["messages"])

        # Add the message to the AI queue
        await live_chat_queue.put((channel_id, message_data))

    await client.process_commands(message)
async def process_live_chat_queue():
    while True:
        try:
            channel_id, message_data = await live_chat_queue.get()

            # Assemble the context: introduction + chat history
            introduction = read_file("chat_intro.txt")  # New introduction file
            chat_history = load_chat_history(channel_id)
            formatted_history = "\n".join(
                f"{msg['timestamp']} | {msg['user_id']} | {msg['nickname']}: {msg['content']}"
                for msg in chat_history
            )

            # Send the request to the AI
            ai_response = await send_to_ai(introduction, formatted_history)

            if ai_response.strip() != "::null::":
                channel = client.get_channel(channel_id)
                if channel:
                    await channel.send(f"**AI:** {ai_response}")

            live_chat_queue.task_done()
        except Exception as e:
            logger.error(f"Error processing live chat queue: {e}")
async def send_to_ai(introduction, chat_history):
    """Sends the chat history together with the introduction to the AI."""
    try:
        response = openai_instance.chat.completions.create(
            model="gpt-4",
            messages=[
                {"role": "system", "content": introduction},
                {"role": "user", "content": f"Hier ist der bisherige Chat-Verlauf:\n{chat_history}\nAntworte auf die letzte Nachricht."}
            ],
            temperature=0.7,
            max_tokens=150
        )
        return response.choices[0].message.content
    except Exception as e:
        logger.error(f"AI processing error: {e}")
        return "::null::"
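
# Usage sketch (assumed flow, pieced together from the commands and tasks above):
#   /startlivechat  -> loads chat_<channel_id>.json and marks the channel as active
#   user messages   -> on_message appends them to the history and puts them on
#                      live_chat_queue
#   background task -> process_live_chat_queue builds the prompt from chat_intro.txt
#                      plus the formatted history and calls send_to_ai; replies are
#                      posted as "**AI:** ..." unless the model answers "::null::"
#   /stoplivechat   -> marks the channel as inactive (the history stays on disk)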
# -----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
@client.hybrid_command()
async def setlocalpermission(ctx, permission_level: int):
    """Allows an admin or higher to set their own local permission level."""
@@ -497,7 +620,6 @@ async def on_interaction(interaction):
    # Log interactions that do not match the expected type
    logger.error(f"Unbekannte Interaktion: {interaction.type}, Daten: {interaction.data}")
def read_introduction():
    try:
        with open("introduction.txt", "r", encoding="utf-8") as file:
@@ -645,6 +767,7 @@ background_data = read_background_data("background_data.txt")
@client.event
async def on_ready():
    client.loop.create_task(process_ai_queue())
    client.loop.create_task(process_live_chat_queue())  # Start the queue processing
    logger.info("Bot is ready!")
    logger.info(f"Logged in as: {client.user.name}")
    logger.info(f"Client ID: {client.user.id}")