new file: .gitignore
new file: Dockerfile
new file: README.md
new file: app.py
new file: chat-logs/chat-index.json
new file: chat-logs/crea-1-10.08.2020-merged.txt
new file: chat-logs/crea-1-11.08.2020-merged.txt
new file: chat-logs/crea-1-12.08.2020-merged.txt
new file: chat-logs/crea-1-13.08.2020-merged.txt
new file: chat-logs/crea-1-14.08.2020-merged.txt
new file: chat-logs/crea-1-15.08.2020-merged.txt
new file: chat-logs/crea-1-18.08.2020-merged.txt
new file: chat-logs/crea-1-20.08.2020-merged.txt
new file: chat-logs/crea-1-2020-07-27-1-filtered.txt
new file: chat-logs/crea-1-2020-07-28-1-filtered.txt
new file: chat-logs/crea-1-2020-07-29-1-filtered.txt
new file: chat-logs/crea-1-2020-07-30-1-filtered.txt
new file: chat-logs/crea-1-2020-08-03-1-filtered.txt
new file: chat-logs/crea-1-2020-08-04-1-filtered.txt
new file: chat-logs/crea-1-2020-08-08-1-filtered.txt
new file: chat-logs/crea-1-2020-08-09-1-filtered.txt
new file: chat-logs/crea-1-2020-08-10-1-filtered.txt
new file: chat-logs/crea-1-2020-08-11-1-filtered.txt
new file: chat-logs/crea-1-2020-08-13-1-filtered.txt
new file: chat-logs/crea-1-2020-08-16-1-filtered.txt
new file: chat-logs/crea-1-2020-08-17-1-filtered.txt
new file: chat-logs/crea-1-2020-08-18-1-filtered.txt
new file: chat-logs/crea-1-2020-08-20-1-filtered.txt
new file: chat-logs/crea-1-2020-08-24-1-filtered.txt
new file: chat-logs/crea-1-2020-08-29-1-filtered.txt
new file: chat-logs/crea-1-2020-08-30-1-filtered.txt
new file: chat-logs/crea-1-21.08.2020-merged.txt
new file: chat-logs/crea-1-22.08.2020-merged.txt
new file: chat-logs/crea-1-23.08.2020-merged.txt
new file: chat-logs/crea-1-24.07.2020-merged.txt
new file: chat-logs/crea-1-25.07.2020-merged.txt
new file: chat-logs/crea-1-25.08.2020-merged.txt
new file: chat-logs/crea-1-26.07.2020-merged.txt
new file: chat-logs/crea-1-26.08.2020-merged.txt
new file: chat-logs/crea-1-27.08.2020-merged.txt
new file: chat-logs/crea-1-28.08.2020-merged.txt
new file: chat-logs/crea-1-crea-1-10.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-11.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-12.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-14.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-15.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-18.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-20.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-21.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-22.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-23.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-24.07.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-25.07.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-25.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-26.07.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-26.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-27.08.2020-merged-filtered.txt
new file: chat-logs/crea-1-crea-1-28.08.2020-merged-filtered.txt
new file: chat-logs/survival-1-15.08.2020-merged.txt
new file: chat-logs/survival-1-2020-07-27-1-filtered.txt
new file: chat-logs/survival-1-2020-07-28-1-filtered.txt
new file: chat-logs/survival-1-2020-08-07-1-filtered.txt
new file: chat-logs/survival-1-2020-08-08-1-filtered.txt
new file: chat-logs/survival-1-2020-08-11-1-filtered.txt
new file: chat-logs/survival-1-2020-08-13-1-filtered.txt
new file: chat-logs/survival-1-2020-08-14-1-filtered.txt
new file: chat-logs/survival-1-2020-08-17-1-filtered.txt
new file: chat-logs/survival-1-2020-08-18-1-filtered.txt
new file: chat-logs/survival-1-2020-08-19-1-filtered.txt
new file: chat-logs/survival-1-25.07.2020-merged.txt
new file: chat-logs/survival-1-survival-1-15.08.2020-merged-filtered.txt
new file: chat-logs/survival-1-survival-1-25.07.2020-merged-filtered.txt
new file: chat-logs/thesur-1-2020-08-17-1-filtered.txt
new file: chat-logs/thesur-1-2020-08-31-1-filtered.txt
new file: count_all_sessions.py
new file: count_sessions.py
new file: index.html
new file: local-chat-analyzer.js
new file: merge_daily_logs.py
new file: process_thesur_logs.py
new file: quick_add.py
new file: requirements.txt
new file: script.js
new file: server.py
new file: statistics-integration.js
new file: statistics.css
new file: statistics.js
new file: style.css
merge_daily_logs.py (new file, 212 lines)
@@ -0,0 +1,212 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Merge Daily Logs Script
Merges all raw logs from the same day into one file, sorted by time.
"""

import os
import re
import sys
from datetime import datetime
from collections import defaultdict

# Set UTF-8 encoding for console output
if sys.platform == "win32":
    import codecs
    sys.stdout = codecs.getwriter("utf-8")(sys.stdout.detach())
    sys.stderr = codecs.getwriter("utf-8")(sys.stderr.detach())
def extract_date_and_time_from_line(line):
    """Extracts the timestamp from a log line as seconds since midnight (for sorting)"""
    # Pattern: [HH:MM:SS] at the start of the line
    time_match = re.match(r'^\[(\d{2}):(\d{2}):(\d{2})\]', line)
    if time_match:
        hours, minutes, seconds = map(int, time_match.groups())
        return hours * 3600 + minutes * 60 + seconds  # seconds since midnight, used as sort key
    return None
def extract_date_from_filename(filename):
    """Extracts the date from a filename and returns it normalized as DD.MM.YYYY"""
    # Support several date formats
    date_patterns = [
        r'(\d{4})-(\d{2})-(\d{2})',       # YYYY-MM-DD (for the raw .log files)
        r'(\d{2})\.(\d{2})\.(\d{4})',     # DD.MM.YYYY
        r'(\d{2})-(\d{2})-(\d{4})',       # DD-MM-YYYY
        r'(\d{1,2})\.(\d{1,2})\.(\d{4})', # D.M.YYYY or DD.M.YYYY
    ]

    for i, pattern in enumerate(date_patterns):
        match = re.search(pattern, filename)
        if match:
            if i == 0:  # YYYY-MM-DD
                year, month, day = match.groups()
            else:  # DD.MM.YYYY variants
                day, month, year = match.groups()

            try:
                # Normalize the date to DD.MM.YYYY
                day = int(day)
                month = int(month)
                year = int(year)
                return f"{day:02d}.{month:02d}.{year}"
            except ValueError:
                continue

    return None
def group_files_by_date(directory):
    """Groups all .txt and .log files by date"""
    files_by_date = defaultdict(list)

    if not os.path.exists(directory):
        print(f"❌ Directory '{directory}' does not exist!")
        return {}

    for filename in os.listdir(directory):
        if (filename.endswith('.txt') or filename.endswith('.log')) and not filename.endswith('-merged.txt'):
            date = extract_date_from_filename(filename)
            if date:
                full_path = os.path.join(directory, filename)
                files_by_date[date].append(full_path)
                print(f"📅 {date}: {filename}")
            else:
                print(f"⚠️ No date detected in: {filename}")

    return files_by_date
def merge_log_files(file_paths, output_path):
    """Merges several log files into one and sorts the lines by time"""
    all_lines = []

    print(f"📂 Merging {len(file_paths)} files...")

    for file_path in file_paths:
        print(f"  📄 Reading: {os.path.basename(file_path)}")
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                lines = f.readlines()
                for line in lines:
                    line = line.strip()
                    if line:
                        timestamp = extract_date_and_time_from_line(line)
                        all_lines.append((timestamp or 0, line))
        except Exception as e:
            print(f"❌ Error reading {file_path}: {e}")

    # Sort by timestamp (lines without a timestamp sort to the top)
    all_lines.sort(key=lambda x: x[0])

    # Write the merged file
    try:
        with open(output_path, 'w', encoding='utf-8') as f:
            for _, line in all_lines:
                f.write(line + '\n')

        print(f"✅ Merged: {output_path}")
        print(f"📊 Total lines: {len(all_lines)}")
        return True

    except Exception as e:
        print(f"❌ Error writing: {e}")
        return False
def detect_gamemode_from_path(file_paths):
    """Detects the game mode based on the filenames"""
    # Look at all the filenames
    all_names = ' '.join([os.path.basename(path).lower() for path in file_paths])

    if 'crea' in all_names or 'creative' in all_names:
        return 'crea-1'
    elif 'survival' in all_names or 'surv' in all_names:
        return 'survival-1'
    else:
        return 'unknown'
def main():
    print("🔄 Merge Daily Logs - Minecraft Raw Logs")
    print("=" * 50)

    # Default directory for raw logs
    raw_logs_dir = input("📁 Raw logs directory (Enter for current): ").strip()
    if not raw_logs_dir:
        raw_logs_dir = "."

    print(f"🔍 Searching for log files in: {raw_logs_dir}")

    # Group files by date
    files_by_date = group_files_by_date(raw_logs_dir)

    if not files_by_date:
        print("❌ No log files with a recognizable date found!")
        return

    print(f"\n📊 Days found: {len(files_by_date)}")

    # Show an overview
    for date, files in files_by_date.items():
        if len(files) > 1:  # only days with more than one file
            print(f"\n📅 {date}: {len(files)} files")
            for file_path in files:
                size = os.path.getsize(file_path) // 1024  # KB
                print(f"  📄 {os.path.basename(file_path)} ({size} KB)")

    print("\n" + "=" * 50)

    # Ask which days should be merged
    merge_all = input("🔄 Merge all days with multiple files? (Y/n): ").strip().lower()

    merged_count = 0

    for date, files in files_by_date.items():
        if len(files) <= 1:
            continue  # skip days with only one file

        if merge_all not in ['', 'y', 'yes', 'j', 'ja']:
            response = input(f"📅 Merge {date} ({len(files)} files)? (y/N): ")
            if response.lower() not in ['y', 'yes', 'j', 'ja']:
                continue

        # Determine the game mode and output filename
        gamemode = detect_gamemode_from_path(files)

        # Build the output filename: gamemode-DD.MM.YYYY-merged.txt
        date_parts = date.split('.')
        if len(date_parts) == 3:
            day, month, year = date_parts
            output_filename = f"{gamemode}-{day}.{month}.{year}-merged.txt"
        else:
            output_filename = f"{gamemode}-{date}-merged.txt"

        output_path = os.path.join("chat-logs", output_filename)

        # Create the chat-logs directory if it does not exist
        os.makedirs("chat-logs", exist_ok=True)

        print(f"\n🔄 Merging {date}...")
        print(f"📤 Output: {output_path}")

        if merge_log_files(files, output_path):
            merged_count += 1

            # Optionally delete the original files
            delete_originals = input("🗑️ Delete the original files? (y/N): ")
            if delete_originals.lower() in ['y', 'yes', 'j', 'ja']:
                for file_path in files:
                    try:
                        os.remove(file_path)
                        print(f"🗑️ Deleted: {os.path.basename(file_path)}")
                    except Exception as e:
                        print(f"❌ Error deleting {file_path}: {e}")

    print(f"\n🎉 Done! Merged {merged_count} days.")
    print("📁 Merged files are located in: chat-logs/")

    if merged_count > 0:
        print("\n💡 Next steps:")
        print("   1. Run the filter_raw_logs.py script on each *-merged.txt file")
        print("   2. Or use quick_add.py to add them directly")
        print("      Example: python quick_add.py chat-logs/crea-1-24.07.2020-merged.txt")


if __name__ == "__main__":
    main()
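A minimal sanity check of the two parsers above, assuming the file is importable as merge_daily_logs from the repository root; the filenames and the chat line in this sketch are made up for illustration and are not part of the commit:

# Hypothetical usage example, not part of the committed file.
from merge_daily_logs import extract_date_from_filename, extract_date_and_time_from_line

# Dates are normalized to DD.MM.YYYY regardless of the input format (example filenames are invented).
assert extract_date_from_filename("crea-1-2020-08-13-1.log") == "13.08.2020"
assert extract_date_from_filename("survival-1-25.07.2020.txt") == "25.07.2020"

# Timestamps become seconds since midnight; lines without one return None.
assert extract_date_and_time_from_line("[14:03:27] <Player> hi") == 14 * 3600 + 3 * 60 + 27
assert extract_date_and_time_from_line("no timestamp here") is None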