#!/usr/bin/env python3
"""
NEOG OS - Obsidian Daily Report Generator
Extracts AI interactions, commands, and errors from logs.db
Generates Markdown report in Obsidian vault
"""

import sqlite3
from datetime import datetime, timedelta
import os
import json
from collections import Counter

# CONFIG
LOG_DB_PATH = "/Users/neog/logs.db"
OBSIDIAN_VAULT_PATH = os.path.expanduser("~/Documents/Obsidian/Vault/NEOG Reports")
TEMPLATE_PATH = "/Users/neog/obsidian-report-template.md"
DATE_STR = datetime.now().strftime("%Y-%m-%d")
REPORT_PATH = os.path.join(OBSIDIAN_VAULT_PATH, f"{DATE_STR}-neog-report.md")
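# NOTE: the paths above are machine-specific; point LOG_DB_PATH, OBSIDIAN_VAULT_PATH,
# and TEMPLATE_PATH at the local database, vault, and template before running.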

# Ensure directory exists
os.makedirs(OBSIDIAN_VAULT_PATH, exist_ok=True)

def get_db_connection():
    """Connect to logs database"""
    return sqlite3.connect(LOG_DB_PATH)
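
def _format_time(ts):
    """Render a log timestamp as HH:MM:SS, falling back to the raw value.

    Shared helper for the repeated timestamp handling below; it assumes
    timestamps are stored as ISO-8601 strings, optionally with a trailing 'Z'.
    """
    try:
        dt = datetime.fromisoformat(ts.replace('Z', '+00:00'))
        return dt.strftime("%H:%M:%S")
    except (ValueError, AttributeError):
        # Non-ISO or NULL timestamps are shown exactly as stored
        return ts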

def fetch_ai_interactions():
    """Fetch all AI interactions (ai, ask, chat commands)"""
    conn = get_db_connection()
    c = conn.cursor()

    # Query for AI-related logs from today
    c.execute("""
        SELECT timestamp, source, message, data
        FROM logs
        WHERE (source = 'LM_STUDIO' OR source = 'TERMINAL')
          AND (message LIKE '%AI query%' OR message LIKE '%Chat message%' OR message LIKE '%ai %' OR message LIKE '%ask %')
          AND date(timestamp) = date('now', 'localtime')
        ORDER BY timestamp
    """)
    rows = c.fetchall()
    conn.close()

    if not rows:
        return "_No AI interactions today._\n"

    content = ""
    for row in rows:
        ts, src, msg, data_str = row

        # Parse timestamp for readable format
        time_str = _format_time(ts)

        # Extract question/query from message or data
        query_text = msg
        if data_str:
            try:
                data = json.loads(data_str)
                if 'question' in data:
                    query_text = f"**Q:** {data['question']}"
                elif 'response' in data:
                    query_text = f"**A:** {data['response'][:100]}..."
            except (json.JSONDecodeError, TypeError):
                # Malformed or non-JSON payloads fall back to the raw message
                pass

        content += f"- `{time_str}` [{src}] {query_text}\n"

    return content

def fetch_commands():
    """Fetch terminal commands executed today"""
    conn = get_db_connection()
    c = conn.cursor()

    c.execute("""
        SELECT timestamp, message
        FROM logs
        WHERE source = 'TERMINAL'
          AND message LIKE 'Command executed:%'
          AND date(timestamp) = date('now', 'localtime')
        ORDER BY timestamp
        LIMIT 50
    """)
    rows = c.fetchall()
    conn.close()

    if not rows:
        return "_No commands executed today._\n"

    content = ""
    for row in rows:
        ts, msg = row
        time_str = _format_time(ts)

        # Extract the command text from the log message
        cmd = msg.replace("Command executed:", "", 1).strip()
        content += f"- `{time_str}` `{cmd}`\n"

    return content

def fetch_errors():
    """Fetch all errors and warnings from today"""
    conn = get_db_connection()
    c = conn.cursor()

    c.execute("""
        SELECT timestamp, source, message, level
        FROM logs
        WHERE level IN ('error', 'warning')
          AND date(timestamp) = date('now', 'localtime')
        ORDER BY timestamp
    """)
    rows = c.fetchall()
    conn.close()

    if not rows:
        return "_No errors or warnings today. Clean run! 🎉_\n"

    content = ""
    for row in rows:
        ts, src, msg, level = row
        time_str = _format_time(ts)

        icon = "⚠️" if level == "warning" else "❌"
        content += f"- {icon} `{time_str}` [{src}] {msg}\n"

    return content

def get_system_stats():
    """Get comprehensive system statistics for today"""
    conn = get_db_connection()
    c = conn.cursor()

    stats = {}

    # Total logs today
    c.execute("SELECT COUNT(*) FROM logs WHERE date(timestamp) = date('now', 'localtime')")
    stats["TOTAL_LOGS"] = c.fetchone()[0]

    # AI interactions (same filter as fetch_ai_interactions so the count matches the listing)
    c.execute("""
        SELECT COUNT(*) FROM logs
        WHERE source IN ('LM_STUDIO', 'TERMINAL')
          AND (message LIKE '%AI query%' OR message LIKE '%Chat message%' OR message LIKE '%ai %' OR message LIKE '%ask %')
          AND date(timestamp) = date('now', 'localtime')
    """)
    stats["AI_INTERACTIONS"] = c.fetchone()[0]

    # Errors
    c.execute("SELECT COUNT(*) FROM logs WHERE level = 'error' AND date(timestamp) = date('now', 'localtime')")
    stats["ERRORS"] = c.fetchone()[0]

    # Most used command
    c.execute("""
        SELECT message FROM logs
        WHERE source = 'TERMINAL'
          AND message LIKE 'Command executed:%'
          AND date(timestamp) = date('now', 'localtime')
    """)
    commands = []
    for row in c.fetchall():
        cmd = row[0].replace("Command executed:", "", 1).strip()
        if cmd:
            # Count by executable name (first whitespace-separated token)
            commands.append(cmd.split()[0])

    if commands:
        most_common = Counter(commands).most_common(1)[0]
        stats["MOST_USED_COMMAND"] = f"`{most_common[0]}` ({most_common[1]}x)"
    else:
        stats["MOST_USED_COMMAND"] = "N/A"

    # Total commands
    stats["TOTAL_COMMANDS"] = len(commands)

    # Devices
    c.execute("SELECT COUNT(DISTINCT device_ip) FROM logs WHERE date(timestamp) = date('now', 'localtime')")
    stats["DEVICES"] = c.fetchone()[0]

    # LM Studio activity (approximated by the number of LM_STUDIO log entries today)
    c.execute("""
        SELECT COUNT(*) FROM logs
        WHERE source = 'LM_STUDIO'
          AND date(timestamp) = date('now', 'localtime')
    """)
    stats["LM_SESSIONS"] = c.fetchone()[0]

    # Peak activity hour
    c.execute("""
        SELECT strftime('%H', timestamp) as hour, COUNT(*) as count
        FROM logs
        WHERE date(timestamp) = date('now', 'localtime')
        GROUP BY hour
        ORDER BY count DESC
        LIMIT 1
    """)
    peak = c.fetchone()
    if peak:
        stats["PEAK_HOUR"] = f"{peak[0]}:00 ({peak[1]} logs)"
    else:
        stats["PEAK_HOUR"] = "N/A"

    conn.close()
    return stats

def generate_report():
    """Generate the Obsidian report using template"""
    print(f"🔄 Generating report for {DATE_STR}...")

    # Get all data
    stats = get_system_stats()
    ai_content = fetch_ai_interactions()
    commands_content = fetch_commands()
    errors_content = fetch_errors()

    # Load template
    with open(TEMPLATE_PATH, "r", encoding="utf-8") as f:
        template = f.read()

    # Replace placeholders
    now = datetime.now()
    placeholders = {
        "{{DATE}}": now.strftime("%Y-%m-%d"),
        "{{TIME}}": now.strftime("%H:%M:%S"),
        "{{TOTAL_LOGS}}": str(stats["TOTAL_LOGS"]),
        "{{AI_INTERACTIONS}}": str(stats["AI_INTERACTIONS"]),
        "{{ERRORS}}": str(stats["ERRORS"]),
        "{{MOST_USED_COMMAND}}": stats["MOST_USED_COMMAND"],
        "{{DEVICES}}": str(stats["DEVICES"]),
        "{{LM_SESSIONS}}": str(stats["LM_SESSIONS"]),
        "{{TOTAL_COMMANDS}}": str(stats["TOTAL_COMMANDS"]),
        "{{PEAK_HOUR}}": stats["PEAK_HOUR"],
        "{{AI_CONTENT}}": ai_content,
        "{{COMMANDS_CONTENT}}": commands_content,
        "{{ERRORS_CONTENT}}": errors_content,
    }
    report = template
    for placeholder, value in placeholders.items():
        report = report.replace(placeholder, value)

    # Write report
    with open(REPORT_PATH, "w", encoding="utf-8") as f:
        f.write(report)

    print(f"✅ Report generated: {REPORT_PATH}")
    print(f"📊 Summary: {stats['TOTAL_LOGS']} logs, {stats['AI_INTERACTIONS']} AI interactions, {stats['ERRORS']} errors")
    return REPORT_PATH

if __name__ == "__main__":
    try:
        report_path = generate_report()
        print(f"\n🎉 Success! Open in Obsidian:")
        print(f"   open '{OBSIDIAN_VAULT_PATH}'")
    except Exception as e:
        print(f"❌ Error generating report: {e}")
        import traceback
        traceback.print_exc()
        raise SystemExit(1)
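
# One way to run this automatically each evening (assumption: adjust the path to
# wherever this script is saved; any scheduler such as cron or launchd works):
#
#   55 23 * * * /usr/bin/env python3 /path/to/obsidian_report.py >> /tmp/neog-report.log 2>&1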
