#!/usr/bin/env python3
"""
Workspace API - Dynamic data for Workspace Dashboard
Port: 9003
"""

from flask import Flask, jsonify, request
from flask_cors import CORS
import os
import glob
import json
import time
import requests
from datetime import datetime, timedelta
from pathlib import Path
import psutil

app = Flask(__name__)
CORS(app)

HOME = os.path.expanduser("~")

# Directory structure
DIRS = {
    "apps": f"{HOME}/Apps",
    "web": f"{HOME}/Web",
    "projects": f"{HOME}/Projects",
    "services": f"{HOME}/Services",
    "scripts": f"{HOME}/Scripts",
    "docs": f"{HOME}/Docs",
    "archive": f"{HOME}/Archive"
}

def get_dir_size(path, max_depth=5, current_depth=0, visited=None):
    """Get directory size in bytes (with depth limit and visited tracking to avoid symlink loops)"""
    if current_depth > max_depth:
        return 0
    if visited is None:
        visited = set()

    # Get real path and check if already visited
    try:
        real_path = os.path.realpath(path)
        if real_path in visited:
            return 0  # Already visited, skip to avoid loops
        visited.add(real_path)
    except (OSError, PermissionError):
        return 0

    # Skip recursing into the Archive folder for performance
    if '/Archive/' in path:
        return 0

    total = 0
    try:
        for entry in os.scandir(path):
            try:
                if entry.is_symlink():
                    continue  # Skip symlinks
                if entry.is_file(follow_symlinks=False):
                    total += entry.stat(follow_symlinks=False).st_size
                elif entry.is_dir(follow_symlinks=False):
                    total += get_dir_size(entry.path, max_depth, current_depth + 1, visited)
            except (OSError, PermissionError):
                continue
    except (PermissionError, OSError):
        pass
    return total

def format_size(num_bytes):
    """Format a byte count as a human-readable string"""
    for unit in ['B', 'KB', 'MB', 'GB']:
        if num_bytes < 1024:
            return f"{num_bytes:.1f} {unit}"
        num_bytes /= 1024
    return f"{num_bytes:.1f} TB"

def scan_ios_apps():
    """Scan iOS apps directory"""
    apps = []
    apps_dir = DIRS["apps"]
    if os.path.exists(apps_dir):
        for item in os.listdir(apps_dir):
            path = os.path.join(apps_dir, item)
            if os.path.isdir(path) and item.endswith("-iOS"):
                has_claude = os.path.exists(os.path.join(path, "CLAUDE.md"))
                has_readme = os.path.exists(os.path.join(path, "README.md"))
                xcodeproj = glob.glob(os.path.join(path, "*.xcodeproj"))
                apps.append({
                    "name": item,
                    "path": path,
                    "type": "ios",
                    "hasClaudeMd": has_claude,
                    "hasReadme": has_readme,
                    "hasXcodeproj": len(xcodeproj) > 0,
                    "modified": datetime.fromtimestamp(os.path.getmtime(path)).isoformat()
                })
    return apps

def scan_web_apps():
    """Scan web apps directory"""
    categories = []
    web_dir = DIRS["web"]
    if os.path.exists(web_dir):
        for item in os.listdir(web_dir):
            path = os.path.join(web_dir, item)
            if os.path.isdir(path):
                html_files = glob.glob(os.path.join(path, "*.html"))
                categories.append({
                    "name": item,
                    "path": path,
                    "type": "web",
                    "count": len(html_files),
                    "files": [os.path.basename(f) for f in html_files[:20]],  # First 20
                    "modified": datetime.fromtimestamp(os.path.getmtime(path)).isoformat()
                })
    return categories

def scan_projects():
    """Scan projects directory"""
    projects = []
    projects_dir = DIRS["projects"]
    if os.path.exists(projects_dir):
        for item in os.listdir(projects_dir):
            path = os.path.join(projects_dir, item)
            if os.path.isdir(path):
                has_claude = os.path.exists(os.path.join(path, "CLAUDE.md"))
                has_readme = os.path.exists(os.path.join(path, "README.md"))
                has_package = os.path.exists(os.path.join(path, "package.json"))
                projects.append({
                    "name": item,
                    "path": path,
                    "type": "project",
                    "hasClaudeMd": has_claude,
                    "hasReadme": has_readme,
                    "hasPackageJson": has_package,
                    "modified": datetime.fromtimestamp(os.path.getmtime(path)).isoformat()
                })
    return projects

def scan_scripts():
    """Scan scripts directory"""
    scripts = []
    scripts_dir = DIRS["scripts"]
    if os.path.exists(scripts_dir):
        for root, dirs, files in os.walk(scripts_dir):
            for f in files:
                if f.endswith(".sh"):
                    path = os.path.join(root, f)
                    scripts.append({
                        "name": f,
                        "path": path,
                        "type": "script",
                        "category": os.path.basename(root),
                        "modified": datetime.fromtimestamp(os.path.getmtime(path)).isoformat()
                    })
    return scripts

def scan_services():
    """Scan Python services"""
    services = []
    services_dir = DIRS["services"]
    if os.path.exists(services_dir):
        for f in os.listdir(services_dir):
            if f.endswith(".py"):
                path = os.path.join(services_dir, f)
                services.append({
                    "name": f,
                    "path": path,
                    "type": "service",
                    "modified": datetime.fromtimestamp(os.path.getmtime(path)).isoformat()
                })
    return services

@app.route('/api/stats')
def get_stats():
    """Get workspace statistics"""
    web_categories = scan_web_apps()
    stats = {
        "ios": len(scan_ios_apps()),
        "web": sum(c["count"] for c in web_categories),
        "webCategories": len(web_categories),
        "projects": len(scan_projects()),
        "scripts": len(scan_scripts()),
        "services": len(scan_services()),
        "timestamp": datetime.now().isoformat()
    }

    # Get sizes
    for key, path in DIRS.items():
        if os.path.exists(path):
            stats[f"{key}Size"] = format_size(get_dir_size(path))

    return jsonify(stats)
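
# Shape of a typical /api/stats response (the values below are purely illustrative):
#   {"ios": 4, "web": 132, "webCategories": 7, "projects": 12, "scripts": 23,
#    "services": 5, "appsSize": "1.2 GB", "webSize": "840.0 MB", ...,
#    "timestamp": "2025-01-01T12:00:00"}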

@app.route('/api/items')
def get_items():
    """Get all workspace items"""
    items = []

    # iOS Apps
    for app in scan_ios_apps():
        items.append({
            "id": f"ios_{app['name']}",
            "type": "ios",
            "name": app["name"],
            "desc": "iOS App",
            "icon": "📱",
            "path": app["path"],
            "hasClaudeMd": app["hasClaudeMd"],
            "modified": app["modified"]
        })

    # Web Categories
    for cat in scan_web_apps():
        items.append({
            "id": f"web_{cat['name']}",
            "type": "web",
            "name": cat["name"].title(),
            "desc": f"{cat['count']} HTML files",
            "icon": "🌐",
            "path": cat["path"],
            "count": cat["count"],
            "files": cat["files"],
            "modified": cat["modified"]
        })

    # Projects
    for proj in scan_projects():
        items.append({
            "id": f"proj_{proj['name']}",
            "type": "project",
            "name": proj["name"],
            "desc": "Project",
            "icon": "🗂️",
            "path": proj["path"],
            "hasClaudeMd": proj["hasClaudeMd"],
            "modified": proj["modified"]
        })

    return jsonify(items)

@app.route('/api/search')
def search():
    """Search across all items"""
    query = request.args.get('q', '').lower()
    limit = int(request.args.get('limit', 20))

    if len(query) < 2:
        return jsonify([])

    results = []

    # Search iOS apps
    for app in scan_ios_apps():
        if query in app['name'].lower():
            results.append({
                "type": "ios",
                "name": app['name'],
                "path": app['path'],
                "icon": "📱"
            })

    # Search web files
    web_dir = DIRS["web"]
    if os.path.exists(web_dir):
        for root, dirs, files in os.walk(web_dir):
            for f in files:
                if f.endswith(".html") and query in f.lower():
                    results.append({
                        "type": "web",
                        "name": f,
                        "path": os.path.join(root, f),
                        "icon": "🌐",
                        "category": os.path.basename(root)
                    })

    # Search projects
    for proj in scan_projects():
        if query in proj['name'].lower():
            results.append({
                "type": "project",
                "name": proj['name'],
                "path": proj['path'],
                "icon": "🗂️"
            })

    # Search scripts
    for script in scan_scripts():
        if query in script['name'].lower():
            results.append({
                "type": "script",
                "name": script['name'],
                "path": script['path'],
                "icon": "🔧",
                "category": script["category"]
            })

    return jsonify(results[:limit])
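
# Example query (results are hypothetical), assuming the service runs on localhost:9003:
#   curl 'http://localhost:9003/api/search?q=dashboard&limit=5'
#   -> [{"type": "web", "name": "dashboard.html", "path": "...", "icon": "🌐", "category": "tools"}, ...]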

@app.route('/api/files/<path:filepath>')
def get_file_content(filepath):
    """Get file content for preview"""
    # Resolve the path first so ".." segments cannot escape the home directory
    full_path = os.path.realpath(os.path.join(HOME, filepath))

    if not (full_path == HOME or full_path.startswith(HOME + os.sep)):
        return jsonify({"error": "Access denied"}), 403

    if not os.path.exists(full_path):
        return jsonify({"error": "File not found"}), 404

    try:
        with open(full_path, 'r', errors='replace') as f:
            content = f.read()

        return jsonify({
            "path": full_path,
            "name": os.path.basename(full_path),
            "content": content[:50000],  # Limit to 50KB
            "size": os.path.getsize(full_path),
            "modified": datetime.fromtimestamp(os.path.getmtime(full_path)).isoformat()
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/api/web/<category>')
def get_web_category(category):
    """Get all files in a web category"""
    category_path = os.path.join(DIRS["web"], category)

    if not os.path.exists(category_path):
        return jsonify({"error": "Category not found"}), 404

    files = []
    for f in os.listdir(category_path):
        if f.endswith(".html"):
            path = os.path.join(category_path, f)
            files.append({
                "name": f,
                "path": path,
                "url": f"/Web/{category}/{f}",
                "size": os.path.getsize(path),
                "modified": datetime.fromtimestamp(os.path.getmtime(path)).isoformat()
            })

    # Sort by modified date (newest first)
    files.sort(key=lambda x: x['modified'], reverse=True)

    return jsonify({
        "category": category,
        "count": len(files),
        "files": files
    })

@app.route('/api/health')
def health():
    """Health check"""
    return jsonify({
        "status": "ok",
        "service": "workspace-api",
        "port": 9003,
        "timestamp": datetime.now().isoformat()
    })

@app.route('/api/system')
def system_stats():
    """Get system statistics"""
    try:
        # CPU
        cpu_percent = psutil.cpu_percent(interval=0.1)
        cpu_count = psutil.cpu_count()

        # Memory
        mem = psutil.virtual_memory()
        mem_total = mem.total / (1024**3)  # GB
        mem_used = mem.used / (1024**3)
        mem_percent = mem.percent

        # Disk
        disk = psutil.disk_usage('/')
        disk_total = disk.total / (1024**3)
        disk_used = disk.used / (1024**3)
        disk_percent = disk.percent

        # Battery (if available)
        battery = None
        try:
            bat = psutil.sensors_battery()
            if bat:
                battery = {
                    "percent": bat.percent,
                    "plugged": bat.power_plugged
                }
        except:
            pass

        # Network (simple)
        net = psutil.net_io_counters()
        net_sent = net.bytes_sent / (1024**2)  # MB
        net_recv = net.bytes_recv / (1024**2)

        return jsonify({
            "cpu": {
                "percent": cpu_percent,
                "cores": cpu_count
            },
            "memory": {
                "total_gb": round(mem_total, 1),
                "used_gb": round(mem_used, 1),
                "percent": mem_percent
            },
            "disk": {
                "total_gb": round(disk_total, 1),
                "used_gb": round(disk_used, 1),
                "percent": disk_percent
            },
            "battery": battery,
            "network": {
                "sent_mb": round(net_sent, 1),
                "recv_mb": round(net_recv, 1)
            },
            "timestamp": datetime.now().isoformat()
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/api/processes')
def get_processes():
    """Get top processes by CPU/Memory"""
    limit = int(request.args.get('limit', 10))
    sort_by = request.args.get('sort', 'cpu')  # cpu or memory

    processes = []
    for proc in psutil.process_iter(['pid', 'name', 'cpu_percent', 'memory_percent']):
        try:
            pinfo = proc.info
            processes.append({
                "pid": pinfo['pid'],
                "name": pinfo['name'],
                "cpu": pinfo['cpu_percent'] or 0,
                "memory": round(pinfo['memory_percent'] or 0, 1)
            })
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            pass

    # Sort
    key = 'cpu' if sort_by == 'cpu' else 'memory'
    processes.sort(key=lambda x: x[key], reverse=True)

    return jsonify(processes[:limit])
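
# Example: top 5 processes by memory (output shape only, values vary):
#   curl 'http://localhost:9003/api/processes?limit=5&sort=memory'
#   -> [{"pid": 501, "name": "Xcode", "cpu": 12.0, "memory": 8.4}, ...]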

@app.route('/api/actions/open-finder', methods=['POST'])
def open_finder():
    """Open path in Finder"""
    data = request.json or {}
    path = data.get('path', '')
    if not path or not os.path.exists(path):
        return jsonify({"error": "Invalid path"}), 400
    import subprocess
    subprocess.run(['open', path])  # Avoid interpreting the path through a shell
    return jsonify({"status": "ok", "path": path})

@app.route('/api/actions/open-vscode', methods=['POST'])
def open_vscode():
    """Open path in VS Code"""
    data = request.json or {}
    path = data.get('path', '')
    if not path or not os.path.exists(path):
        return jsonify({"error": "Invalid path"}), 400
    import subprocess
    subprocess.run(['code', path])  # Avoid interpreting the path through a shell
    return jsonify({"status": "ok", "path": path})

@app.route('/api/actions/open-xcode', methods=['POST'])
def open_xcode():
    """Open Xcode project"""
    data = request.json or {}
    path = data.get('path', '')
    if not path or not os.path.exists(path):
        return jsonify({"error": "Invalid path"}), 400

    # Find .xcodeproj or .xcworkspace
    xcodeproj = glob.glob(os.path.join(path, "*.xcodeproj"))
    xcworkspace = glob.glob(os.path.join(path, "*.xcworkspace"))

    target = xcworkspace[0] if xcworkspace else (xcodeproj[0] if xcodeproj else None)
    if target:
        import subprocess
        subprocess.run(['open', target])  # Avoid interpreting the path through a shell
        return jsonify({"status": "ok", "opened": target})
    return jsonify({"error": "No Xcode project found"}), 404

@app.route('/api/actions/run-script', methods=['POST'])
def run_script():
    """Run a shell script"""
    data = request.json or {}
    path = data.get('path', '')
    if not path or not os.path.exists(path):
        return jsonify({"error": "Invalid path"}), 400
    if not path.endswith('.sh'):
        return jsonify({"error": "Not a shell script"}), 400

    import subprocess
    try:
        result = subprocess.run(['bash', path], capture_output=True, text=True, timeout=30)
        return jsonify({
            "status": "ok",
            "stdout": result.stdout[-2000:] if len(result.stdout) > 2000 else result.stdout,
            "stderr": result.stderr[-500:] if len(result.stderr) > 500 else result.stderr,
            "returncode": result.returncode
        })
    except subprocess.TimeoutExpired:
        return jsonify({"error": "Script timeout (30s)"}), 408
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/api/recent')
def get_recent():
    """Get recently modified files"""
    limit = int(request.args.get('limit', 10))
    recent = []

    # Scan web files
    web_dir = DIRS["web"]
    if os.path.exists(web_dir):
        for root, dirs, files in os.walk(web_dir):
            for f in files:
                if f.endswith(".html"):
                    path = os.path.join(root, f)
                    mtime = os.path.getmtime(path)
                    recent.append({
                        "name": f,
                        "path": path,
                        "type": "web",
                        "modified": datetime.fromtimestamp(mtime).isoformat(),
                        "mtime": mtime
                    })

    # Sort by modification time and return top N
    recent.sort(key=lambda x: x['mtime'], reverse=True)
    for item in recent:
        del item['mtime']

    return jsonify(recent[:limit])

# =============================================================================
# QUICK ACTIONS
# =============================================================================

QUICK_ACTIONS = [
    {"id": "cc-stats", "name": "CC Stats", "icon": "📊", "desc": "Claude Code statistics"},
    {"id": "git-status", "name": "Git Status", "icon": "📦", "desc": "Repos with changes"},
    {"id": "clear-cache", "name": "Clear Cache", "icon": "🧹", "desc": "Clear system caches"},
    {"id": "disk-usage", "name": "Disk Usage", "icon": "💾", "desc": "Detailed disk analysis"},
    {"id": "open-dashboard", "name": "Dashboard", "icon": "🏠", "desc": "Open workspace dashboard"},
    {"id": "check-ports", "name": "Check Ports", "icon": "🔌", "desc": "Active network ports"}
]

@app.route('/api/actions')
def get_actions():
    """Get available quick actions"""
    return jsonify(QUICK_ACTIONS)
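
# Quick actions are invoked with an empty POST, e.g.:
#   curl -X POST http://localhost:9003/api/actions/git-status
#   curl -X POST http://localhost:9003/api/actions/check-ports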

@app.route('/api/actions/cc-stats', methods=['POST'])
def action_cc_stats():
    """Get Claude Code ROI stats"""
    # Use the already-imported requests module instead of shelling out to curl
    try:
        resp = requests.get('http://localhost:9001/api/cc/roi/summary', timeout=5)
        if resp.ok:
            return jsonify(resp.json())
        return jsonify({"error": "CC ROI API not available"}), 503
    except requests.RequestException:
        return jsonify({"error": "CC ROI API not available"}), 503
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/api/actions/git-status', methods=['POST'])
def action_git_status():
    """Get git status for all repos"""
    import subprocess
    repos_with_changes = []

    # Check common project directories
    check_dirs = [
        DIRS["apps"], DIRS["projects"], DIRS["services"], DIRS["scripts"]
    ]

    for base_dir in check_dirs:
        if not os.path.exists(base_dir):
            continue
        for item in os.listdir(base_dir):
            path = os.path.join(base_dir, item)
            git_dir = os.path.join(path, ".git")
            if os.path.isdir(git_dir):
                try:
                    result = subprocess.run(
                        ['git', '-C', path, 'status', '--porcelain'],
                        capture_output=True, text=True, timeout=5
                    )
                    if result.stdout.strip():
                        changes = len(result.stdout.strip().split('\n'))
                        repos_with_changes.append({
                            "name": item,
                            "path": path,
                            "changes": changes
                        })
                except:
                    pass

    return jsonify({
        "repos_with_changes": repos_with_changes,
        "total": len(repos_with_changes)
    })

@app.route('/api/actions/clear-cache', methods=['POST'])
def action_clear_cache():
    """Clear system caches"""
    import subprocess
    cleared = []

    # Clear Xcode derived data (if very large)
    derived_data = os.path.expanduser("~/Library/Developer/Xcode/DerivedData")
    if os.path.exists(derived_data):
        size_before = get_dir_size(derived_data, max_depth=2)
        # Only clear if > 5GB
        if size_before > 5 * 1024 * 1024 * 1024:
            subprocess.run(['rm', '-rf', derived_data], capture_output=True)
            cleared.append({"name": "Xcode DerivedData", "size": format_size(size_before)})

    # Clear npm cache
    npm_cache = os.path.expanduser("~/.npm/_cacache")
    if os.path.exists(npm_cache):
        size = get_dir_size(npm_cache, max_depth=2)
        if size > 500 * 1024 * 1024:  # > 500MB
            subprocess.run(['npm', 'cache', 'clean', '--force'], capture_output=True)
            cleared.append({"name": "npm cache", "size": format_size(size)})

    # Clear pip cache
    pip_cache = os.path.expanduser("~/Library/Caches/pip")
    if os.path.exists(pip_cache):
        size = get_dir_size(pip_cache, max_depth=2)
        if size > 200 * 1024 * 1024:  # > 200MB
            subprocess.run(['pip3', 'cache', 'purge'], capture_output=True)
            cleared.append({"name": "pip cache", "size": format_size(size)})

    return jsonify({
        "cleared": cleared,
        "total_items": len(cleared)
    })

@app.route('/api/actions/disk-usage', methods=['POST'])
def action_disk_usage():
    """Get detailed disk usage"""
    usage = {}

    dirs_to_check = {
        "Home": HOME,
        "Apps (iOS)": DIRS["apps"],
        "Web": DIRS["web"],
        "Projects": DIRS["projects"],
        "Scripts": DIRS["scripts"],
        "Services": DIRS["services"],
        "Archive": DIRS["archive"],
        "Downloads": os.path.expanduser("~/Downloads"),
        "Desktop": os.path.expanduser("~/Desktop"),
        "Documents": os.path.expanduser("~/Documents")
    }

    for name, path in dirs_to_check.items():
        if os.path.exists(path):
            size = get_dir_size(path, max_depth=3)
            usage[name] = {
                "path": path,
                "size_bytes": size,
                "size_human": format_size(size)
            }

    # Sort by size
    sorted_usage = dict(sorted(usage.items(), key=lambda x: x[1]['size_bytes'], reverse=True))

    return jsonify(sorted_usage)

@app.route('/api/actions/check-ports', methods=['POST'])
def action_check_ports():
    """Check active network ports"""
    import subprocess
    try:
        result = subprocess.run(
            ['lsof', '-i', '-P', '-n'],
            capture_output=True, text=True, timeout=10
        )

        ports = {}
        for line in result.stdout.split('\n')[1:]:  # Skip header
            parts = line.split()
            if len(parts) >= 9 and 'LISTEN' in line:
                name = parts[0]
                pid = parts[1]
                addr = parts[8]
                if ':' in addr:
                    port = addr.split(':')[-1]
                    if port.isdigit():
                        ports[port] = {"name": name, "pid": pid, "address": addr}

        return jsonify({
            "ports": ports,
            "count": len(ports)
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500

# =============================================================================
# SERVICE MANAGER
# =============================================================================

# Paths for service management
SERVICES_CONFIG_FILE = os.path.join(HOME, "Services", "data", "services.json")
SERVICES_HISTORY_FILE = os.path.join(HOME, "Services", "data", "services-history.json")
SERVICES_LOGS_DIR = os.path.join(HOME, "Services", "logs")

# Ensure the data and logs directories exist
os.makedirs(os.path.dirname(SERVICES_CONFIG_FILE), exist_ok=True)
os.makedirs(SERVICES_LOGS_DIR, exist_ok=True)

def load_services_config():
    """Load services from JSON config file"""
    try:
        if os.path.exists(SERVICES_CONFIG_FILE):
            with open(SERVICES_CONFIG_FILE, 'r') as f:
                data = json.load(f)
                services = {}
                for svc in data.get('services', []):
                    services[svc['id']] = {
                        "name": svc.get('name', svc['id']),
                        "command": svc.get('command', ''),
                        "port": svc.get('port', 0),
                        "log": os.path.join(SERVICES_LOGS_DIR, f"{svc['id']}.log"),
                        "cwd": HOME,
                        "icon": svc.get('icon', '🔧'),
                        "category": svc.get('category', 'tools'),
                        "healthCheck": svc.get('healthCheck'),
                        "description": svc.get('description', '')
                    }
                return services
    except Exception as e:
        print(f"Error loading services config: {e}")

    # Fallback to default services
    return {
        "http-server": {
            "name": "HTTP Server",
            "command": f"python3 -m http.server 8000 --bind 0.0.0.0 -d {HOME}",
            "port": 8000,
            "log": os.path.join(SERVICES_LOGS_DIR, "http-server.log"),
            "cwd": HOME,
            "icon": "🌐",
            "category": "web",
            "healthCheck": "/",
            "description": "Static file server"
        },
        "workspace-api": {
            "name": "Workspace API",
            "command": f"python3 {HOME}/Services/workspace-api.py",
            "port": 9003,
            "log": os.path.join(SERVICES_LOGS_DIR, "workspace-api.log"),
            "cwd": f"{HOME}/Services",
            "icon": "⚡",
            "category": "api",
            "healthCheck": "/api/health",
            "description": "Main workspace API"
        },
        "cc-roi-api": {
            "name": "CC ROI API",
            "command": f"python3 {HOME}/Services/cc-roi-api.py",
            "port": 9001,
            "log": os.path.join(SERVICES_LOGS_DIR, "cc-roi-api.log"),
            "cwd": f"{HOME}/Services",
            "icon": "📊",
            "category": "api",
            "healthCheck": "/api/cc/health",
            "description": "Claude Code ROI tracking"
        }
    }
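
# Expected layout of ~/Services/data/services.json (field names mirror the loader above;
# the entry itself is only an example):
#   {
#     "services": [
#       {"id": "workspace-api", "name": "Workspace API", "icon": "⚡",
#        "command": "python3 ~/Services/workspace-api.py", "port": 9003,
#        "category": "api", "healthCheck": "/api/health",
#        "description": "Main workspace API"}
#     ]
#   }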

def save_services_config(services):
    """Save services to JSON config file"""
    try:
        services_list = []
        for sid, svc in services.items():
            services_list.append({
                "id": sid,
                "name": svc.get('name', sid),
                "icon": svc.get('icon', '🔧'),
                "command": svc.get('command', ''),
                "port": svc.get('port', 0),
                "category": svc.get('category', 'tools'),
                "healthCheck": svc.get('healthCheck'),
                "description": svc.get('description', '')
            })
        with open(SERVICES_CONFIG_FILE, 'w') as f:
            json.dump({"services": services_list}, f, indent=2)
        return True
    except Exception as e:
        print(f"Error saving services config: {e}")
        return False

def load_services_history():
    """Load services history from JSON file"""
    try:
        if os.path.exists(SERVICES_HISTORY_FILE):
            with open(SERVICES_HISTORY_FILE, 'r') as f:
                return json.load(f)
    except:
        pass
    return {}

def save_services_history(history):
    """Save services history to JSON file"""
    try:
        with open(SERVICES_HISTORY_FILE, 'w') as f:
            json.dump(history, f, indent=2)
    except Exception as e:
        print(f"Error saving services history: {e}")

def record_service_event(service_id, event_type, reason=None):
    """Record a service start/stop event"""
    history = load_services_history()
    if service_id not in history:
        history[service_id] = {"uptime_history": [], "crashes": [], "metrics": []}

    now = datetime.now().isoformat()

    if event_type == "start":
        history[service_id]["uptime_history"].append({
            "start": now,
            "end": None,
            "duration": None
        })
    elif event_type == "stop":
        # Close the last uptime entry
        if history[service_id]["uptime_history"]:
            last = history[service_id]["uptime_history"][-1]
            if last["end"] is None:
                last["end"] = now
                start_dt = datetime.fromisoformat(last["start"])
                end_dt = datetime.fromisoformat(now)
                last["duration"] = int((end_dt - start_dt).total_seconds())

        history[service_id]["crashes"].append({
            "timestamp": now,
            "reason": reason or "manual_stop"
        })

    # Prune old entries (keep last 7 days)
    cutoff = (datetime.now() - timedelta(days=7)).isoformat()
    history[service_id]["uptime_history"] = [
        h for h in history[service_id]["uptime_history"]
        if h["start"] > cutoff
    ]
    history[service_id]["crashes"] = [
        c for c in history[service_id]["crashes"]
        if c["timestamp"] > cutoff
    ]
    history[service_id]["metrics"] = [
        m for m in history[service_id]["metrics"]
        if m["timestamp"] > cutoff
    ]

    save_services_history(history)
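
# The history file groups entries per service id, roughly:
#   {"workspace-api": {"uptime_history": [{"start": "...", "end": "...", "duration": 3600}],
#                      "crashes": [{"timestamp": "...", "reason": "manual_stop"}],
#                      "metrics": []}}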

def check_service_health(service):
    """Perform HTTP health check on a service"""
    health = {
        "port": is_service_running(service.get("port", 0)),
        "http": None,
        "response_time": None,
        "last_check": datetime.now().isoformat()
    }

    health_endpoint = service.get("healthCheck")
    if health_endpoint and health["port"]:
        try:
            start = time.time()
            url = f"http://localhost:{service['port']}{health_endpoint}"
            resp = requests.get(url, timeout=5)
            health["http"] = resp.status_code == 200
            health["response_time"] = round((time.time() - start) * 1000)  # ms
        except:
            health["http"] = False

    return health

# Load services from config
SERVICES = load_services_config()

def is_service_running(port):
    """Check if a service is running on the given port using lsof"""
    import subprocess
    try:
        result = subprocess.run(
            ['lsof', '-i', f':{port}', '-t'],
            capture_output=True, text=True, timeout=5
        )
        return result.returncode == 0 and result.stdout.strip() != ''
    except:
        return False

def get_service_pid(port):
    """Get PID of process listening on port using lsof"""
    import subprocess
    try:
        result = subprocess.run(
            ['lsof', '-i', f':{port}', '-t'],
            capture_output=True, text=True, timeout=5
        )
        if result.returncode == 0 and result.stdout.strip():
            # Return first PID (could be multiple)
            return int(result.stdout.strip().split('\n')[0])
    except:
        pass
    return None

def get_service_info(service_id, include_health=False):
    """Get detailed info about a service"""
    if service_id not in SERVICES:
        return None

    service = SERVICES[service_id]
    port = service["port"]
    pid = get_service_pid(port)
    running = pid is not None

    info = {
        "id": service_id,
        "name": service["name"],
        "port": port,
        "running": running,
        "pid": pid,
        "icon": service.get("icon", "🔧"),
        "log": service["log"],
        "category": service.get("category", "tools"),
        "description": service.get("description", ""),
        "healthCheck": service.get("healthCheck")
    }

    if running and pid:
        try:
            proc = psutil.Process(pid)
            info["cpu"] = round(proc.cpu_percent(), 1)
            info["memory"] = round(proc.memory_percent(), 1)
            info["uptime"] = int(datetime.now().timestamp() - proc.create_time())
            # Get memory in MB
            info["memory_mb"] = round(proc.memory_info().rss / 1024 / 1024, 1)
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            pass

    # Include health check if requested
    if include_health:
        info["health"] = check_service_health(service)

    return info

@app.route('/api/services')
def list_services():
    """List all services with status"""
    include_health = request.args.get('health', 'false').lower() == 'true'
    category_filter = request.args.get('category')

    services = []
    for service_id in SERVICES:
        info = get_service_info(service_id, include_health=include_health)
        if info:
            # Filter by category if specified
            if category_filter and info.get('category') != category_filter:
                continue
            services.append(info)

    # Sort by category then name
    services.sort(key=lambda x: (x.get('category', 'z'), x.get('name', '')))
    return jsonify(services)

@app.route('/api/services/<service_id>')
def get_service(service_id):
    """Get single service info"""
    include_health = request.args.get('health', 'false').lower() == 'true'
    info = get_service_info(service_id, include_health=include_health)
    if info:
        return jsonify(info)
    return jsonify({"error": "Service not found"}), 404

@app.route('/api/services/<service_id>/history')
def get_service_history(service_id):
    """Get service history (uptime, crashes, metrics)"""
    if service_id not in SERVICES:
        return jsonify({"error": "Service not found"}), 404

    history = load_services_history()
    svc_history = history.get(service_id, {
        "uptime_history": [],
        "crashes": [],
        "metrics": []
    })

    # Calculate uptime percentage (last 24h)
    now = datetime.now()
    day_ago = now - timedelta(days=1)
    total_uptime = 0

    for h in svc_history.get("uptime_history", []):
        start = datetime.fromisoformat(h["start"])
        if start < day_ago:
            start = day_ago
        end = datetime.fromisoformat(h["end"]) if h["end"] else now
        if end > now:
            end = now
        if end > day_ago:
            total_uptime += (end - start).total_seconds()

    uptime_pct_24h = round((total_uptime / 86400) * 100, 1) if total_uptime > 0 else 0

    return jsonify({
        "service_id": service_id,
        "uptime_history": svc_history.get("uptime_history", [])[-20:],  # Last 20 entries
        "crashes": svc_history.get("crashes", [])[-10:],  # Last 10 crashes
        "metrics": svc_history.get("metrics", [])[-60:],  # Last 60 metric samples
        "uptime_pct_24h": uptime_pct_24h
    })

@app.route('/api/services', methods=['POST'])
def add_service():
    """Add a new service"""
    global SERVICES
    data = request.json
    if not data or not data.get('id'):
        return jsonify({"error": "Service ID required"}), 400

    service_id = data['id']
    if service_id in SERVICES:
        return jsonify({"error": "Service already exists"}), 400

    SERVICES[service_id] = {
        "name": data.get('name', service_id),
        "command": data.get('command', ''),
        "port": data.get('port', 0),
        "log": os.path.join(SERVICES_LOGS_DIR, f"{service_id}.log"),
        "cwd": data.get('cwd', HOME),
        "icon": data.get('icon', '🔧'),
        "category": data.get('category', 'tools'),
        "healthCheck": data.get('healthCheck'),
        "description": data.get('description', '')
    }

    save_services_config(SERVICES)
    return jsonify({"status": "created", "service": service_id})

@app.route('/api/services/<service_id>', methods=['DELETE'])
def remove_service(service_id):
    """Remove a service"""
    global SERVICES
    if service_id not in SERVICES:
        return jsonify({"error": "Service not found"}), 404

    # Don't allow removing running services
    if is_service_running(SERVICES[service_id]["port"]):
        return jsonify({"error": "Cannot remove running service. Stop it first."}), 400

    del SERVICES[service_id]
    save_services_config(SERVICES)
    return jsonify({"status": "removed", "service": service_id})

@app.route('/api/services/<service_id>', methods=['PUT'])
def update_service(service_id):
    """Update service configuration"""
    global SERVICES
    if service_id not in SERVICES:
        return jsonify({"error": "Service not found"}), 404

    data = request.get_json(silent=True) or {}
    service = SERVICES[service_id]

    # Update allowed fields
    if 'name' in data: service['name'] = data['name']
    if 'command' in data: service['command'] = data['command']
    if 'port' in data: service['port'] = data['port']
    if 'icon' in data: service['icon'] = data['icon']
    if 'category' in data: service['category'] = data['category']
    if 'healthCheck' in data: service['healthCheck'] = data['healthCheck']
    if 'description' in data: service['description'] = data['description']

    save_services_config(SERVICES)
    return jsonify({"status": "updated", "service": service_id})

@app.route('/api/services/<service_id>/start', methods=['POST'])
def start_service(service_id):
    """Start a service"""
    if service_id not in SERVICES:
        return jsonify({"error": "Service not found"}), 404

    service = SERVICES[service_id]

    if is_service_running(service["port"]):
        return jsonify({"error": "Service already running"}), 400

    import subprocess
    # Opened with a context manager: the child process inherits the file descriptors,
    # so the parent's handle can be closed as soon as Popen returns
    with open(service["log"], "a") as log_file:
        subprocess.Popen(
            service["command"],
            shell=True,
            cwd=service["cwd"],
            stdout=log_file,
            stderr=log_file,
            start_new_session=True
        )

    # Wait a bit and check whether the process actually bound the port
    time.sleep(2)

    if is_service_running(service["port"]):
        record_service_event(service_id, "start")
        return jsonify({"status": "started", "service": service_id})
    else:
        return jsonify({"error": "Failed to start service"}), 500

@app.route('/api/services/<service_id>/stop', methods=['POST'])
def stop_service(service_id):
    """Stop a service"""
    if service_id not in SERVICES:
        return jsonify({"error": "Service not found"}), 404

    service = SERVICES[service_id]
    pid = get_service_pid(service["port"])

    if not pid:
        return jsonify({"error": "Service not running"}), 400

    try:
        proc = psutil.Process(pid)
        proc.terminate()
        proc.wait(timeout=5)
        record_service_event(service_id, "stop", "manual_stop")
        return jsonify({"status": "stopped", "service": service_id})
    except psutil.TimeoutExpired:
        proc.kill()
        record_service_event(service_id, "stop", "force_kill")
        return jsonify({"status": "killed", "service": service_id})
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/api/services/<service_id>/restart', methods=['POST'])
def restart_service(service_id):
    """Restart a service"""
    # Stop first (error responses from stop_service come back as (response, status) tuples)
    stop_response = stop_service(service_id)
    if isinstance(stop_response, tuple) and stop_response[1] == 500:
        return stop_response

    time.sleep(1)

    # Then start
    return start_service(service_id)

MAX_LOG_SIZE_MB = 10  # Max log file size before rotation
MAX_LOG_BACKUPS = 5   # Number of backup files to keep

def rotate_log_if_needed(log_path):
    """Rotate log file if it exceeds MAX_LOG_SIZE_MB"""
    if not os.path.exists(log_path):
        return False

    size_mb = os.path.getsize(log_path) / (1024 * 1024)
    if size_mb < MAX_LOG_SIZE_MB:
        return False

    # Shift existing backups (.4 -> .5, .3 -> .4, ...); the oldest generation is overwritten
    for i in range(MAX_LOG_BACKUPS - 1, 0, -1):
        old_backup = f"{log_path}.{i}"
        new_backup = f"{log_path}.{i + 1}"
        if os.path.exists(old_backup):
            os.replace(old_backup, new_backup)

    # Move current to .1
    os.replace(log_path, f"{log_path}.1")

    # Create new empty log
    with open(log_path, 'w') as f:
        f.write(f"[{datetime.now().isoformat()}] Log rotated (previous file: {log_path}.1)\n")

    return True
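
# Rotation keeps up to MAX_LOG_BACKUPS generations alongside the live file, e.g.:
#   workspace-api.log, workspace-api.log.1 (newest backup) ... workspace-api.log.5 (oldest)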

def get_log_size(log_path):
    """Get log file size in human readable format"""
    if not os.path.exists(log_path):
        return "0 B"
    return format_size(os.path.getsize(log_path))

@app.route('/api/services/<service_id>/logs')
def get_service_logs(service_id):
    """Get service logs"""
    if service_id not in SERVICES:
        return jsonify({"error": "Service not found"}), 404

    service = SERVICES[service_id]
    log_path = service["log"]
    lines = int(request.args.get('lines', 100))

    # Check for rotation
    rotated = rotate_log_if_needed(log_path)

    if not os.path.exists(log_path):
        return jsonify({"logs": [], "path": log_path, "size": "0 B"})

    try:
        with open(log_path, 'r') as f:
            all_lines = f.readlines()
            log_lines = all_lines[-lines:]
            return jsonify({
                "logs": [line.rstrip() for line in log_lines],
                "path": log_path,
                "total_lines": len(all_lines),
                "size": get_log_size(log_path),
                "rotated": rotated
            })
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/api/services/<service_id>/logs/clear', methods=['POST'])
def clear_service_logs(service_id):
    """Clear service logs"""
    if service_id not in SERVICES:
        return jsonify({"error": "Service not found"}), 404

    service = SERVICES[service_id]
    log_path = service["log"]

    try:
        with open(log_path, 'w') as f:
            f.write(f"[{datetime.now().isoformat()}] Logs cleared\n")
        return jsonify({"status": "cleared"})
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/api/services/<service_id>/logs/download')
def download_service_logs(service_id):
    """Download service logs as file"""
    if service_id not in SERVICES:
        return jsonify({"error": "Service not found"}), 404

    service = SERVICES[service_id]
    log_path = service["log"]

    if not os.path.exists(log_path):
        return jsonify({"error": "Log file not found"}), 404

    try:
        from flask import send_file
        return send_file(
            log_path,
            mimetype='text/plain',
            as_attachment=True,
            download_name=f"{service_id}.log"
        )
    except Exception as e:
        return jsonify({"error": str(e)}), 500

# =============================================================================
# FAVORITES & ENHANCED PROJECTS
# =============================================================================

FAVORITES_FILE = os.path.join(HOME, "Services", "data", "favorites.json")

def load_favorites():
    """Load favorites from JSON file"""
    if os.path.exists(FAVORITES_FILE):
        try:
            with open(FAVORITES_FILE, 'r') as f:
                return json.load(f)
        except:
            pass
    return {"projects": [], "apps": [], "web": []}

def save_favorites(favorites):
    """Save favorites to JSON file"""
    os.makedirs(os.path.dirname(FAVORITES_FILE), exist_ok=True)
    with open(FAVORITES_FILE, 'w') as f:
        json.dump(favorites, f, indent=2)

@app.route('/api/favorites', methods=['GET'])
def get_favorites():
    """Get all favorites"""
    return jsonify(load_favorites())

@app.route('/api/favorites', methods=['POST'])
def set_favorites():
    """Set favorites"""
    data = request.json
    if not data:
        return jsonify({"error": "No data provided"}), 400
    save_favorites(data)
    return jsonify({"status": "ok", "saved": data})

@app.route('/api/favorites/<category>/<item_id>', methods=['POST'])
def add_favorite(category, item_id):
    """Add item to favorites"""
    favorites = load_favorites()
    if category not in favorites:
        favorites[category] = []
    if item_id not in favorites[category]:
        favorites[category].append(item_id)
        save_favorites(favorites)
    return jsonify({"status": "ok", "favorites": favorites})

@app.route('/api/favorites/<category>/<item_id>', methods=['DELETE'])
def remove_favorite(category, item_id):
    """Remove item from favorites"""
    favorites = load_favorites()
    if category in favorites and item_id in favorites[category]:
        favorites[category].remove(item_id)
        save_favorites(favorites)
    return jsonify({"status": "ok", "favorites": favorites})

def detect_tech_stack(path):
    """Detect project technology stack"""
    techs = []

    # Python
    if os.path.exists(os.path.join(path, "requirements.txt")) or \
       os.path.exists(os.path.join(path, "setup.py")) or \
       os.path.exists(os.path.join(path, "pyproject.toml")) or \
       glob.glob(os.path.join(path, "*.py")):
        techs.append({"id": "python", "name": "Python", "icon": "🐍"})

    # Node.js
    if os.path.exists(os.path.join(path, "package.json")):
        techs.append({"id": "nodejs", "name": "Node.js", "icon": "📦"})

    # Swift/iOS
    if glob.glob(os.path.join(path, "*.xcodeproj")) or \
       glob.glob(os.path.join(path, "*.xcworkspace")) or \
       glob.glob(os.path.join(path, "*.swift")):
        techs.append({"id": "swift", "name": "Swift", "icon": "🍎"})

    # TypeScript
    if os.path.exists(os.path.join(path, "tsconfig.json")):
        techs.append({"id": "typescript", "name": "TypeScript", "icon": "📘"})

    # React
    if os.path.exists(os.path.join(path, "package.json")):
        try:
            with open(os.path.join(path, "package.json")) as f:
                pkg = json.load(f)
                deps = {**pkg.get("dependencies", {}), **pkg.get("devDependencies", {})}
                if "react" in deps:
                    techs.append({"id": "react", "name": "React", "icon": "⚛️"})
                if "next" in deps:
                    techs.append({"id": "nextjs", "name": "Next.js", "icon": "▲"})
                if "vue" in deps:
                    techs.append({"id": "vue", "name": "Vue", "icon": "💚"})
        except:
            pass

    # Rust
    if os.path.exists(os.path.join(path, "Cargo.toml")):
        techs.append({"id": "rust", "name": "Rust", "icon": "🦀"})

    # Go
    if os.path.exists(os.path.join(path, "go.mod")):
        techs.append({"id": "go", "name": "Go", "icon": "🐹"})

    # Docker
    if os.path.exists(os.path.join(path, "Dockerfile")) or \
       os.path.exists(os.path.join(path, "docker-compose.yml")):
        techs.append({"id": "docker", "name": "Docker", "icon": "🐳"})

    return techs
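
# Example: a TypeScript Next.js project with a Dockerfile would typically yield, in the
# order of the checks above: Node.js, TypeScript, React, Next.js and Docker entries.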

def get_file_count(path, max_depth=3):
    """Count files in directory (with depth limit)"""
    count = 0
    try:
        for root, dirs, files in os.walk(path):
            depth = root[len(path):].count(os.sep)
            if depth >= max_depth:
                dirs[:] = []  # Don't descend further
                continue
            # Skip common large/generated directories
            dirs[:] = [d for d in dirs if d not in [
                'node_modules', '.git', 'DerivedData', '__pycache__',
                '.next', 'build', 'dist', 'venv', '.venv'
            ]]
            count += len(files)
    except:
        pass
    return count

def time_ago(timestamp):
    """Convert timestamp to human-readable relative time"""
    now = datetime.now()
    dt = datetime.fromtimestamp(timestamp)
    diff = now - dt

    seconds = diff.total_seconds()
    if seconds < 60:
        return "just now"
    elif seconds < 3600:
        mins = int(seconds / 60)
        return f"{mins} min ago"
    elif seconds < 86400:
        hours = int(seconds / 3600)
        return f"{hours}h ago"
    elif seconds < 604800:
        days = int(seconds / 86400)
        return f"{days} day{'s' if days > 1 else ''} ago"
    elif seconds < 2592000:
        weeks = int(seconds / 604800)
        return f"{weeks} wk ago"
    else:
        months = int(seconds / 2592000)
        return f"{months} month{'s' if months > 1 else ''} ago"

@app.route('/api/projects/enhanced')
def get_enhanced_projects():
    """Get projects with enhanced metadata"""
    projects_dir = DIRS["projects"]
    favorites = load_favorites()
    fav_projects = favorites.get("projects", [])

    projects = []

    if os.path.exists(projects_dir):
        for item in os.listdir(projects_dir):
            path = os.path.join(projects_dir, item)
            if os.path.isdir(path):
                # Basic info
                has_claude = os.path.exists(os.path.join(path, "CLAUDE.md"))
                has_readme = os.path.exists(os.path.join(path, "README.md"))
                has_package = os.path.exists(os.path.join(path, "package.json"))
                has_git = os.path.exists(os.path.join(path, ".git"))

                # Doc score (0-3)
                doc_score = int(has_claude) + int(has_readme) + int(has_package)

                # Modification time
                mtime = os.path.getmtime(path)

                # Tech stack detection
                tech_stack = detect_tech_stack(path)

                # File count
                file_count = get_file_count(path)

                # Is favorite
                is_favorite = item in fav_projects

                projects.append({
                    "id": f"proj_{item}",
                    "name": item,
                    "path": path,
                    "type": "project",
                    "hasClaudeMd": has_claude,
                    "hasReadme": has_readme,
                    "hasPackageJson": has_package,
                    "hasGit": has_git,
                    "docScore": doc_score,
                    "techStack": tech_stack,
                    "fileCount": file_count,
                    "modified": datetime.fromtimestamp(mtime).isoformat(),
                    "modifiedTimestamp": mtime,
                    "modifiedAgo": time_ago(mtime),
                    "isFavorite": is_favorite
                })

    # Sort by modification time (newest first) by default
    projects.sort(key=lambda x: x['modifiedTimestamp'], reverse=True)

    return jsonify({
        "projects": projects,
        "total": len(projects),
        "favorites_count": sum(1 for p in projects if p['isFavorite']),
        "with_docs": sum(1 for p in projects if p['docScore'] > 0),
        "timestamp": datetime.now().isoformat()
    })

@app.route('/api/projects/<name>/docs')
def get_project_docs(name):
    """Get project documentation (CLAUDE.md or README.md)"""
    projects_dir = DIRS["projects"]
    project_path = os.path.join(projects_dir, name)

    if not os.path.exists(project_path):
        return jsonify({"error": "Project not found"}), 404

    docs = {}

    # Try CLAUDE.md first
    claude_path = os.path.join(project_path, "CLAUDE.md")
    if os.path.exists(claude_path):
        try:
            with open(claude_path, 'r', encoding='utf-8') as f:
                content = f.read()
                docs["claude"] = {
                    "content": content[:20000],  # Limit to 20KB
                    "truncated": len(content) > 20000,
                    "size": len(content)
                }
        except Exception as e:
            docs["claude"] = {"error": str(e)}

    # Try README.md
    readme_path = os.path.join(project_path, "README.md")
    if os.path.exists(readme_path):
        try:
            with open(readme_path, 'r', encoding='utf-8') as f:
                content = f.read()
                docs["readme"] = {
                    "content": content[:20000],
                    "truncated": len(content) > 20000,
                    "size": len(content)
                }
        except Exception as e:
            docs["readme"] = {"error": str(e)}

    # Get directory structure (first level)
    try:
        structure = []
        for item in sorted(os.listdir(project_path))[:30]:
            item_path = os.path.join(project_path, item)
            if item.startswith('.'):
                continue
            is_dir = os.path.isdir(item_path)
            structure.append({
                "name": item,
                "type": "dir" if is_dir else "file",
                "icon": "📁" if is_dir else "📄"
            })
        docs["structure"] = structure
    except:
        docs["structure"] = []

    if not docs.get("claude") and not docs.get("readme"):
        return jsonify({
            "project": name,
            "structure": docs.get("structure", []),
            "message": "No documentation found"
        })

    return jsonify({
        "project": name,
        "path": project_path,
        **docs
    })

if __name__ == '__main__':
    print("🚀 Workspace API starting on port 9003...")
    print(f"   Home: {HOME}")
    print(f"   Endpoints:")
    print(f"     GET /api/stats     - Workspace statistics")
    print(f"     GET /api/items     - All items")
    print(f"     GET /api/search?q= - Search items")
    print(f"     GET /api/web/<cat> - Web category files")
    print(f"     GET /api/health    - Health check")
    print(f"     GET /api/favorites - Favorites management")
    print(f"     GET /api/projects/enhanced - Enhanced projects")
    print(f"     GET /api/projects/<name>/docs - Project docs preview")
    app.run(host='0.0.0.0', port=9003, debug=True)
