# Homelab Dashboard Implementation Plan > **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. **Goal:** Build a unified homelab command center with real-time data from Portainer, Jellyfin, Ollama, Prometheus, and automation scripts — deployed as two Docker containers. **Architecture:** FastAPI backend (Python 3.12) reuses existing `scripts/lib/` modules to aggregate data from 15+ services. Next.js 15 frontend (shadcn/ui, Tailwind, dark theme) polls the API and uses SSE for real-time activity feed. Docker Compose runs both. **Tech Stack:** Python 3.12, FastAPI, uvicorn, httpx | Next.js 15, React 19, shadcn/ui, Tailwind CSS, SWR, TypeScript --- ## File Structure ``` dashboard/ docker-compose.yml # Runs both containers api/ Dockerfile # Python 3.12 slim requirements.txt # fastapi, uvicorn, httpx main.py # FastAPI app, CORS, router includes routers/ overview.py # GET /api/stats/overview, /api/activity (SSE) containers.py # GET /api/containers, /api/containers/{id}/logs, POST restart media.py # GET /api/jellyfin/*, /api/sonarr/*, /api/radarr/*, /api/sabnzbd/* automations.py # GET /api/automations/* expenses.py # GET /api/expenses, /api/expenses/summary, /api/subscriptions olares.py # GET /api/olares/pods, /api/olares/gpu lib_bridge.py # Adds scripts/lib/ to sys.path, re-exports modules log_parser.py # Parses automation log files into structured events ui/ Dockerfile # Node 22 alpine, next build + start package.json next.config.ts tailwind.config.ts tsconfig.json app/ layout.tsx # Root layout: dark bg, top nav, fonts page.tsx # Dashboard overview tab infrastructure/page.tsx # Container + pod tables media/page.tsx # Jellyfin, Sonarr, Radarr, SABnzbd automations/page.tsx # Email stats, restart history, backup, drift, disk expenses/page.tsx # Expense table, monthly summary, subscriptions globals.css # 
Tailwind directives + dark theme vars components/ nav.tsx # Top navigation bar with active tab stat-card.tsx # Metric card (value, label, indicator) activity-feed.tsx # SSE-powered real-time feed host-card.tsx # Host status badge data-table.tsx # Generic sortable/filterable table jellyfin-card.tsx # Now playing + library counts ollama-card.tsx # Model status + VRAM status-badge.tsx # Green/red/amber dot with label container-logs-modal.tsx # Modal overlay for container logs lib/ api.ts # Typed fetch wrapper for backend use-poll.ts # SWR-based polling hook with configurable interval use-sse.ts # EventSource hook for activity stream types.ts # Shared TypeScript interfaces ``` --- ## Task 1: Project Scaffolding + Docker Compose **Files:** - Create: `dashboard/docker-compose.yml` - Create: `dashboard/api/Dockerfile` - Create: `dashboard/api/requirements.txt` - Create: `dashboard/api/main.py` - Create: `dashboard/ui/Dockerfile` - [ ] **Step 1: Create directory structure** ```bash mkdir -p dashboard/api/routers dashboard/ui ``` - [ ] **Step 2: Create API requirements.txt** ``` # dashboard/api/requirements.txt fastapi==0.115.12 uvicorn[standard]==0.34.2 httpx==0.28.1 pyyaml>=6.0 sse-starlette==2.3.3 ``` - [ ] **Step 3: Create FastAPI main.py** ```python # dashboard/api/main.py """Homelab Dashboard API — aggregates data from homelab services.""" from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from routers import overview, containers, media, automations, expenses, olares app = FastAPI(title="Homelab Dashboard API", version="1.0.0") app.add_middleware( CORSMiddleware, allow_origins=["*"], allow_methods=["*"], allow_headers=["*"], ) app.include_router(overview.router, prefix="/api") app.include_router(containers.router, prefix="/api") app.include_router(media.router, prefix="/api") app.include_router(automations.router, prefix="/api") app.include_router(expenses.router, prefix="/api") app.include_router(olares.router, prefix="/api") 
@app.get("/api/health") def health(): return {"status": "ok"} ``` - [ ] **Step 4: Create lib_bridge.py** ```python # dashboard/api/lib_bridge.py """Bridge to import scripts/lib/ modules from the mounted volume.""" import sys from pathlib import Path # scripts/ is mounted at /app/scripts in Docker, or relative in dev SCRIPTS_DIR = Path("/app/scripts") if not SCRIPTS_DIR.exists(): SCRIPTS_DIR = Path(__file__).parent.parent.parent / "scripts" sys.path.insert(0, str(SCRIPTS_DIR)) from lib.portainer import ( list_containers as portainer_list_containers, get_container_logs as portainer_get_container_logs, restart_container as portainer_restart_container, inspect_container as portainer_inspect_container, ENDPOINTS, ) from lib.prometheus import prom_query, prom_query_range from lib.ollama import ollama_available, ollama_generate, OLLAMA_URL, OLLAMA_MODEL # DB paths GMAIL_DB = SCRIPTS_DIR / "gmail-organizer" / "processed.db" DVISH_DB = SCRIPTS_DIR / "gmail-organizer-dvish" / "processed.db" PROTON_DB = SCRIPTS_DIR / "proton-organizer" / "processed.db" RESTART_DB = SCRIPTS_DIR / "stack-restart.db" # Data paths DATA_DIR = Path("/app/data") if not DATA_DIR.exists(): DATA_DIR = Path(__file__).parent.parent.parent / "data" EXPENSES_CSV = DATA_DIR / "expenses.csv" # Log paths LOG_DIR = Path("/app/logs") if not LOG_DIR.exists(): LOG_DIR = Path("/tmp") ``` - [ ] **Step 5: Create stub routers** Create each router file with a minimal stub: ```python # dashboard/api/routers/overview.py from fastapi import APIRouter router = APIRouter(tags=["overview"]) # dashboard/api/routers/containers.py from fastapi import APIRouter router = APIRouter(tags=["containers"]) # dashboard/api/routers/media.py from fastapi import APIRouter router = APIRouter(tags=["media"]) # dashboard/api/routers/automations.py from fastapi import APIRouter router = APIRouter(tags=["automations"]) # dashboard/api/routers/expenses.py from fastapi import APIRouter router = APIRouter(tags=["expenses"]) # 
dashboard/api/routers/olares.py from fastapi import APIRouter router = APIRouter(tags=["olares"]) ``` - [ ] **Step 6: Create API Dockerfile** ```dockerfile # dashboard/api/Dockerfile FROM python:3.12-slim RUN apt-get update && apt-get install -y --no-install-recommends \ openssh-client curl && rm -rf /var/lib/apt/lists/* WORKDIR /app/api COPY requirements.txt . RUN pip install --no-cache-dir -r requirements.txt COPY . . EXPOSE 8888 CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8888"] ``` - [ ] **Step 7: Create docker-compose.yml** ```yaml # dashboard/docker-compose.yml services: dashboard-api: build: ./api ports: - "8888:8888" volumes: - ../scripts:/app/scripts:ro - ../data:/app/data:ro - /tmp:/app/logs:ro - ~/.ssh:/root/.ssh:ro network_mode: host restart: unless-stopped dashboard-ui: build: ./ui ports: - "3000:3000" environment: - NEXT_PUBLIC_API_URL=http://localhost:8888 network_mode: host depends_on: - dashboard-api restart: unless-stopped ``` Note: `network_mode: host` so the API can SSH to olares and reach Portainer on Tailscale IP. 
- [ ] **Step 8: Test API starts** ```bash cd dashboard/api pip install -r requirements.txt uvicorn main:app --host 0.0.0.0 --port 8888 & curl -s http://localhost:8888/api/health # Expected: {"status":"ok"} kill %1 ``` - [ ] **Step 9: Commit** ```bash git add dashboard/ git commit -m "feat(dashboard): project scaffolding with FastAPI + Docker Compose" ``` --- ## Task 2: Overview API Routes **Files:** - Create: `dashboard/api/log_parser.py` - Modify: `dashboard/api/routers/overview.py` - [ ] **Step 1: Create log_parser.py** ```python # dashboard/api/log_parser.py """Parse automation log files into structured events.""" import os import re from datetime import datetime, date from pathlib import Path LOG_FILES = { "stack-restart": "stack-restart.log", "backup": "backup-validator.log", "email-lz": "gmail-organizer.log", "email-dvish": "gmail-organizer-dvish.log", "email-proton": "proton-organizer.log", "receipt": "receipt-tracker.log", "drift": "config-drift.log", "digest": "email-digest.log", } EVENT_PATTERNS = [ (r"→ (\w+) \((\S+)\)", "email_classified", lambda m: {"category": m.group(1), "label": m.group(2)}), (r"Cached: .+ → (\w+)", "email_cached", lambda m: {"category": m.group(1)}), (r"Stack-restart check complete", "stack_healthy", lambda m: {}), (r"Container (\S+) on (\S+) (restarted)", "container_restarted", lambda m: {"container": m.group(1), "endpoint": m.group(2)}), (r"Unhealthy.*: (\S+) on (\S+)", "container_unhealthy", lambda m: {"container": m.group(1), "endpoint": m.group(2)}), (r"Backup Validation: (OK|ISSUES FOUND)", "backup_result", lambda m: {"status": m.group(1)}), (r"\[DRY-RUN\] Would write: (.+)", "receipt_extracted", lambda m: {"data": m.group(1)}), (r"Would write:.*'vendor': '([^']+)'.*'amount': '([^']+)'", "receipt_extracted", lambda m: {"vendor": m.group(1), "amount": m.group(2)}), (r"No drifts found", "drift_clean", lambda m: {}), (r"Detected (\d+) drifts", "drift_found", lambda m: {"count": m.group(1)}), ] def parse_log_line(line: str) -> 
dict | None: """Parse a single log line into a structured event.""" # Extract timestamp: "2026-04-03 15:30:01,283 INFO ..." ts_match = re.match(r"(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})", line) if not ts_match: return None timestamp = ts_match.group(1) for pattern, event_type, extractor in EVENT_PATTERNS: m = re.search(pattern, line) if m: event = {"type": event_type, "timestamp": timestamp, "raw": line.strip()} event.update(extractor(m)) return event return None def get_recent_events(log_dir: Path, max_events: int = 50) -> list[dict]: """Get recent events from all log files, sorted by time.""" events = [] today = date.today().isoformat() for source, filename in LOG_FILES.items(): log_path = log_dir / filename if not log_path.exists(): continue try: with open(log_path) as f: for line in f: if not line.startswith(today): continue event = parse_log_line(line) if event: event["source"] = source events.append(event) except Exception: continue events.sort(key=lambda e: e.get("timestamp", ""), reverse=True) return events[:max_events] def tail_logs(log_dir: Path) -> dict[str, int]: """Get file positions for all logs (used by SSE to detect new lines).""" positions = {} for source, filename in LOG_FILES.items(): log_path = log_dir / filename if log_path.exists(): positions[source] = log_path.stat().st_size return positions ``` - [ ] **Step 2: Implement overview router** ```python # dashboard/api/routers/overview.py """Overview endpoints: stats, activity feed (SSE).""" import asyncio import json import sqlite3 import subprocess from datetime import date, datetime from pathlib import Path from fastapi import APIRouter from sse_starlette.sse import EventSourceResponse from lib_bridge import ( portainer_list_containers, ENDPOINTS, ollama_available, GMAIL_DB, DVISH_DB, PROTON_DB, RESTART_DB, LOG_DIR, OLLAMA_URL, ) from log_parser import get_recent_events, tail_logs, parse_log_line, LOG_FILES router = APIRouter(tags=["overview"]) def _count_today_emails(db_path: Path) -> int: 
"""Count emails classified today in a processed.db.""" if not db_path.exists(): return 0 today = date.today().isoformat() conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True) row = conn.execute( "SELECT COUNT(*) FROM processed WHERE processed_at LIKE ?", (f"{today}%",) ).fetchone() conn.close() return row[0] if row else 0 def _count_unhealthy() -> int: """Count currently tracked unhealthy containers.""" if not RESTART_DB.exists(): return 0 conn = sqlite3.connect(f"file:{RESTART_DB}?mode=ro", uri=True) row = conn.execute("SELECT COUNT(*) FROM unhealthy_tracking").fetchone() conn.close() return row[0] if row else 0 def _get_gpu_info() -> dict: """Get GPU info via SSH to olares.""" try: result = subprocess.run( ["ssh", "-o", "ConnectTimeout=3", "olares", "nvidia-smi --query-gpu=temperature.gpu,power.draw,power.limit," "memory.used,memory.total,utilization.gpu --format=csv,noheader,nounits"], capture_output=True, text=True, timeout=8, ) if result.returncode != 0: return {"available": False} parts = [p.strip() for p in result.stdout.strip().split(",")] return { "available": True, "temp_c": int(parts[0]), "power_w": float(parts[1]), "power_limit_w": float(parts[2]), "vram_used_mb": int(parts[3]), "vram_total_mb": int(parts[4]), "utilization_pct": int(parts[5]), } except Exception: return {"available": False} def _get_container_summary() -> dict: """Get container counts across all endpoints.""" total = 0 healthy = 0 per_endpoint = {} for name in ENDPOINTS: try: containers = portainer_list_containers(name) running = [c for c in containers if c.get("State") == "running"] per_endpoint[name] = {"total": len(containers), "running": len(running)} total += len(containers) healthy += len(running) except Exception: per_endpoint[name] = {"total": 0, "running": 0, "error": True} return {"total": total, "running": healthy, "endpoints": per_endpoint} @router.get("/stats/overview") def get_overview(): """Aggregated overview stats for the dashboard.""" containers = 
_get_container_summary() gpu = _get_gpu_info() emails_today = ( _count_today_emails(GMAIL_DB) + _count_today_emails(DVISH_DB) + _count_today_emails(PROTON_DB) ) alerts = _count_unhealthy() ollama_up = ollama_available() return { "containers": containers, "gpu": gpu, "emails_today": emails_today, "alerts": alerts, "ollama": {"available": ollama_up, "url": OLLAMA_URL}, "hosts_online": sum( 1 for ep in containers["endpoints"].values() if not ep.get("error") ), } @router.get("/activity") async def activity_stream(): """SSE stream of recent automation events.""" async def event_generator(): positions = tail_logs(LOG_DIR) # Send initial batch events = get_recent_events(LOG_DIR, max_events=20) for event in reversed(events): yield {"event": "activity", "data": json.dumps(event)} # Poll for new lines while True: await asyncio.sleep(5) for source, filename in LOG_FILES.items(): log_path = LOG_DIR / filename if not log_path.exists(): continue current_size = log_path.stat().st_size prev_size = positions.get(source, 0) if current_size > prev_size: with open(log_path) as f: f.seek(prev_size) for line in f: event = parse_log_line(line) if event: event["source"] = source yield {"event": "activity", "data": json.dumps(event)} positions[source] = current_size return EventSourceResponse(event_generator()) ``` - [ ] **Step 3: Test overview endpoint** ```bash cd dashboard/api uvicorn main:app --port 8888 & sleep 2 curl -s http://localhost:8888/api/stats/overview | python3 -m json.tool | head -20 # Expected: JSON with containers, gpu, emails_today, alerts, ollama fields kill %1 ``` - [ ] **Step 4: Commit** ```bash git add dashboard/api/ git commit -m "feat(dashboard): overview + activity SSE API routes" ``` --- ## Task 3: Container, Media, and Olares API Routes **Files:** - Modify: `dashboard/api/routers/containers.py` - Modify: `dashboard/api/routers/media.py` - Modify: `dashboard/api/routers/olares.py` - [ ] **Step 1: Implement containers router** ```python # 
dashboard/api/routers/containers.py """Container management endpoints.""" from fastapi import APIRouter, Query from lib_bridge import ( portainer_list_containers, portainer_get_container_logs, portainer_restart_container, ENDPOINTS, ) router = APIRouter(tags=["containers"]) @router.get("/containers") def list_containers(endpoint: str | None = None): """List containers across endpoints.""" results = [] endpoints = [endpoint] if endpoint else list(ENDPOINTS.keys()) for ep in endpoints: try: containers = portainer_list_containers(ep) for c in containers: results.append({ "id": c.get("Id", "")[:12], "name": (c.get("Names") or ["/unknown"])[0].lstrip("/"), "image": c.get("Image", ""), "state": c.get("State", ""), "status": c.get("Status", ""), "endpoint": ep, }) except Exception as e: results.append({"endpoint": ep, "error": str(e)}) return results @router.get("/containers/{container_id}/logs") def container_logs(container_id: str, endpoint: str = Query(...), tail: int = 100): """Get container logs.""" logs = portainer_get_container_logs(endpoint, container_id, tail=tail) return {"container_id": container_id, "endpoint": endpoint, "logs": logs} @router.post("/containers/{container_id}/restart") def restart_container(container_id: str, endpoint: str = Query(...)): """Restart a container.""" success = portainer_restart_container(endpoint, container_id) return {"success": success, "container_id": container_id, "endpoint": endpoint} ``` - [ ] **Step 2: Implement media router** ```python # dashboard/api/routers/media.py """Media endpoints: Jellyfin, Sonarr, Radarr, SABnzbd.""" import subprocess import json from fastapi import APIRouter router = APIRouter(tags=["media"]) JELLYFIN_TOKEN = "REDACTED_TOKEN" def _jellyfin(path: str) -> dict | list: sep = "&" if "?" in path else "?" 
url = f"http://localhost:8096{path}{sep}api_key={JELLYFIN_TOKEN}" result = subprocess.run( ["ssh", "-o", "ConnectTimeout=3", "olares", f"kubectl exec -n jellyfin-vishinator deploy/jellyfin -c jellyfin -- curl -s '{url}'"], capture_output=True, text=True, timeout=15, ) if result.returncode != 0: return {} return json.loads(result.stdout) @router.get("/jellyfin/status") def jellyfin_status(): info = _jellyfin("/System/Info/Public") libraries = _jellyfin("/Library/VirtualFolders") sessions = _jellyfin("/Sessions") active = [s for s in (sessions or []) if s.get("NowPlayingItem")] return { "version": info.get("Version", "?"), "server_name": info.get("ServerName", "?"), "libraries": [ {"name": l["Name"], "type": l.get("CollectionType", "?"), "paths": l.get("Locations", [])} for l in (libraries or []) ], "active_sessions": [ { "user": s.get("UserName", "?"), "device": s.get("DeviceName", "?"), "client": s.get("Client", "?"), "title": s["NowPlayingItem"].get("Name", "?"), "type": s["NowPlayingItem"].get("Type", "?"), } for s in active ], "idle_sessions": len([s for s in (sessions or []) if not s.get("NowPlayingItem")]), } @router.get("/sonarr/queue") def sonarr_queue(): import sys sys.path.insert(0, "/app/scripts") from lib.portainer import portainer_api import httpx try: with httpx.Client(timeout=10) as client: r = client.get( "http://192.168.0.200:8989/api/v3/queue", headers={"X-Api-Key": "REDACTED_SONARR_API_KEY"}, params={"pageSize": 20}, ) r.raise_for_status() data = r.json() return { "total": data.get("totalRecords", 0), "items": [ {"title": i.get("title", "?"), "status": i.get("status", "?"), "size": i.get("size", 0), "sizeleft": i.get("sizeleft", 0)} for i in data.get("records", [])[:10] ], } except Exception as e: return {"total": 0, "items": [], "error": str(e)} @router.get("/radarr/queue") def radarr_queue(): import httpx try: with httpx.Client(timeout=10) as client: r = client.get( "http://192.168.0.200:7878/api/v3/queue", headers={"X-Api-Key": 
"REDACTED_RADARR_API_KEY"}, params={"pageSize": 20}, ) r.raise_for_status() data = r.json() return { "total": data.get("totalRecords", 0), "items": [ {"title": i.get("title", "?"), "status": i.get("status", "?"), "size": i.get("size", 0), "sizeleft": i.get("sizeleft", 0)} for i in data.get("records", [])[:10] ], } except Exception as e: return {"total": 0, "items": [], "error": str(e)} @router.get("/sabnzbd/queue") def sabnzbd_queue(): import httpx try: with httpx.Client(timeout=10) as client: r = client.get( "http://192.168.0.200:8080/api", params={"apikey": "6ae289de5a4f45f7a0124b43ba9c3dea", "output": "json", "mode": "queue"}, ) r.raise_for_status() data = r.json().get("queue", {}) return { "speed": data.get("speed", "0"), "size_left": data.get("sizeleft", "0"), "eta": data.get("eta", "unknown"), "items": [ {"name": s.get("filename", "?"), "size": s.get("size", "?"), "percentage": s.get("percentage", "0")} for s in data.get("slots", [])[:10] ], } except Exception as e: return {"speed": "0", "items": [], "error": str(e)} ``` - [ ] **Step 3: Implement olares router** ```python # dashboard/api/routers/olares.py """Olares K3s cluster endpoints.""" import subprocess from fastapi import APIRouter, Query router = APIRouter(tags=["olares"]) def _kubectl(cmd: str, timeout: int = 10) -> str: result = subprocess.run( ["ssh", "-o", "ConnectTimeout=3", "olares", f"kubectl {cmd}"], capture_output=True, text=True, timeout=timeout, ) if result.returncode != 0: return f"Error: {result.stderr.strip()}" return result.stdout.strip() @router.get("/olares/pods") def olares_pods(namespace: str | None = None): ns_flag = f"-n {namespace}" if namespace else "-A" output = _kubectl(f"get pods {ns_flag} -o wide --no-headers") pods = [] for line in output.splitlines(): parts = line.split() if len(parts) >= 7: pods.append({ "namespace": parts[0] if not namespace else namespace, "name": parts[1] if not namespace else parts[0], "ready": parts[2] if not namespace else parts[1], "status": 
parts[3] if not namespace else parts[2], "restarts": parts[4] if not namespace else parts[3], "age": parts[5] if not namespace else parts[4], }) return pods @router.get("/olares/gpu") def olares_gpu(): try: result = subprocess.run( ["ssh", "-o", "ConnectTimeout=3", "olares", "nvidia-smi --query-gpu=name,temperature.gpu,power.draw,power.limit," "memory.used,memory.total,utilization.gpu --format=csv,noheader,nounits"], capture_output=True, text=True, timeout=8, ) if result.returncode != 0: return {"available": False, "error": result.stderr.strip()} parts = [p.strip() for p in result.stdout.strip().split(",")] return { "available": True, "name": parts[0], "temp_c": int(parts[1]), "power_w": float(parts[2]), "power_limit_w": float(parts[3]), "vram_used_mb": int(parts[4]), "vram_total_mb": int(parts[5]), "utilization_pct": int(parts[6]), } except Exception as e: return {"available": False, "error": str(e)} ``` - [ ] **Step 4: Test all routes** ```bash cd dashboard/api uvicorn main:app --port 8888 & sleep 2 echo "=== Containers ===" && curl -s http://localhost:8888/api/containers | python3 -c "import sys,json; d=json.load(sys.stdin); print(f'{len(d)} containers')" echo "=== Jellyfin ===" && curl -s http://localhost:8888/api/jellyfin/status | python3 -m json.tool | head -10 echo "=== GPU ===" && curl -s http://localhost:8888/api/olares/gpu | python3 -m json.tool echo "=== Sonarr ===" && curl -s http://localhost:8888/api/sonarr/queue | python3 -m json.tool | head -5 kill %1 ``` - [ ] **Step 5: Commit** ```bash git add dashboard/api/ git commit -m "feat(dashboard): container, media, and olares API routes" ``` --- ## Task 4: Automations and Expenses API Routes **Files:** - Modify: `dashboard/api/routers/automations.py` - Modify: `dashboard/api/routers/expenses.py` - [ ] **Step 1: Implement automations router** ```python # dashboard/api/routers/automations.py """Automation status endpoints.""" import csv import sqlite3 from collections import Counter from datetime import 
date, datetime, timedelta from pathlib import Path from fastapi import APIRouter from lib_bridge import GMAIL_DB, DVISH_DB, PROTON_DB, RESTART_DB, LOG_DIR router = APIRouter(tags=["automations"]) def _email_stats(db_path: Path, account_name: str) -> dict: """Get today's email classification stats from a processed.db.""" if not db_path.exists(): return {"account": account_name, "today": 0, "categories": {}} today = date.today().isoformat() conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True) rows = conn.execute( "SELECT category FROM processed WHERE processed_at LIKE ?", (f"{today}%",) ).fetchall() conn.close() categories = Counter(r[0] for r in rows) return {"account": account_name, "today": len(rows), "categories": dict(categories)} def _sender_cache_count(db_path: Path) -> int: if not db_path.exists(): return 0 try: conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True) row = conn.execute("SELECT COUNT(*) FROM sender_cache").fetchone() conn.close() return row[0] if row else 0 except Exception: return 0 @router.get("/automations/email") def email_stats(): return { "accounts": [ _email_stats(GMAIL_DB, "lzbellina92@gmail.com"), _email_stats(DVISH_DB, "your-email@example.com"), _email_stats(PROTON_DB, "admin@thevish.io"), ], "sender_cache": { "lzbellina92": _sender_cache_count(GMAIL_DB), "dvish92": _sender_cache_count(DVISH_DB), }, } @router.get("/automations/restarts") def restart_history(): if not RESTART_DB.exists(): return {"entries": []} conn = sqlite3.connect(f"file:{RESTART_DB}?mode=ro", uri=True) rows = conn.execute( "SELECT container_id, endpoint, first_seen, last_checked, restart_count, last_restart " "FROM unhealthy_tracking ORDER BY last_checked DESC LIMIT 50" ).fetchall() conn.close() return { "entries": [ {"container_id": r[0], "endpoint": r[1], "first_seen": r[2], "last_checked": r[3], "restart_count": r[4], "last_restart": r[5]} for r in rows ] } @router.get("/automations/backup") def backup_status(): log_path = LOG_DIR / 
"gmail-backup-daily.log" if not log_path.exists(): return {"status": "unknown", "message": "No backup log found"} try: with open(log_path) as f: content = f.read() today = date.today().isoformat() today_lines = [l for l in content.splitlines() if today in l] has_error = any("ERROR" in l for l in today_lines) return { "status": "error" if has_error else "ok", "today_entries": len(today_lines), "last_line": today_lines[-1] if today_lines else "No entries today", } except Exception as e: return {"status": "error", "message": str(e)} @router.get("/automations/drift") def drift_status(): log_path = LOG_DIR / "config-drift.log" if not log_path.exists(): return {"status": "unknown", "last_run": None, "drifts": 0} try: with open(log_path) as f: lines = f.readlines() last_lines = lines[-20:] if len(lines) > 20 else lines drift_count = 0 last_run = None for line in reversed(last_lines): if "Detected" in line and "drifts" in line: import re m = re.search(r"Detected (\d+) drifts", line) if m: drift_count = int(m.group(1)) if "No drifts found" in line: drift_count = 0 ts = line[:19] if len(line) > 19 else None if ts and not last_run: last_run = ts return {"status": "clean" if drift_count == 0 else "drifted", "drifts": drift_count, "last_run": last_run} except Exception as e: return {"status": "error", "message": str(e)} ``` - [ ] **Step 2: Implement expenses router** ```python # dashboard/api/routers/expenses.py """Expense tracking endpoints.""" import csv from collections import Counter from datetime import date from pathlib import Path from fastapi import APIRouter, Query from lib_bridge import EXPENSES_CSV router = APIRouter(tags=["expenses"]) def _read_expenses() -> list[dict]: """Read expenses.csv into a list of dicts.""" if not EXPENSES_CSV.exists(): return [] with open(EXPENSES_CSV, newline="") as f: reader = csv.DictReader(f) return list(reader) @router.get("/expenses") def get_expenses(month: str | None = None): """Get expense data, optionally filtered by month 
(YYYY-MM).""" expenses = _read_expenses() if month: expenses = [e for e in expenses if e.get("date", "").startswith(month)] return { "count": len(expenses), "expenses": expenses, } @router.get("/expenses/summary") def expense_summary(month: str | None = None): """Monthly expense summary.""" if not month: month = date.today().strftime("%Y-%m") expenses = [e for e in _read_expenses() if e.get("date", "").startswith(month)] total = sum(float(e.get("amount", 0) or 0) for e in expenses) by_vendor = Counter() for e in expenses: vendor = e.get("vendor", "Unknown") by_vendor[vendor] += float(e.get("amount", 0) or 0) top_vendors = sorted(by_vendor.items(), key=lambda x: -x[1])[:10] return { "month": month, "total": round(total, 2), "count": len(expenses), "top_vendors": [{"vendor": v, "amount": round(a, 2)} for v, a in top_vendors], } ``` - [ ] **Step 3: Test** ```bash cd dashboard/api uvicorn main:app --port 8888 & sleep 2 curl -s http://localhost:8888/api/automations/email | python3 -m json.tool | head -15 curl -s http://localhost:8888/api/automations/backup | python3 -m json.tool curl -s http://localhost:8888/api/expenses/summary | python3 -m json.tool kill %1 ``` - [ ] **Step 4: Commit** ```bash git add dashboard/api/ git commit -m "feat(dashboard): automations and expenses API routes" ``` --- ## Task 5: Next.js Frontend Scaffolding **Files:** - Create: `dashboard/ui/package.json` - Create: `dashboard/ui/next.config.ts` - Create: `dashboard/ui/tailwind.config.ts` - Create: `dashboard/ui/tsconfig.json` - Create: `dashboard/ui/app/globals.css` - Create: `dashboard/ui/app/layout.tsx` - Create: `dashboard/ui/lib/api.ts` - Create: `dashboard/ui/lib/types.ts` - Create: `dashboard/ui/lib/use-poll.ts` - Create: `dashboard/ui/lib/use-sse.ts` - Create: `dashboard/ui/components/nav.tsx` - Create: `dashboard/ui/Dockerfile` - [ ] **Step 1: Initialize Next.js project** ```bash cd dashboard/ui npx create-next-app@latest . 
--typescript --tailwind --eslint --app --no-src-dir --import-alias="@/*" --no-git --use-npm ``` - [ ] **Step 2: Install dependencies** ```bash cd dashboard/ui npm install swr clsx npx shadcn@latest init -d npx shadcn@latest add card badge table tabs separator scroll-area dialog button ``` - [ ] **Step 3: Create globals.css with dark theme** ```css /* dashboard/ui/app/globals.css */ @tailwind base; @tailwind components; @tailwind utilities; @layer base { :root { --background: 222 47% 5%; --foreground: 210 40% 96%; --card: 217 33% 6%; --card-foreground: 210 40% 96%; --popover: 222 47% 5%; --popover-foreground: 210 40% 96%; --primary: 217 91% 60%; --primary-foreground: 222 47% 5%; --secondary: 217 33% 12%; --secondary-foreground: 210 40% 96%; --muted: 217 33% 12%; --muted-foreground: 215 20% 55%; --accent: 217 33% 12%; --accent-foreground: 210 40% 96%; --destructive: 0 84% 60%; --destructive-foreground: 210 40% 96%; --border: 217 33% 17%; --input: 217 33% 17%; --ring: 217 91% 60%; --radius: 0.625rem; } * { border-color: hsl(var(--border)); } body { background-color: hsl(var(--background)); color: hsl(var(--foreground)); } } ``` - [ ] **Step 4: Create lib/types.ts** ```typescript // dashboard/ui/lib/types.ts export interface OverviewStats { containers: { total: number; running: number; endpoints: Record<string, { total: number; running: number; error?: boolean }>; }; gpu: { available: boolean; temp_c?: number; power_w?: number; power_limit_w?: number; vram_used_mb?: number; vram_total_mb?: number; utilization_pct?: number; }; emails_today: number; alerts: number; ollama: { available: boolean; url: string }; hosts_online: number; } export interface ActivityEvent { type: string; timestamp: string; source: string; raw: string; [key: string]: unknown; } export interface Container { id: string; name: string; image: string; state: string; status: string; endpoint: string; error?: string; } export interface JellyfinStatus { version: string; server_name: string; libraries: { name: string; type: string; paths: string[] }[]; 
active_sessions: { user: string; device: string; client: string; title: string; type: string }[]; idle_sessions: number; } export interface EmailStats { accounts: { account: string; today: number; categories: Record<string, number> }[]; sender_cache: Record<string, number>; } export interface ExpenseSummary { month: string; total: number; count: number; top_vendors: { vendor: string; amount: number }[]; } ``` - [ ] **Step 5: Create lib/api.ts** ```typescript // dashboard/ui/lib/api.ts const API = process.env.NEXT_PUBLIC_API_URL || "http://localhost:8888"; export async function fetchAPI<T>(path: string): Promise<T> { const res = await fetch(`${API}${path}`); if (!res.ok) throw new Error(`API error: ${res.status}`); return res.json(); } export async function postAPI<T>(path: string): Promise<T> { const res = await fetch(`${API}${path}`, { method: "POST" }); if (!res.ok) throw new Error(`API error: ${res.status}`); return res.json(); } ``` - [ ] **Step 6: Create lib/use-poll.ts** ```typescript // dashboard/ui/lib/use-poll.ts import useSWR from "swr"; import { fetchAPI } from "./api"; export function usePoll<T>(path: string, interval: number = 60000) { return useSWR<T>(path, () => fetchAPI<T>(path), { refreshInterval: interval, revalidateOnFocus: false, }); } ``` - [ ] **Step 7: Create lib/use-sse.ts** ```typescript // dashboard/ui/lib/use-sse.ts "use client"; import { useEffect, useRef, useState } from "react"; import { ActivityEvent } from "./types"; const API = process.env.NEXT_PUBLIC_API_URL || "http://localhost:8888"; export function useSSE(path: string, maxEvents: number = 30) { const [events, setEvents] = useState<ActivityEvent[]>([]); const esRef = useRef<EventSource | null>(null); useEffect(() => { const es = new EventSource(`${API}${path}`); esRef.current = es; es.addEventListener("activity", (e) => { const event: ActivityEvent = JSON.parse(e.data); setEvents((prev) => [event, ...prev].slice(0, maxEvents)); }); es.onerror = () => { es.close(); // Reconnect after 5s setTimeout(() => { esRef.current = new EventSource(`${API}${path}`); }, 5000); }; 
return () => es.close(); }, [path, maxEvents]); return events; } ``` - [ ] **Step 8: Create components/nav.tsx** ```tsx // dashboard/ui/components/nav.tsx "use client"; import Link from "next/link"; import { usePathname } from "next/navigation"; import { clsx } from "clsx"; const tabs = [ { href: "/", label: "Dashboard" }, { href: "/infrastructure", label: "Infrastructure" }, { href: "/media", label: "Media" }, { href: "/automations", label: "Automations" }, { href: "/expenses", label: "Expenses" }, ]; export function Nav() { const pathname = usePathname(); return (
H
Homelab
{new Date().toLocaleDateString("en-US", { month: "short", day: "numeric", year: "numeric" })}
); } ``` - [ ] **Step 9: Create layout.tsx** ```tsx // dashboard/ui/app/layout.tsx import type { Metadata } from "next"; import { Inter } from "next/font/google"; import "./globals.css"; import { Nav } from "@/components/nav"; const inter = Inter({ subsets: ["latin"] }); export const metadata: Metadata = { title: "Homelab Dashboard", description: "Unified command center for homelab infrastructure", }; export default function RootLayout({ children }: { children: React.ReactNode }) { return (