Sanitized mirror from private repository - 2026-04-05 05:50:13 UTC
Some checks failed
Documentation / Build Docusaurus (push) Failing after 5m1s
Documentation / Deploy to GitHub Pages (push) Has been skipped

This commit is contained in:
Gitea Mirror Bot
2026-04-05 05:50:13 +00:00
commit c1a6970aa7
1390 changed files with 353990 additions and 0 deletions

8
dashboard/api/Dockerfile Normal file
View File

@@ -0,0 +1,8 @@
# Dashboard API image: FastAPI app served by uvicorn.
FROM python:3.12-slim
# openssh-client: the API shells out over SSH to other homelab hosts.
# curl: presumably for container healthchecks/debugging — TODO confirm.
RUN apt-get update && apt-get install -y --no-install-recommends openssh-client curl && rm -rf /var/lib/apt/lists/*
WORKDIR /app/api
# Copy and install requirements first so the dependency layer is cached
# across source-only rebuilds.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
EXPOSE 8888
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8888"]

View File

@@ -0,0 +1,35 @@
"""Bridge to import scripts/lib/ modules from the mounted volume."""
import sys
from pathlib import Path

# Prefer the container volume mount; fall back to the sibling repo checkout
# so the API also works when run outside Docker during development.
SCRIPTS_DIR = Path("/app/scripts")
if not SCRIPTS_DIR.exists():
    SCRIPTS_DIR = Path(__file__).parent.parent.parent / "scripts"
# Make scripts/lib/* importable as `lib.*` for the re-exports below.
sys.path.insert(0, str(SCRIPTS_DIR))

# Re-export shared homelab helpers under API-local alias names.
from lib.portainer import (
    list_containers as portainer_list_containers,
    get_container_logs as portainer_get_container_logs,
    restart_container as portainer_restart_container,
    inspect_container as portainer_inspect_container,
    ENDPOINTS,
)
from lib.prometheus import prom_query, prom_query_range
from lib.ollama import ollama_available, DEFAULT_URL as OLLAMA_URL, DEFAULT_MODEL as OLLAMA_MODEL

# DB paths — SQLite files written by the email organizers / stack-restart job.
GMAIL_DB = SCRIPTS_DIR / "gmail-organizer" / "processed.db"
DVISH_DB = SCRIPTS_DIR / "gmail-organizer-dvish" / "processed.db"
PROTON_DB = SCRIPTS_DIR / "proton-organizer" / "processed.db"
RESTART_DB = SCRIPTS_DIR / "stack-restart.db"

# Data paths — same mount-or-checkout fallback as SCRIPTS_DIR above.
DATA_DIR = Path("/app/data")
if not DATA_DIR.exists():
    DATA_DIR = Path(__file__).parent.parent.parent / "data"
EXPENSES_CSV = DATA_DIR / "expenses.csv"

# Log paths — /tmp fallback keeps log readers functional with nothing mounted.
LOG_DIR = Path("/app/logs")
if not LOG_DIR.exists():
    LOG_DIR = Path("/tmp")

135
dashboard/api/log_parser.py Normal file
View File

@@ -0,0 +1,135 @@
"""Parse automation log files into structured events for the dashboard."""
import os
import re
from datetime import datetime, date
from pathlib import Path
# Patterns to match interesting log lines. classify_line() returns the event
# type of the FIRST pattern that matches, so order matters (e.g. "unhealthy"
# wins over "error" for a line containing both).
PATTERNS = [
    (re.compile(r"→ receipts|→ newsletters|→ work|→ personal|→ accounts"), "email"),
    (re.compile(r"Stack-restart check complete"), "restart_check"),
    (re.compile(r"Backup Validation: OK|Backup Report"), "backup"),
    (re.compile(r"Cached:\s*\d+"), "cache"),
    (re.compile(r"[Uu]nhealthy"), "unhealthy"),
    (re.compile(r"Restarting container|restart_container"), "container_restart"),
    (re.compile(r"ERROR|CRITICAL"), "error"),
    (re.compile(r"Starting .+ check|Starting .+ organizer"), "start"),
    (re.compile(r"drifts? found|No drifts found"), "drift"),
    (re.compile(r"emails? downloaded|backup: \d+ total"), "backup_progress"),
]
# Timestamp pattern at the start of log lines; the date/time separator may be
# a space, "T", or "_" (parse_timestamp normalises it before parsing).
TS_PATTERN = re.compile(r"^(\d{4}-\d{2}-\d{2}[\sT_]\d{2}:\d{2}:\d{2})")
def parse_timestamp(line: str) -> datetime | None:
    """Extract a leading ``YYYY-MM-DD HH:MM:SS`` timestamp, or None."""
    match = TS_PATTERN.match(line)
    if not match:
        return None
    # Normalise the date/time separator ("T" or "_") to a space before parsing.
    normalized = match.group(1).replace("_", " ").replace("T", " ")
    try:
        return datetime.strptime(normalized, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        # Matched the shape but not a real date/time (e.g. month 13).
        return None
def classify_line(line: str) -> str | None:
    """Map *line* to its event type via PATTERNS (first match wins), else None."""
    return next(
        (event for regex, event in PATTERNS if regex.search(line)),
        None,
    )
def get_recent_events(log_dir: str | Path, max_events: int = 50) -> list[dict]:
    """Parse today's events from all ``*.log`` files in *log_dir*.

    Returns at most *max_events* structured event dicts, newest first.
    Unreadable files are silently skipped.
    """
    base = Path(log_dir)
    today = date.today().isoformat()
    collected: list[dict] = []
    for log_file in base.glob("*.log"):
        source = log_file.stem
        try:
            with open(log_file, "r", errors="replace") as handle:
                for raw in handle:
                    text = raw.strip()
                    # Cheap substring check first; then confirm via timestamp.
                    if not text or today not in text:
                        continue
                    ts = parse_timestamp(text)
                    if ts is None or ts.date().isoformat() != today:
                        continue
                    kind = classify_line(text)
                    if kind is None:
                        continue
                    # Strip the leading date (+ separator) from the message.
                    date_len = len(ts.isoformat().split("T")[0])
                    collected.append({
                        "time": ts.strftime("%H:%M:%S"),
                        "timestamp": ts.isoformat(),
                        "type": kind,
                        "source": source,
                        "message": text[date_len + 1:].strip().lstrip(",").strip(),
                    })
        except (OSError, PermissionError):
            continue
    collected.sort(key=lambda event: event["timestamp"], reverse=True)
    return collected[:max_events]
def tail_logs(log_dir: str | Path) -> dict[str, int]:
"""Return current file positions (sizes) for SSE polling."""
log_dir = Path(log_dir)
positions = {}
for log_file in log_dir.glob("*.log"):
try:
positions[str(log_file)] = log_file.stat().st_size
except OSError:
positions[str(log_file)] = 0
return positions
def get_new_lines(log_dir: str | Path, positions: dict[str, int]) -> tuple[list[dict], dict[str, int]]:
    """Read new lines since last positions. Returns (new_events, updated_positions)."""
    base = Path(log_dir)
    today = date.today().isoformat()
    events: list[dict] = []
    updated = dict(positions)
    for log_file in base.glob("*.log"):
        key = str(log_file)
        last_offset = positions.get(key, 0)
        try:
            size = log_file.stat().st_size
        except OSError:
            continue
        # Nothing new (covers truncation/rotation too: position is reset).
        if size <= last_offset:
            updated[key] = size
            continue
        source = log_file.stem
        try:
            with open(log_file, "r", errors="replace") as handle:
                handle.seek(last_offset)
                for raw in handle:
                    text = raw.strip()
                    if not text or today not in text:
                        continue
                    ts = parse_timestamp(text)
                    if ts is None:
                        continue
                    kind = classify_line(text)
                    if kind is None:
                        continue
                    # Strip the leading date (+ separator) from the message.
                    date_len = len(ts.isoformat().split("T")[0])
                    events.append({
                        "time": ts.strftime("%H:%M:%S"),
                        "timestamp": ts.isoformat(),
                        "type": kind,
                        "source": source,
                        "message": text[date_len + 1:].strip().lstrip(",").strip(),
                    })
            updated[key] = size
        except (OSError, PermissionError):
            continue
    events.sort(key=lambda event: event["timestamp"], reverse=True)
    return events, updated

20
dashboard/api/main.py Normal file
View File

@@ -0,0 +1,20 @@
"""Homelab Dashboard API — aggregates data from homelab services."""
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from routers import overview, containers, media, automations, expenses, olares, network, logs
# FastAPI application; every feature router is mounted under /api.
app = FastAPI(title="Homelab Dashboard API", version="1.0.0")
# NOTE(review): CORS is fully open (any origin/method/header). Acceptable for
# a trusted-LAN dashboard — confirm this service is never internet-exposed.
app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["*"], allow_headers=["*"])
app.include_router(overview.router, prefix="/api")
app.include_router(containers.router, prefix="/api")
app.include_router(media.router, prefix="/api")
app.include_router(automations.router, prefix="/api")
app.include_router(expenses.router, prefix="/api")
app.include_router(olares.router, prefix="/api")
app.include_router(network.router, prefix="/api")
app.include_router(logs.router, prefix="/api")


@app.get("/api/health")
def health():
    """Liveness probe: static OK payload, no downstream calls."""
    return {"status": "ok"}

View File

@@ -0,0 +1,5 @@
fastapi==0.115.12
uvicorn[standard]==0.34.2
httpx==0.28.1
pyyaml>=6.0
sse-starlette==2.3.3

View File

View File

@@ -0,0 +1,146 @@
"""Automation status: email organizers, stack restarts, backup, drift."""
import sqlite3
from datetime import date
from pathlib import Path
from fastapi import APIRouter
import sys
sys.path.insert(0, str(Path(__file__).parent.parent))
from lib_bridge import GMAIL_DB, DVISH_DB, PROTON_DB, RESTART_DB, LOG_DIR
router = APIRouter(tags=["automations"])
def _query_email_db(db_path: Path, name: str) -> dict:
"""Query a processed.db for today's category counts and sender_cache stats."""
if not db_path.exists():
return {"name": name, "exists": False}
today = date.today().isoformat()
try:
conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
conn.row_factory = sqlite3.Row
# Today's category counts
cur = conn.execute(
"SELECT category, COUNT(*) as cnt FROM processed "
"WHERE processed_at LIKE ? GROUP BY category",
(f"{today}%",),
)
categories = {row["category"]: row["cnt"] for row in cur}
# Total processed today
cur = conn.execute(
"SELECT COUNT(*) FROM processed WHERE processed_at LIKE ?",
(f"{today}%",),
)
total_today = cur.fetchone()[0]
# Sender cache stats
cur = conn.execute("SELECT COUNT(*) FROM sender_cache")
cache_size = cur.fetchone()[0]
cur = conn.execute(
"SELECT category, COUNT(*) as cnt FROM sender_cache GROUP BY category"
)
cache_by_category = {row["category"]: row["cnt"] for row in cur}
conn.close()
return {
"name": name,
"exists": True,
"today_total": total_today,
"today_categories": categories,
"sender_cache_size": cache_size,
"sender_cache_categories": cache_by_category,
}
except Exception as e:
return {"name": name, "exists": True, "error": str(e)}
@router.get("/automations/email")
def email_status():
    """Email organizer status for all 3 accounts."""
    sources = (("gmail", GMAIL_DB), ("dvish", DVISH_DB), ("proton", PROTON_DB))
    return {"accounts": [_query_email_db(db, label) for label, db in sources]}
@router.get("/automations/restarts")
def restart_status():
    """Recent unhealthy container tracking entries (newest first, max 50).

    Returns {"entries": [...], "count": N}, plus an "error" entry when the
    query fails; empty when the tracking DB does not exist.
    """
    from contextlib import closing  # local import keeps this fix self-contained

    if not RESTART_DB.exists():
        return {"entries": [], "count": 0}
    try:
        # closing() releases the read-only connection even if the query
        # raises — the original left the handle open on error.
        with closing(sqlite3.connect(f"file:{RESTART_DB}?mode=ro", uri=True)) as conn:
            conn.row_factory = sqlite3.Row
            cur = conn.execute(
                "SELECT * FROM unhealthy_tracking ORDER BY last_checked DESC LIMIT 50"
            )
            entries = [dict(row) for row in cur]
        return {"entries": entries, "count": len(entries)}
    except Exception as e:
        return {"entries": [], "count": 0, "error": str(e)}
@router.get("/automations/backup")
def backup_status():
    """Parse today's backup log for status."""
    log_file = LOG_DIR / "gmail-backup-daily.log"
    if not log_file.exists():
        return {"status": "no_log", "entries": []}
    today = date.today().isoformat()
    todays_lines: list[str] = []
    saw_error = False
    try:
        with open(log_file, "r", errors="replace") as handle:
            for raw in handle:
                if today not in raw:
                    continue
                todays_lines.append(raw.strip())
                saw_error = saw_error or "ERROR" in raw.upper()
    except OSError:
        return {"status": "read_error", "entries": []}
    if saw_error:
        status = "error"
    elif todays_lines:
        status = "ok"
    else:
        status = "no_entries_today"
    return {
        "status": status,
        "entries": todays_lines[-20:],  # last 20 of today's entries
        "has_errors": saw_error,
    }
@router.get("/automations/drift")
def drift_status():
    """Parse config-drift.log for last result.

    Returns a status of "clean", "drifted" (with a drift count), "unknown",
    "no_log", or "read_error", plus the last matching log line.
    """
    import re  # hoisted to function scope — the original imported inside the loop

    log_file = LOG_DIR / "config-drift.log"
    if not log_file.exists():
        return {"status": "no_log", "last_result": None}
    try:
        with open(log_file, "r", errors="replace") as f:
            lines = f.readlines()
        # Scan backwards for the most recent meaningful result line.
        for line in reversed(lines):
            line = line.strip()
            if "No drifts found" in line:
                return {"status": "clean", "last_result": "No drifts found", "drifts": 0}
            if "drift" in line.lower():
                m = re.search(r"(\d+)\s+drifts?", line)
                # -1 means "drifted but the count could not be parsed".
                count = int(m.group(1)) if m else -1
                return {"status": "drifted", "last_result": line, "drifts": count}
        return {"status": "unknown", "last_result": lines[-1].strip() if lines else None}
    except OSError:
        return {"status": "read_error", "last_result": None}

View File

@@ -0,0 +1,63 @@
"""Container listing, logs, and management."""
from fastapi import APIRouter, Query, HTTPException
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent))
from lib_bridge import (
portainer_list_containers,
portainer_get_container_logs,
portainer_restart_container,
ENDPOINTS,
)
router = APIRouter(tags=["containers"])
@router.get("/containers")
def list_containers(endpoint: str | None = None):
    """List all containers across endpoints, optional endpoint filter."""
    if endpoint and endpoint in ENDPOINTS:
        targets = [endpoint]
    else:
        targets = list(ENDPOINTS)
    results = []
    for ep in targets:
        try:
            for container in portainer_list_containers(ep):
                names = container.get("Names", [])
                # Docker prefixes names with "/"; fall back to a short id.
                display = names[0].lstrip("/") if names else container.get("Id", "")[:12]
                results.append({
                    "id": container.get("Id", "")[:12],
                    "name": display,
                    "image": container.get("Image", ""),
                    "state": container.get("State", ""),
                    "status": container.get("Status", ""),
                    "endpoint": ep,
                })
        except Exception as exc:
            # A failing endpoint contributes an error record, not a 500.
            results.append({"endpoint": ep, "error": str(exc)})
    return results
@router.get("/containers/{container_id}/logs")
def container_logs(container_id: str, endpoint: str = Query(...)):
    """Get container logs. Requires endpoint query param.

    Raises:
        HTTPException: 400 for an unknown endpoint, 502 when the
            Portainer call fails.
    """
    if endpoint not in ENDPOINTS:
        raise HTTPException(400, f"Unknown endpoint: {endpoint}")
    try:
        logs = portainer_get_container_logs(endpoint, container_id)
    except Exception as e:
        # Chain the cause so the underlying traceback isn't lost (PEP 3134).
        raise HTTPException(502, f"Failed to get logs: {e}") from e
    return {"container_id": container_id, "endpoint": endpoint, "logs": logs}
@router.post("/containers/{container_id}/restart")
def restart_container(container_id: str, endpoint: str = Query(...)):
    """Restart a container. Requires endpoint query param."""
    if endpoint not in ENDPOINTS:
        raise HTTPException(400, f"Unknown endpoint: {endpoint}")
    if not portainer_restart_container(endpoint, container_id):
        raise HTTPException(502, "Restart failed")
    return {"status": "restarted", "container_id": container_id, "endpoint": endpoint}

View File

@@ -0,0 +1,61 @@
"""Expenses CSV reader and summary."""
import csv
from collections import defaultdict
from fastapi import APIRouter, Query
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent))
from lib_bridge import EXPENSES_CSV
router = APIRouter(tags=["expenses"])
def _read_expenses() -> list[dict]:
    """Load every expense row from the CSV, or [] when the file is absent."""
    if not EXPENSES_CSV.exists():
        return []
    with open(EXPENSES_CSV, "r", newline="") as handle:
        reader = csv.DictReader(handle)
        return [row for row in reader]
@router.get("/expenses")
def list_expenses(month: str | None = Query(None, description="Filter by YYYY-MM")):
    """List expenses, optionally filtered by month."""
    rows = _read_expenses()
    if not month:
        return rows
    return [row for row in rows if row.get("date", "").startswith(month)]
@router.get("/expenses/summary")
def expenses_summary(month: str | None = Query(None, description="Filter by YYYY-MM")):
    """Monthly total, count, top 10 vendors by amount."""
    rows = _read_expenses()
    if month:
        rows = [row for row in rows if row.get("date", "").startswith(month)]
    if not rows:
        return {"total": 0, "count": 0, "top_vendors": [], "month": month}

    def _amount(row: dict) -> float:
        # Malformed or missing amounts count as zero instead of failing.
        try:
            return float(row.get("amount", 0))
        except (ValueError, TypeError):
            return 0.0

    grand_total = 0.0
    per_vendor = defaultdict(float)
    for row in rows:
        value = _amount(row)
        grand_total += value
        per_vendor[row.get("vendor", "unknown")] += value
    ranked = sorted(per_vendor.items(), key=lambda pair: pair[1], reverse=True)[:10]
    return {
        "total": round(grand_total, 2),
        "count": len(rows),
        "top_vendors": [{"vendor": name, "amount": round(amt, 2)} for name, amt in ranked],
        "month": month,
    }

View File

@@ -0,0 +1,57 @@
"""Unified log viewer routes."""
from fastapi import APIRouter, Query
from pathlib import Path
router = APIRouter(tags=["logs"])
# Fall back to /tmp when the container log mount is absent (local dev).
LOG_DIR = Path("/app/logs") if Path("/app/logs").exists() else Path("/tmp")
# Short route key -> log filename on disk under LOG_DIR.
LOG_FILES = {
    "stack-restart": "stack-restart.log",
    "backup": "backup-validator.log",
    "gmail-lz": "gmail-organizer.log",
    "gmail-dvish": "gmail-organizer-dvish.log",
    "proton": "proton-organizer.log",
    "receipt": "receipt-tracker.log",
    "drift": "config-drift.log",
    "digest": "email-digest.log",
    "disk": "disk-predictor.log",
    "changelog": "changelog-generator.log",
}
@router.get("/logs")
def list_logs():
    """List available log files with sizes."""
    available = []
    for name, filename in LOG_FILES.items():
        candidate = LOG_DIR / filename
        if not candidate.exists():
            continue
        info = candidate.stat()
        available.append({
            "name": name,
            "filename": filename,
            "size_bytes": info.st_size,
            "modified": info.st_mtime,
        })
    return available
@router.get("/logs/{log_name}")
def get_log(log_name: str, tail: int = Query(200, le=2000), search: str | None = Query(None)):
    """Get log file contents.

    Args:
        log_name: Key into LOG_FILES.
        tail: Maximum number of (matching) lines to return, capped at 2000.
        search: Optional case-insensitive substring filter.

    Returns:
        {"lines": [...], "total": N}, or an "error" entry for an unknown
        log name / read failure.
    """
    if log_name not in LOG_FILES:
        return {"error": f"Unknown log: {log_name}", "lines": []}
    path = LOG_DIR / LOG_FILES[log_name]
    if not path.exists():
        return {"lines": [], "total": 0}
    try:
        # errors="replace" keeps the endpoint working on logs containing bad
        # bytes, matching every other log reader in this API.
        with open(path, errors="replace") as f:
            all_lines = f.readlines()
    except OSError as e:
        return {"error": f"Failed to read log: {e}", "lines": [], "total": 0}
    if search:
        needle = search.lower()
        all_lines = [l for l in all_lines if needle in l.lower()]
    return {"lines": [l.rstrip() for l in all_lines[-tail:]], "total": len(all_lines)}

View File

@@ -0,0 +1,105 @@
"""Jellyfin + Arr suite media endpoints."""
import json
import subprocess
from fastapi import APIRouter
import httpx
router = APIRouter(tags=["media"])
# NOTE(review): service credentials are hard-coded below. Most appear to be
# redacted placeholders in this mirror, but the SABnzbd key looks live —
# move these to environment variables / a secrets store.
JELLYFIN_API_KEY = "REDACTED_API_KEY" # pragma: allowlist secret
SONARR_URL = "http://192.168.0.200:8989"
SONARR_KEY = "REDACTED_SONARR_API_KEY" # pragma: allowlist secret
RADARR_URL = "http://192.168.0.200:7878"
RADARR_KEY = "REDACTED_RADARR_API_KEY" # pragma: allowlist secret
SABNZBD_URL = "http://192.168.0.200:8080"
SABNZBD_KEY = "6ae289de5a4f45f7a0124b43ba9c3dea" # pragma: allowlist secret
def _jellyfin(path: str) -> dict:
    """Call Jellyfin API via SSH+kubectl to bypass Olares auth sidecar.

    Returns the decoded JSON payload, or {} on any SSH/exec/parse failure.
    """
    joiner = "&" if "?" in path else "?"
    url = f"http://localhost:8096{path}{joiner}api_key={JELLYFIN_API_KEY}"
    remote_cmd = (
        f"kubectl exec -n jellyfin-vishinator deploy/jellyfin -c jellyfin "
        f"-- curl -s '{url}'"
    )
    try:
        proc = subprocess.run(
            ["ssh", "-o", "ConnectTimeout=3", "olares", remote_cmd],
            capture_output=True, text=True, timeout=15,
        )
        if proc.returncode != 0:
            return {}
        return json.loads(proc.stdout)
    except Exception:
        # Degrade to an empty payload so endpoints render partial data.
        return {}
@router.get("/jellyfin/status")
def jellyfin_status():
    """Jellyfin server status: version, libraries, sessions."""
    info = _jellyfin("/System/Info")
    libraries = _jellyfin("/Library/VirtualFolders")
    sessions = _jellyfin("/Sessions")
    playing = []
    idle = 0
    for session in (sessions if isinstance(sessions, list) else []):
        item = session.get("NowPlayingItem")
        if not item:
            idle += 1
            continue
        playing.append({
            "user": session.get("UserName", ""),
            "client": session.get("Client", ""),
            "device": session.get("DeviceName", ""),
            "now_playing": item.get("Name", ""),
            "type": item.get("Type", ""),
        })
    library_list = []
    if isinstance(libraries, list):
        library_list = [
            {"name": lib.get("Name"), "type": lib.get("CollectionType", "")}
            for lib in libraries
        ]
    return {
        "version": info.get("Version", "unknown"),
        "server_name": info.get("ServerName", "unknown"),
        "libraries": library_list,
        "active_sessions": playing,
        "idle_sessions": idle,
    }
@router.get("/sonarr/queue")
async def sonarr_queue():
    """Sonarr download queue."""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            response = await client.get(
                f"{SONARR_URL}/api/v3/queue", headers={"X-Api-Key": SONARR_KEY}
            )
        return response.json()
    except Exception as exc:
        return {"error": str(exc)}
@router.get("/radarr/queue")
async def radarr_queue():
    """Radarr download queue."""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            response = await client.get(
                f"{RADARR_URL}/api/v3/queue", headers={"X-Api-Key": RADARR_KEY}
            )
        return response.json()
    except Exception as exc:
        return {"error": str(exc)}
@router.get("/sabnzbd/queue")
async def sabnzbd_queue():
    """SABnzbd download queue."""
    query = {"apikey": SABNZBD_KEY, "output": "json", "mode": "queue"}
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            response = await client.get(f"{SABNZBD_URL}/api", params=query)
        return response.json()
    except Exception as exc:
        return {"error": str(exc)}

View File

@@ -0,0 +1,64 @@
"""Network / Headscale / AdGuard routes."""
from fastapi import APIRouter
import subprocess
import json
import httpx
router = APIRouter(tags=["network"])
ADGUARD_URL = "http://192.168.0.250:9080"
# NOTE(review): AdGuard credentials are hard-coded (password is redacted in
# this mirror) — prefer environment variables for the real deployment.
ADGUARD_USER = "vish"
ADGUARD_PASS = "REDACTED_PASSWORD"
def _adguard_get(path):
    """Log in to AdGuard Home, then GET /control<path> and return parsed JSON."""
    with httpx.Client(timeout=10) as client:
        credentials = {"name": ADGUARD_USER, "password": ADGUARD_PASS}
        client.post(f"{ADGUARD_URL}/control/login", json=credentials)
        response = client.get(f"{ADGUARD_URL}/control{path}")
        response.raise_for_status()
        if not response.content:
            return {}
        return response.json()
@router.get("/network/headscale")
def headscale_nodes():
    """List Headscale nodes.

    Returns {"nodes": [...]} with name/ip/online/last_seen per node, or an
    "error" entry when the SSH call or the JSON parse fails (the original
    let TimeoutExpired / JSONDecodeError escape as a 500, unlike the other
    network endpoints which return error dicts).
    """
    try:
        result = subprocess.run(
            ["ssh", "-o", "ConnectTimeout=3", "calypso",
             "docker exec headscale headscale nodes list -o json"],
            capture_output=True, text=True, timeout=15,
        )
    except (subprocess.TimeoutExpired, OSError) as e:
        return {"nodes": [], "error": str(e)}
    if result.returncode != 0:
        return {"nodes": [], "error": result.stderr.strip()}
    try:
        nodes = json.loads(result.stdout)
    except json.JSONDecodeError as e:
        return {"nodes": [], "error": f"Bad JSON from headscale: {e}"}
    return {"nodes": [
        {"name": n.get("givenName") or n.get("name", "?"),
         "ip": (n.get("ipAddresses") or ["?"])[0],
         "online": n.get("online", False),
         "last_seen": n.get("lastSeen", "")}
        for n in nodes
    ]}
@router.get("/network/adguard")
def adguard_stats():
    """Get AdGuard DNS stats."""
    try:
        stats = _adguard_get("/stats")
    except Exception as exc:
        return {"error": str(exc)}
    return {
        "total_queries": stats.get("num_dns_queries", 0),
        "blocked": stats.get("num_blocked_filtering", 0),
        "avg_time": stats.get("avg_processing_time", 0),
    }
@router.get("/network/adguard/rewrites")
def adguard_rewrites():
    """List AdGuard DNS rewrites."""
    try:
        entries = _adguard_get("/rewrite/list") or []
        return [
            {"domain": entry.get("domain", ""), "answer": entry.get("answer", "")}
            for entry in entries
        ]
    except Exception as exc:
        return {"error": str(exc)}

View File

@@ -0,0 +1,93 @@
"""Olares K3s pod listing and GPU status."""
import subprocess
from fastapi import APIRouter, Query
router = APIRouter(tags=["olares"])
def _ssh_olares(cmd: str, timeout: int = 10) -> str:
    """Run a command on olares via SSH; return stdout, or "" on any failure.

    The original let subprocess.TimeoutExpired (hung SSH) and OSError
    (missing ssh binary) escape to the endpoint as a 500; both now degrade
    to "" exactly like a non-zero exit status does.
    """
    try:
        result = subprocess.run(
            ["ssh", "-o", "ConnectTimeout=3", "olares", cmd],
            capture_output=True, text=True, timeout=timeout,
        )
    except (subprocess.TimeoutExpired, OSError):
        return ""
    return result.stdout if result.returncode == 0 else ""
@router.get("/olares/pods")
def olares_pods(namespace: str | None = Query(None)):
    """List K3s pods on olares."""
    if namespace:
        kubectl_cmd = f"kubectl get pods -n {namespace} -o wide --no-headers"
    else:
        kubectl_cmd = "kubectl get pods -A -o wide --no-headers"
    output = _ssh_olares(kubectl_cmd, timeout=15)
    if not output:
        return []
    pods = []
    for line in output.strip().split("\n"):
        parts = line.split()
        if not parts:
            continue
        if namespace:
            # `-n <ns>` output has no NAMESPACE column.
            if len(parts) < 7:
                continue
            pods.append({
                "namespace": namespace,
                "name": parts[0],
                "ready": parts[1],
                "status": parts[2],
                "restarts": parts[3],
                "age": parts[4],
                "ip": parts[5],
                "node": parts[6],
            })
        elif len(parts) >= 8:
            # `-A` output includes the NAMESPACE column first.
            pods.append({
                "namespace": parts[0],
                "name": parts[1],
                "ready": parts[2],
                "status": parts[3],
                "restarts": parts[4],
                "age": parts[5],
                "ip": parts[6],
                "node": parts[7],
            })
    return pods
@router.get("/olares/gpu")
def olares_gpu():
    """GPU status from olares."""
    raw = _ssh_olares(
        "nvidia-smi --query-gpu=name,temperature.gpu,power.draw,power.limit,"
        "memory.used,memory.total,utilization.gpu --format=csv,noheader,nounits"
    )
    if not raw:
        return {"available": False}
    fields = [field.strip() for field in raw.strip().split(",")]
    if len(fields) < 7:
        return {"available": False}

    def _to_float(text: str) -> float | None:
        # Numeric fields may be "[N/A]" etc.; treat anything unparsable as None.
        try:
            return float(text)
        except (ValueError, TypeError):
            return None

    name, temp, draw, limit, used, total, util = fields[:7]
    return {
        "available": True,
        "name": name,
        "temp_c": _to_float(temp),
        "power_draw_w": _to_float(draw),
        "power_limit_w": _to_float(limit),
        "memory_used_mb": _to_float(used),
        "memory_total_mb": _to_float(total),
        "utilization_pct": _to_float(util),
    }

View File

@@ -0,0 +1,194 @@
"""Overview stats and SSE activity stream."""
import asyncio
import json
import subprocess
import sqlite3
from datetime import date
from fastapi import APIRouter
from sse_starlette.sse import EventSourceResponse
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent))
from lib_bridge import (
portainer_list_containers, ENDPOINTS, ollama_available,
GMAIL_DB, DVISH_DB, PROTON_DB, RESTART_DB, LOG_DIR, OLLAMA_URL,
)
from log_parser import get_recent_events, tail_logs, get_new_lines
router = APIRouter(tags=["overview"])
def _count_today_emails(db_path: Path) -> int:
"""Count emails processed today from a processed.db file."""
if not db_path.exists():
return 0
try:
today = date.today().isoformat()
conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
cur = conn.execute(
"SELECT COUNT(*) FROM processed WHERE processed_at LIKE ?",
(f"{today}%",),
)
count = cur.fetchone()[0]
conn.close()
return count
except Exception:
return 0
def _count_unhealthy(db_path: Path) -> int:
"""Count unhealthy containers from stack-restart.db."""
if not db_path.exists():
return 0
try:
conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
cur = conn.execute("SELECT COUNT(*) FROM unhealthy_tracking")
count = cur.fetchone()[0]
conn.close()
return count
except Exception:
return 0
def _gpu_info() -> dict:
    """Get GPU info from olares via SSH; {"available": False} on any failure."""
    query = (
        "nvidia-smi --query-gpu=temperature.gpu,power.draw,power.limit,"
        "memory.used,memory.total,utilization.gpu --format=csv,noheader,nounits"
    )
    try:
        proc = subprocess.run(
            ["ssh", "-o", "ConnectTimeout=3", "olares", query],
            capture_output=True, text=True, timeout=10,
        )
    except Exception:
        return {"available": False}
    if proc.returncode != 0:
        return {"available": False}

    def _to_float(text):
        try:
            return float(text)
        except (ValueError, TypeError):
            return None

    fields = [f.strip() for f in proc.stdout.strip().split(",")]
    if len(fields) < 6:
        return {"available": False}
    keys = ("temp_c", "power_draw_w", "power_limit_w",
            "memory_used_mb", "memory_total_mb", "utilization_pct")
    info = {"available": True}
    info.update({key: _to_float(val) for key, val in zip(keys, fields)})
    return info
@router.get("/stats/overview")
def stats_overview():
    """Aggregate overview stats: containers, GPU, emails, restarts, Ollama."""
    by_endpoint = {}
    grand_total = 0
    for ep_name in ENDPOINTS:
        try:
            containers = portainer_list_containers(ep_name)
        except Exception:
            # An unreachable endpoint is reported but doesn't fail the page.
            by_endpoint[ep_name] = {"total": 0, "running": 0, "error": True}
            continue
        running = sum(1 for c in containers if c.get("State") == "running")
        by_endpoint[ep_name] = {"total": len(containers), "running": running}
        grand_total += len(containers)
    email_today = {
        "gmail": _count_today_emails(GMAIL_DB),
        "dvish": _count_today_emails(DVISH_DB),
        "proton": _count_today_emails(PROTON_DB),
    }
    email_today["total"] = sum(email_today.values())
    return {
        "containers": {"total": grand_total, "by_endpoint": by_endpoint},
        "gpu": _gpu_info(),
        "email_today": email_today,
        "unhealthy_count": _count_unhealthy(RESTART_DB),
        "ollama_available": ollama_available(OLLAMA_URL),
    }
@router.get("/activity")
async def activity_stream():
    """SSE stream of today's automation events."""
    async def _stream():
        # Initial payload: everything already logged today.
        yield {"event": "init", "data": json.dumps(get_recent_events(LOG_DIR))}
        offsets = tail_logs(LOG_DIR)
        # Then poll the log files every 5s and push only fresh events.
        while True:
            await asyncio.sleep(5)
            fresh, offsets = get_new_lines(LOG_DIR, offsets)
            if fresh:
                yield {"event": "update", "data": json.dumps(fresh)}

    return EventSourceResponse(_stream())
# Path to the cron control script shared by the three organizer endpoints
# (the original repeated this literal in each handler).
_ORGANIZER_CTL = "/home/homelab/organized/repos/homelab/scripts/gmail-organizer-ctl.sh"


def _run_organizer_ctl(action: str) -> subprocess.CompletedProcess:
    """Invoke the organizer control script with one of: stop, start, status."""
    return subprocess.run(
        [_ORGANIZER_CTL, action],
        capture_output=True, text=True, timeout=10,
    )


@router.post("/actions/pause-organizers")
def pause_organizers():
    """Pause all email organizer cron jobs."""
    result = _run_organizer_ctl("stop")
    return {"success": result.returncode == 0, "output": result.stdout.strip()}


@router.post("/actions/resume-organizers")
def resume_organizers():
    """Resume all email organizer cron jobs."""
    result = _run_organizer_ctl("start")
    return {"success": result.returncode == 0, "output": result.stdout.strip()}


@router.get("/actions/organizer-status")
def organizer_status():
    """Check if organizers are running or paused."""
    result = _run_organizer_ctl("status")
    return {"output": result.stdout.strip()}
@router.post("/chat")
def chat_with_ollama(body: dict):
    """Send a message to the local Ollama LLM.

    Body: {"message": "<prompt>"}. Returns {"response": ...} on success or
    {"error": ...} on a missing message / generation failure.
    """
    prompt = body.get("message", "")
    if not prompt:
        return {"error": "No message provided"}
    try:
        # Resolve the scripts dir and only extend sys.path when it's absent;
        # the original re-imported sys under an alias and grew sys.path on
        # every request.
        scripts = Path("/app/scripts")
        if not scripts.exists():
            scripts = Path(__file__).parent.parent.parent / "scripts"
        if str(scripts) not in sys.path:
            sys.path.insert(0, str(scripts))
        from lib.ollama import ollama_generate
        response = ollama_generate(prompt, num_predict=500, timeout=60)
        return {"response": response}
    except Exception as e:
        return {"error": str(e)}