Sanitized mirror from private repository - 2026-04-05 12:40:35 UTC
8
dashboard/api/Dockerfile
Normal file
@@ -0,0 +1,8 @@
FROM python:3.12-slim
RUN apt-get update && apt-get install -y --no-install-recommends openssh-client curl && rm -rf /var/lib/apt/lists/*
WORKDIR /app/api
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
EXPOSE 8888
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8888"]
35
dashboard/api/lib_bridge.py
Normal file
@@ -0,0 +1,35 @@
"""Bridge to import scripts/lib/ modules from the mounted volume."""
import sys
from pathlib import Path

SCRIPTS_DIR = Path("/app/scripts")
if not SCRIPTS_DIR.exists():
    SCRIPTS_DIR = Path(__file__).parent.parent.parent / "scripts"
sys.path.insert(0, str(SCRIPTS_DIR))

from lib.portainer import (
    list_containers as portainer_list_containers,
    get_container_logs as portainer_get_container_logs,
    restart_container as portainer_restart_container,
    inspect_container as portainer_inspect_container,
    ENDPOINTS,
)
from lib.prometheus import prom_query, prom_query_range
from lib.ollama import ollama_available, DEFAULT_URL as OLLAMA_URL, DEFAULT_MODEL as OLLAMA_MODEL

# DB paths
GMAIL_DB = SCRIPTS_DIR / "gmail-organizer" / "processed.db"
DVISH_DB = SCRIPTS_DIR / "gmail-organizer-dvish" / "processed.db"
PROTON_DB = SCRIPTS_DIR / "proton-organizer" / "processed.db"
RESTART_DB = SCRIPTS_DIR / "stack-restart.db"

# Data paths
DATA_DIR = Path("/app/data")
if not DATA_DIR.exists():
    DATA_DIR = Path(__file__).parent.parent.parent / "data"
EXPENSES_CSV = DATA_DIR / "expenses.csv"

# Log paths
LOG_DIR = Path("/app/logs")
if not LOG_DIR.exists():
    LOG_DIR = Path("/tmp")
194
dashboard/api/log_parser.py
Normal file
@@ -0,0 +1,194 @@
"""Parse automation log files into structured events for the dashboard."""

import os
import re
from datetime import datetime, date
from pathlib import Path

# Patterns: (compiled_regex, event_type, optional extractor returning extra fields)
# Extractor receives the match object and returns a dict of extra fields.
# Order matters — first match wins.
PATTERNS = [
    # --- Email classification ---
    (re.compile(r"\[(\d+)/(\d+)\] Classifying: (.+?) \(from:"), "email_classifying",
     lambda m: {"progress": f"{m.group(1)}/{m.group(2)}", "subject": m.group(3)}),
    (re.compile(r"Cached: (.+?) -> (\w+)"), "email_cached",
     lambda m: {"subject": m.group(1), "category": m.group(2)}),
    (re.compile(r"→ (receipts|newsletters|work|personal|accounts)(?:\s*\((.+?)\))?"), "email_classified",
     lambda m: {"category": m.group(1), "label": m.group(2) or ""}),

    # --- Receipt extraction ---
    (re.compile(r"Would write:.*'vendor': '([^']+)'.*'amount': '([^']+)'"), "receipt_extracted",
     lambda m: {"vendor": m.group(1), "amount": m.group(2)}),
    (re.compile(r"Appended to CSV:.*vendor=([^,]+).*amount=([^,]+)"), "receipt_extracted",
     lambda m: {"vendor": m.group(1).strip(), "amount": m.group(2).strip()}),

    # --- Cron / automation completions ---
    (re.compile(r"Done! Stats: \{"), "cron_complete", lambda m: {}),

    # --- Container health / stack restarts ---
    (re.compile(r"Container (\S+) on (\S+) restarted"), "container_restarted",
     lambda m: {"container": m.group(1), "endpoint": m.group(2)}),
    (re.compile(r"LLM says (SAFE|UNSAFE) for (\S+)"), "restart_analysis",
     lambda m: {"decision": m.group(1), "container": m.group(2)}),
    (re.compile(r"[Uu]nhealthy.*?(\S+)\s+on\s+(\S+)"), "container_unhealthy",
     lambda m: {"container": m.group(1), "endpoint": m.group(2)}),
    (re.compile(r"[Uu]nhealthy"), "container_unhealthy", lambda m: {}),
    (re.compile(r"Stack-restart check complete"), "stack_healthy", lambda m: {}),

    # --- Backups ---
    (re.compile(r"Backup Validation: (OK|FAIL)"), "backup_result",
     lambda m: {"status": m.group(1)}),
    (re.compile(r"Backup Report"), "backup_result", lambda m: {"status": "report"}),

    # --- Config drift ---
    (re.compile(r"Detected (\d+) drifts? across (\d+) services?"), "drift_found",
     lambda m: {"drifts": m.group(1), "services": m.group(2)}),
    (re.compile(r"No drifts found"), "drift_clean", lambda m: {}),

    # --- Disk predictor ---
    (re.compile(r"WARNING.*volume.* (\d+) days"), "disk_warning",
     lambda m: {"days": m.group(1)}),
    (re.compile(r"Total filesystems: (\d+)"), "disk_scan_complete",
     lambda m: {"count": m.group(1)}),

    # --- Changelog / PR review ---
    (re.compile(r"Generated changelog with (\d+) commits"), "changelog_generated",
     lambda m: {"commits": m.group(1)}),
    (re.compile(r"(\d+) new commits since"), "changelog_commits",
     lambda m: {"count": m.group(1)}),
    (re.compile(r"Posted review comment on PR #(\d+)"), "pr_reviewed",
     lambda m: {"pr": m.group(1)}),

    # --- Catch-all patterns (lower priority) ---
    (re.compile(r"ERROR|CRITICAL"), "error", lambda m: {}),
    (re.compile(r"Starting .+ check|Starting .+ organizer"), "start", lambda m: {}),
    (re.compile(r"emails? downloaded|backup: \d+ total"), "backup_progress", lambda m: {}),
]

# Timestamp pattern at the start of log lines
TS_PATTERN = re.compile(r"^(\d{4}-\d{2}-\d{2}[\sT_]\d{2}:\d{2}:\d{2})")


def parse_timestamp(line: str) -> datetime | None:
    """Extract timestamp from a log line."""
    m = TS_PATTERN.match(line)
    if m:
        ts_str = m.group(1).replace("_", " ").replace("T", " ")
        try:
            return datetime.strptime(ts_str, "%Y-%m-%d %H:%M:%S")
        except ValueError:
            pass
    return None


def classify_line(line: str) -> tuple[str, dict] | None:
    """Return (event_type, extra_fields) if line matches a known pattern, else None."""
    for pattern, event_type, extractor in PATTERNS:
        m = pattern.search(line)
        if m:
            try:
                extra = extractor(m)
            except Exception:
                extra = {}
            return event_type, extra
    return None


def get_recent_events(log_dir: str | Path, max_events: int = 50) -> list[dict]:
    """Parse today's events from all log files in log_dir."""
    log_dir = Path(log_dir)
    today = date.today().isoformat()
    events = []

    for log_file in log_dir.glob("*.log"):
        source = log_file.stem
        try:
            with open(log_file, "r", errors="replace") as f:
                for line in f:
                    line = line.strip()
                    if not line or today not in line:
                        continue
                    ts = parse_timestamp(line)
                    if ts is None or ts.date().isoformat() != today:
                        continue
                    result = classify_line(line)
                    if result:
                        event_type, extra = result
                        raw_msg = line[len(ts.isoformat().split("T")[0]) + 1:].strip().lstrip(",").strip()
                        event = {
                            "time": ts.strftime("%H:%M:%S"),
                            "timestamp": ts.isoformat(),
                            "type": event_type,
                            "source": source,
                            "raw": raw_msg,
                            **extra,
                        }
                        events.append(event)
        except (OSError, PermissionError):
            continue

    events.sort(key=lambda e: e["timestamp"], reverse=True)
    return events[:max_events]


def tail_logs(log_dir: str | Path) -> dict[str, int]:
    """Return current file positions (sizes) for SSE polling."""
    log_dir = Path(log_dir)
    positions = {}
    for log_file in log_dir.glob("*.log"):
        try:
            positions[str(log_file)] = log_file.stat().st_size
        except OSError:
            positions[str(log_file)] = 0
    return positions


def get_new_lines(log_dir: str | Path, positions: dict[str, int]) -> tuple[list[dict], dict[str, int]]:
    """Read new lines since last positions. Returns (new_events, updated_positions)."""
    log_dir = Path(log_dir)
    today = date.today().isoformat()
    new_events = []
    new_positions = dict(positions)

    for log_file in log_dir.glob("*.log"):
        path_str = str(log_file)
        old_pos = positions.get(path_str, 0)
        try:
            current_size = log_file.stat().st_size
        except OSError:
            continue

        if current_size <= old_pos:
            new_positions[path_str] = current_size
            continue

        source = log_file.stem
        try:
            with open(log_file, "r", errors="replace") as f:
                f.seek(old_pos)
                for line in f:
                    line = line.strip()
                    if not line or today not in line:
                        continue
                    ts = parse_timestamp(line)
                    if ts is None:
                        continue
                    result = classify_line(line)
                    if result:
                        event_type, extra = result
                        raw_msg = line[len(ts.isoformat().split("T")[0]) + 1:].strip().lstrip(",").strip()
                        new_events.append({
                            "time": ts.strftime("%H:%M:%S"),
                            "timestamp": ts.isoformat(),
                            "type": event_type,
                            "source": source,
                            "raw": raw_msg,
                            **extra,
                        })
            new_positions[path_str] = current_size
        except (OSError, PermissionError):
            continue

    new_events.sort(key=lambda e: e["timestamp"], reverse=True)
    return new_events, new_positions
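For reference, a minimal sketch of how these parser helpers could be exercised outside the API, assuming a local directory of *.log files (the directory name and the 5-second poll interval here are illustrative, not part of the commit):

# Illustrative only: drive log_parser the same way the SSE endpoint does.
from pathlib import Path
import time

import log_parser

LOG_DIR = Path("logs")  # assumed local directory containing *.log files

events = log_parser.get_recent_events(LOG_DIR, max_events=10)
for ev in events:
    print(ev["time"], ev["type"], ev["source"], ev.get("raw", ""))

positions = log_parser.tail_logs(LOG_DIR)   # remember current file sizes
time.sleep(5)                                # wait for new log lines to appear
new_events, positions = log_parser.get_new_lines(LOG_DIR, positions)
print(f"{len(new_events)} new events")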
21
dashboard/api/main.py
Normal file
@@ -0,0 +1,21 @@
"""Homelab Dashboard API — aggregates data from homelab services."""
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from routers import overview, containers, media, automations, expenses, olares, network, logs, kuma

app = FastAPI(title="Homelab Dashboard API", version="1.0.0")
app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["*"], allow_headers=["*"])
app.include_router(overview.router, prefix="/api")
app.include_router(containers.router, prefix="/api")
app.include_router(media.router, prefix="/api")
app.include_router(automations.router, prefix="/api")
app.include_router(expenses.router, prefix="/api")
app.include_router(olares.router, prefix="/api")
app.include_router(network.router, prefix="/api")
app.include_router(logs.router, prefix="/api")
app.include_router(kuma.router, prefix="/api")


@app.get("/api/health")
def health():
    return {"status": "ok"}
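A quick smoke test once the container is running, assuming the host maps port 8888 as in the Dockerfile (the localhost base URL is an assumption):

# Illustrative smoke test using httpx (already pinned in requirements.txt).
import httpx

BASE = "http://localhost:8888"  # assumed host:port mapping for the container

r = httpx.get(f"{BASE}/api/health", timeout=5)
print(r.json())  # expected: {"status": "ok"}

overview = httpx.get(f"{BASE}/api/stats/overview", timeout=30)
print(overview.json().get("containers", {}))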
5
dashboard/api/requirements.txt
Normal file
@@ -0,0 +1,5 @@
fastapi==0.115.12
uvicorn[standard]==0.34.2
httpx==0.28.1
pyyaml>=6.0
sse-starlette==2.3.3
0
dashboard/api/routers/__init__.py
Normal file
146
dashboard/api/routers/automations.py
Normal file
@@ -0,0 +1,146 @@
"""Automation status: email organizers, stack restarts, backup, drift."""

import sqlite3
from datetime import date
from pathlib import Path
from fastapi import APIRouter

import sys
sys.path.insert(0, str(Path(__file__).parent.parent))

from lib_bridge import GMAIL_DB, DVISH_DB, PROTON_DB, RESTART_DB, LOG_DIR

router = APIRouter(tags=["automations"])


def _query_email_db(db_path: Path, name: str) -> dict:
    """Query a processed.db for today's category counts and sender_cache stats."""
    if not db_path.exists():
        return {"name": name, "exists": False}

    today = date.today().isoformat()
    try:
        conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
        conn.row_factory = sqlite3.Row

        # Today's category counts
        cur = conn.execute(
            "SELECT category, COUNT(*) as cnt FROM processed "
            "WHERE processed_at LIKE ? GROUP BY category",
            (f"{today}%",),
        )
        categories = {row["category"]: row["cnt"] for row in cur}

        # Total processed today
        cur = conn.execute(
            "SELECT COUNT(*) FROM processed WHERE processed_at LIKE ?",
            (f"{today}%",),
        )
        total_today = cur.fetchone()[0]

        # Sender cache stats
        cur = conn.execute("SELECT COUNT(*) FROM sender_cache")
        cache_size = cur.fetchone()[0]

        cur = conn.execute(
            "SELECT category, COUNT(*) as cnt FROM sender_cache GROUP BY category"
        )
        cache_by_category = {row["category"]: row["cnt"] for row in cur}

        conn.close()
        return {
            "name": name,
            "exists": True,
            "today_total": total_today,
            "today_categories": categories,
            "sender_cache_size": cache_size,
            "sender_cache_categories": cache_by_category,
        }
    except Exception as e:
        return {"name": name, "exists": True, "error": str(e)}


@router.get("/automations/email")
def email_status():
    """Email organizer status for all 3 accounts."""
    accounts = [
        _query_email_db(GMAIL_DB, "gmail"),
        _query_email_db(DVISH_DB, "dvish"),
        _query_email_db(PROTON_DB, "proton"),
    ]
    return {"accounts": accounts}


@router.get("/automations/restarts")
def restart_status():
    """Recent unhealthy container tracking entries."""
    if not RESTART_DB.exists():
        return {"entries": [], "count": 0}

    try:
        conn = sqlite3.connect(f"file:{RESTART_DB}?mode=ro", uri=True)
        conn.row_factory = sqlite3.Row
        cur = conn.execute(
            "SELECT * FROM unhealthy_tracking ORDER BY last_checked DESC LIMIT 50"
        )
        entries = [dict(row) for row in cur]
        conn.close()
        return {"entries": entries, "count": len(entries)}
    except Exception as e:
        return {"entries": [], "count": 0, "error": str(e)}


@router.get("/automations/backup")
def backup_status():
    """Parse today's backup log for status."""
    log_file = LOG_DIR / "gmail-backup-daily.log"
    if not log_file.exists():
        return {"status": "no_log", "entries": []}

    today = date.today().isoformat()
    entries = []
    has_error = False

    try:
        with open(log_file, "r", errors="replace") as f:
            for line in f:
                if today in line:
                    entries.append(line.strip())
                    if "ERROR" in line.upper():
                        has_error = True
    except OSError:
        return {"status": "read_error", "entries": []}

    return {
        "status": "error" if has_error else ("ok" if entries else "no_entries_today"),
        "entries": entries[-20:],  # Last 20 today entries
        "has_errors": has_error,
    }


@router.get("/automations/drift")
def drift_status():
    """Parse config-drift.log for last result."""
    log_file = LOG_DIR / "config-drift.log"
    if not log_file.exists():
        return {"status": "no_log", "last_result": None}

    try:
        with open(log_file, "r", errors="replace") as f:
            lines = f.readlines()

        # Find the last meaningful result
        for line in reversed(lines):
            line = line.strip()
            if "No drifts found" in line:
                return {"status": "clean", "last_result": "No drifts found", "drifts": 0}
            if "drift" in line.lower():
                # Try to extract count
                import re
                m = re.search(r"(\d+)\s+drifts?", line)
                count = int(m.group(1)) if m else -1
                return {"status": "drifted", "last_result": line, "drifts": count}

        return {"status": "unknown", "last_result": lines[-1].strip() if lines else None}
    except OSError:
        return {"status": "read_error", "last_result": None}
63
dashboard/api/routers/containers.py
Normal file
@@ -0,0 +1,63 @@
"""Container listing, logs, and management."""

from fastapi import APIRouter, Query, HTTPException

import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent))

from lib_bridge import (
    portainer_list_containers,
    portainer_get_container_logs,
    portainer_restart_container,
    ENDPOINTS,
)

router = APIRouter(tags=["containers"])


@router.get("/containers")
def list_containers(endpoint: str | None = None):
    """List all containers across endpoints, optional endpoint filter."""
    targets = [endpoint] if endpoint and endpoint in ENDPOINTS else list(ENDPOINTS)
    results = []
    for ep in targets:
        try:
            containers = portainer_list_containers(ep)
            for c in containers:
                names = c.get("Names", [])
                name = names[0].lstrip("/") if names else c.get("Id", "")[:12]
                results.append({
                    "id": c.get("Id", "")[:12],
                    "name": name,
                    "image": c.get("Image", ""),
                    "state": c.get("State", ""),
                    "status": c.get("Status", ""),
                    "endpoint": ep,
                })
        except Exception as e:
            results.append({"endpoint": ep, "error": str(e)})
    return results


@router.get("/containers/{container_id}/logs")
def container_logs(container_id: str, endpoint: str = Query(...)):
    """Get container logs. Requires endpoint query param."""
    if endpoint not in ENDPOINTS:
        raise HTTPException(400, f"Unknown endpoint: {endpoint}")
    try:
        logs = portainer_get_container_logs(endpoint, container_id)
        return {"container_id": container_id, "endpoint": endpoint, "logs": logs}
    except Exception as e:
        raise HTTPException(502, f"Failed to get logs: {e}")


@router.post("/containers/{container_id}/restart")
def restart_container(container_id: str, endpoint: str = Query(...)):
    """Restart a container. Requires endpoint query param."""
    if endpoint not in ENDPOINTS:
        raise HTTPException(400, f"Unknown endpoint: {endpoint}")
    success = portainer_restart_container(endpoint, container_id)
    if not success:
        raise HTTPException(502, "Restart failed")
    return {"status": "restarted", "container_id": container_id, "endpoint": endpoint}
64
dashboard/api/routers/expenses.py
Normal file
@@ -0,0 +1,64 @@
"""Expenses CSV reader and summary."""

import csv
from collections import defaultdict
from fastapi import APIRouter, Query

import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent))

from lib_bridge import EXPENSES_CSV

router = APIRouter(tags=["expenses"])


def _read_expenses() -> list[dict]:
    """Read all expenses from CSV."""
    if not EXPENSES_CSV.exists():
        return []
    with open(EXPENSES_CSV, "r", newline="") as f:
        return list(csv.DictReader(f))


@router.get("/expenses")
def list_expenses(month: str | None = Query(None, description="Filter by YYYY-MM")):
    """List expenses, optionally filtered by month."""
    expenses = _read_expenses()
    if month:
        expenses = [e for e in expenses if e.get("date", "").startswith(month)]
    return expenses


@router.get("/expenses/summary")
def expenses_summary(month: str | None = Query(None, description="Filter by YYYY-MM")):
    """Monthly total, count, top 10 vendors by amount."""
    from datetime import date
    if not month:
        month = date.today().strftime("%Y-%m")
    expenses = _read_expenses()
    all_time_count = len(expenses)
    expenses = [e for e in expenses if e.get("date", "").startswith(month)]

    if not expenses:
        return {"total": 0, "count": 0, "all_time": all_time_count, "top_vendors": [], "month": month}

    total = 0.0
    vendor_totals = defaultdict(float)
    for e in expenses:
        try:
            amount = float(e.get("amount", 0))
        except (ValueError, TypeError):
            amount = 0.0
        total += amount
        vendor = e.get("vendor", "unknown")
        vendor_totals[vendor] += amount

    top_vendors = sorted(vendor_totals.items(), key=lambda x: x[1], reverse=True)[:10]

    return {
        "total": round(total, 2),
        "count": len(expenses),
        "top_vendors": [{"vendor": v, "amount": round(a, 2)} for v, a in top_vendors],
        "month": month,
    }
56
dashboard/api/routers/kuma.py
Normal file
@@ -0,0 +1,56 @@
"""Uptime Kuma monitor status via SSH+sqlite3."""

import subprocess
from fastapi import APIRouter

router = APIRouter(tags=["kuma"])

KUMA_HOST = "pi-5"
KUMA_CONTAINER = "uptime-kuma"


def _kuma_query(sql: str) -> str:
    """Run a sqlite3 query against Uptime Kuma's database via SSH."""
    result = subprocess.run(
        ["ssh", "-o", "ConnectTimeout=3", KUMA_HOST,
         f'docker exec {KUMA_CONTAINER} sqlite3 /app/data/kuma.db "{sql}"'],
        capture_output=True, text=True, timeout=15)
    if result.returncode != 0:
        raise RuntimeError(result.stderr.strip())
    return result.stdout.strip()


@router.get("/kuma/monitors")
def kuma_monitors():
    """List all Uptime Kuma monitors with status."""
    try:
        rows = _kuma_query(
            "SELECT m.id, m.name, m.type, m.active, m.url, m.hostname, m.parent, "
            "COALESCE((SELECT h.status FROM heartbeat h WHERE h.monitor_id=m.id "
            "ORDER BY h.time DESC LIMIT 1), -1) as last_status "
            "FROM monitor m ORDER BY m.parent, m.name"
        )
        if not rows:
            return {"monitors": [], "total": 0, "up": 0, "down": 0}

        monitors = []
        for row in rows.splitlines():
            parts = row.split("|")
            if len(parts) < 8:
                continue
            mid, name, mtype, active, url, hostname, parent, status = parts[:8]
            monitors.append({
                "id": int(mid),
                "name": name,
                "type": mtype,
                "active": active == "1",
                "url": url or hostname or "",
                "parent": int(parent) if parent and parent != "" else None,
                "status": int(status),  # 1=up, 0=down, -1=unknown
            })

        up = sum(1 for m in monitors if m["status"] == 1 and m["active"])
        down = sum(1 for m in monitors if m["status"] == 0 and m["active"])
        return {"monitors": monitors, "total": len(monitors), "up": up, "down": down}
    except Exception as e:
        return {"monitors": [], "error": str(e)}
59
dashboard/api/routers/logs.py
Normal file
@@ -0,0 +1,59 @@
"""Unified log viewer routes."""

from fastapi import APIRouter, Query
from pathlib import Path

router = APIRouter(tags=["logs"])

LOG_DIR = Path("/app/logs") if Path("/app/logs").exists() else Path("/tmp")

LOG_FILES = {
    "stack-restart": "stack-restart.log",
    "backup": "backup-validator.log",
    "gmail-lz": "gmail-organizer.log",
    "gmail-dvish": "gmail-organizer-dvish.log",
    "proton": "proton-organizer.log",
    "receipt": "receipt-tracker.log",
    "drift": "config-drift.log",
    "digest": "email-digest.log",
    "disk": "disk-predictor.log",
    "changelog": "changelog-generator.log",
    "subscription": "subscription-auditor.log",
    "pr-review": "pr-reviewer.log",
}


@router.get("/logs")
def list_logs():
    """List available log files with sizes."""
    result = []
    for name, filename in LOG_FILES.items():
        path = LOG_DIR / filename
        if path.exists():
            stat = path.stat()
            result.append({
                "name": name,
                "filename": filename,
                "size_bytes": stat.st_size,
                "modified": stat.st_mtime,
            })
    return result


@router.get("/logs/{log_name}")
def get_log(log_name: str, tail: int = Query(200, le=2000), search: str = Query(None)):
    """Get log file contents."""
    if log_name not in LOG_FILES:
        return {"error": f"Unknown log: {log_name}", "lines": []}
    path = LOG_DIR / LOG_FILES[log_name]
    if not path.exists():
        return {"lines": [], "total": 0}

    with open(path) as f:
        all_lines = f.readlines()

    if search:
        all_lines = [l for l in all_lines if search.lower() in l.lower()]

    lines = all_lines[-tail:]
    return {"lines": [l.rstrip() for l in lines], "total": len(all_lines)}
365
dashboard/api/routers/media.py
Normal file
@@ -0,0 +1,365 @@
"""Jellyfin + Arr suite media endpoints."""

import json
import subprocess
from fastapi import APIRouter
import httpx

router = APIRouter(tags=["media"])

JELLYFIN_API_KEY = "REDACTED_API_KEY"  # pragma: allowlist secret
JELLYFIN_USER_ID = "308e0dab19ce4a2180a2933d73694514"
SONARR_URL = "http://192.168.0.200:8989"
SONARR_KEY = "REDACTED_SONARR_API_KEY"  # pragma: allowlist secret
RADARR_URL = "http://192.168.0.200:7878"
RADARR_KEY = "REDACTED_RADARR_API_KEY"  # pragma: allowlist secret
SABNZBD_URL = "http://192.168.0.200:8080"
SABNZBD_KEY = "6ae289de5a4f45f7a0124b43ba9c3dea"  # pragma: allowlist secret


def _jellyfin(path: str) -> dict:
    """Call Jellyfin API via SSH+kubectl to bypass Olares auth sidecar."""
    sep = "&" if "?" in path else "?"
    url = f"http://localhost:8096{path}{sep}api_key={JELLYFIN_API_KEY}"
    try:
        result = subprocess.run(
            ["ssh", "-o", "ConnectTimeout=3", "olares",
             f"kubectl exec -n jellyfin-vishinator deploy/jellyfin -c jellyfin -- curl -s '{url}'"],
            capture_output=True, text=True, timeout=15,
        )
        return json.loads(result.stdout) if result.returncode == 0 else {}
    except Exception:
        return {}


@router.get("/jellyfin/latest")
def jellyfin_latest():
    """Get recently added items from Jellyfin."""
    try:
        items = _jellyfin(f"/Users/{JELLYFIN_USER_ID}/Items/Latest?Limit=10&Fields=Overview,DateCreated")
        return [{"name": i.get("Name", "?"), "type": i.get("Type", "?"),
                 "series": i.get("SeriesName"), "date": i.get("DateCreated", "?")[:10],
                 "year": i.get("ProductionYear")} for i in (items if isinstance(items, list) else [])]
    except Exception as e:
        return {"error": str(e)}


@router.get("/sonarr/history")
def sonarr_history():
    """Recent Sonarr grabs/imports."""
    try:
        with httpx.Client(timeout=10) as client:
            r = client.get(f"{SONARR_URL}/api/v3/history",
                           headers={"X-Api-Key": SONARR_KEY},
                           params={"pageSize": 10, "sortKey": "date", "sortDirection": "descending"})
            r.raise_for_status()
            records = r.json().get("records", [])
            return [{"title": rec.get("sourceTitle", "?"), "event": rec.get("eventType", "?"),
                     "date": rec.get("date", "?")[:10],
                     "quality": rec.get("quality", {}).get("quality", {}).get("name", "?")}
                    for rec in records]
    except Exception as e:
        return {"error": str(e)}


@router.get("/radarr/history")
def radarr_history():
    """Recent Radarr grabs/imports."""
    try:
        with httpx.Client(timeout=10) as client:
            r = client.get(f"{RADARR_URL}/api/v3/history",
                           headers={"X-Api-Key": RADARR_KEY},
                           params={"pageSize": 10, "sortKey": "date", "sortDirection": "descending"})
            r.raise_for_status()
            records = r.json().get("records", [])
            return [{"title": rec.get("sourceTitle", "?"), "event": rec.get("eventType", "?"),
                     "date": rec.get("date", "?")[:10],
                     "quality": rec.get("quality", {}).get("quality", {}).get("name", "?")}
                    for rec in records]
    except Exception as e:
        return {"error": str(e)}


@router.get("/jellyfin/status")
def jellyfin_status():
    """Jellyfin server status: version, libraries, sessions."""
    info = _jellyfin("/System/Info")
    libraries = _jellyfin("/Library/VirtualFolders")
    sessions = _jellyfin("/Sessions")

    active = []
    idle_count = 0
    if isinstance(sessions, list):
        for s in sessions:
            if s.get("NowPlayingItem"):
                active.append({
                    "user": s.get("UserName", ""),
                    "client": s.get("Client", ""),
                    "device": s.get("DeviceName", ""),
                    "now_playing": s["NowPlayingItem"].get("Name", ""),
                    "type": s["NowPlayingItem"].get("Type", ""),
                })
            else:
                idle_count += 1

    return {
        "version": info.get("Version", "unknown"),
        "server_name": info.get("ServerName", "unknown"),
        "libraries": [{"name": lib.get("Name"), "type": lib.get("CollectionType", "")}
                      for lib in libraries] if isinstance(libraries, list) else [],
        "active_sessions": active,
        "idle_sessions": idle_count,
    }


@router.get("/sonarr/queue")
async def sonarr_queue():
    """Sonarr download queue."""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(
                f"{SONARR_URL}/api/v3/queue",
                headers={"X-Api-Key": SONARR_KEY},
            )
            return resp.json()
    except Exception as e:
        return {"error": str(e)}


@router.get("/radarr/queue")
async def radarr_queue():
    """Radarr download queue."""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(
                f"{RADARR_URL}/api/v3/queue",
                headers={"X-Api-Key": RADARR_KEY},
            )
            return resp.json()
    except Exception as e:
        return {"error": str(e)}


@router.get("/sabnzbd/queue")
async def sabnzbd_queue():
    """SABnzbd download queue."""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(
                f"{SABNZBD_URL}/api",
                params={"apikey": SABNZBD_KEY, "output": "json", "mode": "queue"},
            )
            return resp.json()
    except Exception as e:
        return {"error": str(e)}


# ---------------------------------------------------------------------------
# Prowlarr (indexer manager)
# ---------------------------------------------------------------------------
PROWLARR_URL = "http://192.168.0.200:9696"
PROWLARR_KEY = "58b5963e008243cf8cc4fae5276e68af"  # pragma: allowlist secret


@router.get("/prowlarr/stats")
async def prowlarr_stats():
    """Prowlarr indexer status."""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            r = await client.get(
                f"{PROWLARR_URL}/api/v1/indexer",
                headers={"X-Api-Key": PROWLARR_KEY},
            )
            r.raise_for_status()
            indexers = r.json()
            enabled = [i for i in indexers if i.get("enable")]
            return {
                "total": len(indexers),
                "enabled": len(enabled),
                "indexers": [
                    {"name": i["name"], "protocol": i.get("protocol", "?")}
                    for i in enabled[:10]
                ],
            }
    except Exception as e:
        return {"total": 0, "enabled": 0, "error": str(e)}


# ---------------------------------------------------------------------------
# Bazarr (subtitles)
# ---------------------------------------------------------------------------
BAZARR_URL = "http://192.168.0.200:6767"
BAZARR_KEY = "REDACTED_BAZARR_API_KEY"  # pragma: allowlist secret


@router.get("/bazarr/status")
async def bazarr_status():
    """Bazarr subtitle status."""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            r = await client.get(
                f"{BAZARR_URL}/api/system/status",
                headers={"X-Api-Key": BAZARR_KEY},
            )
            r.raise_for_status()
            status = r.json().get("data", r.json())
            w = await client.get(
                f"{BAZARR_URL}/api/badges",
                headers={"X-Api-Key": BAZARR_KEY},
            )
            badges = w.json() if w.status_code == 200 else {}
            return {
                "version": status.get("bazarr_version", "?"),
                "sonarr_signalr": badges.get("sonarr_signalr", "?"),
                "radarr_signalr": badges.get("radarr_signalr", "?"),
                "wanted_episodes": badges.get("episodes", 0),
                "wanted_movies": badges.get("movies", 0),
            }
    except Exception as e:
        return {"error": str(e)}


# ---------------------------------------------------------------------------
# Audiobookshelf
# ---------------------------------------------------------------------------
ABS_URL = "http://192.168.0.200:13378"
ABS_TOKEN = "REDACTED_TOKEN"  # pragma: allowlist secret


@router.get("/audiobookshelf/stats")
async def audiobookshelf_stats():
    """Audiobookshelf library stats."""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            libs = await client.get(
                f"{ABS_URL}/api/libraries",
                headers={"Authorization": f"Bearer {ABS_TOKEN}"},
            )
            libs.raise_for_status()
            libraries = libs.json().get("libraries", [])
            result = []
            for lib in libraries:
                result.append({
                    "name": lib.get("name", "?"),
                    "type": lib.get("mediaType", "?"),
                    "items": lib.get("stats", {}).get("totalItems", 0),
                })
            return {"libraries": result, "total": sum(l["items"] for l in result)}
    except Exception as e:
        return {"error": str(e)}


# ---------------------------------------------------------------------------
# Plex
# ---------------------------------------------------------------------------
PLEX_TOKEN = "REDACTED_TOKEN"  # pragma: allowlist secret
PLEX_SERVERS = {
    "Calypso": "http://192.168.0.250:32400",
    "Atlantis": "http://192.168.0.200:32400",
}


@router.get("/plex/status")
def plex_status():
    """Get Plex server status and active sessions."""
    import xml.etree.ElementTree as ET
    results = []
    for name, url in PLEX_SERVERS.items():
        try:
            with httpx.Client(timeout=5) as client:
                # Get sessions
                r = client.get(f"{url}/status/sessions", headers={"X-Plex-Token": PLEX_TOKEN})
                r.raise_for_status()
                root = ET.fromstring(r.text)
                sessions = []
                for v in root.iter("Video"):
                    session = {
                        "title": v.get("title", "?"),
                        "type": v.get("type", "?"),
                        "year": v.get("year"),
                    }
                    for p in v.iter("Player"):
                        session["player"] = p.get("product", "?")
                        session["device"] = p.get("device", "?")
                        session["state"] = p.get("state", "?")
                        session["local"] = p.get("local") == "1"
                    for s in v.iter("Session"):
                        session["bandwidth"] = s.get("bandwidth")
                        session["location"] = s.get("location")
                    for t in v.iter("REDACTED_APP_PASSWORD"):
                        session["transcode"] = True
                        session["video_decision"] = t.get("videoDecision")
                    sessions.append(session)

                # Get library counts
                lr = client.get(f"{url}/library/sections", headers={"X-Plex-Token": PLEX_TOKEN})
                libraries = []
                if lr.status_code == 200:
                    lroot = ET.fromstring(lr.text)
                    for d in lroot.iter("Directory"):
                        libraries.append({
                            "title": d.get("title", "?"),
                            "type": d.get("type", "?"),
                        })

                results.append({
                    "name": name,
                    "url": url,
                    "online": True,
                    "sessions": sessions,
                    "libraries": libraries,
                })
        except Exception as e:
            results.append({"name": name, "url": url, "online": False, "error": str(e), "sessions": [], "libraries": []})

    return {"servers": results}


# ---------------------------------------------------------------------------
# Deluge (torrent client)
# ---------------------------------------------------------------------------
DELUGE_URL = "http://192.168.0.200:8112"


@router.get("/deluge/status")
async def deluge_status():
    """Deluge torrent client status."""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            login = await client.post(
                f"{DELUGE_URL}/json",
                json={"method": "auth.login", "params": ["deluge"], "id": 1},
            )
            if login.status_code != 200:
                return {"available": False}
            stats = await client.post(
                f"{DELUGE_URL}/json",
                json={
                    "method": "web.update_ui",
                    "params": [
                        ["name", "state", "progress", "download_payload_rate",
                         "upload_payload_rate"],
                        {},
                    ],
                    "id": 2,
                },
            )
            data = stats.json().get("result", {})
            torrents = data.get("torrents", {})
            active = [
                t for t in torrents.values()
                if t.get("state") in ("Downloading", "Seeding")
            ]
            return {
                "available": True,
                "total": len(torrents),
                "active": len(active),
                "downloading": len(
                    [t for t in torrents.values() if t.get("state") == "Downloading"]
                ),
                "seeding": len(
                    [t for t in torrents.values() if t.get("state") == "Seeding"]
                ),
            }
    except Exception as e:
        return {"available": False, "error": str(e)}
199
dashboard/api/routers/network.py
Normal file
@@ -0,0 +1,199 @@
"""Network / Headscale / AdGuard routes."""

from fastapi import APIRouter
import subprocess
import json
import httpx

router = APIRouter(tags=["network"])

CLOUDFLARE_TOKEN = "REDACTED_TOKEN"  # pragma: allowlist secret
CLOUDFLARE_ZONE_ID = "4dbd15d096d71101b7c0c6362b307a66"
AUTHENTIK_URL = "https://sso.vish.gg"
AUTHENTIK_TOKEN = "REDACTED_TOKEN"  # pragma: allowlist secret
GITEA_URL = "https://git.vish.gg"
GITEA_TOKEN = "REDACTED_TOKEN"  # pragma: allowlist secret

ADGUARD_URL = "http://192.168.0.250:9080"
ADGUARD_USER = "vish"
ADGUARD_PASS = "REDACTED_PASSWORD"


def _adguard_get(path):
    with httpx.Client(timeout=10) as client:
        client.post(f"{ADGUARD_URL}/control/login", json={"name": ADGUARD_USER, "password": ADGUARD_PASS})
        r = client.get(f"{ADGUARD_URL}/control{path}")
        r.raise_for_status()
        return r.json() if r.content else {}


def _parse_headscale_time(val) -> str:
    """Convert headscale timestamp (protobuf or string) to ISO format."""
    if not val:
        return ""
    if isinstance(val, dict) and "seconds" in val:
        from datetime import datetime, timezone
        return datetime.fromtimestamp(val["seconds"], tz=timezone.utc).isoformat()
    if isinstance(val, str):
        return val[:19]
    return ""


@router.get("/network/headscale")
def headscale_nodes():
    """List Headscale nodes."""
    result = subprocess.run(
        ["ssh", "-o", "ConnectTimeout=3", "calypso",
         "sudo /usr/local/bin/docker exec headscale headscale nodes list -o json"],
        capture_output=True, text=True, timeout=15,
    )
    if result.returncode != 0:
        return {"nodes": [], "error": result.stderr.strip()}
    try:
        nodes = json.loads(result.stdout)
    except json.JSONDecodeError:
        return {"nodes": [], "error": "Invalid JSON from headscale"}
    online_count = sum(1 for n in nodes if n.get("online"))
    return {
        "nodes": [
            {"name": n.get("given_name") or n.get("givenName") or n.get("name", "?"),
             "ip": (n.get("ip_addresses") or n.get("ipAddresses") or ["?"])[0],
             "online": n.get("online", False),
             "last_seen": _parse_headscale_time(n.get("last_seen") or n.get("lastSeen"))}
            for n in nodes
        ],
        "total": len(nodes),
        "online": online_count,
    }


@router.get("/network/adguard")
def adguard_stats():
    """Get AdGuard DNS stats."""
    try:
        stats = _adguard_get("/stats")
        return {
            "total_queries": stats.get("num_dns_queries", 0),
            "blocked": stats.get("num_blocked_filtering", 0),
            "avg_time": stats.get("avg_processing_time", 0),
        }
    except Exception as e:
        return {"error": str(e)}


@router.get("/network/adguard/rewrites")
def adguard_rewrites():
    """List AdGuard DNS rewrites."""
    try:
        data = _adguard_get("/rewrite/list")
        return [{"domain": r.get("domain", ""), "answer": r.get("answer", "")} for r in (data or [])]
    except Exception as e:
        return {"error": str(e)}


@router.get("/network/cloudflare")
def cloudflare_stats():
    """Cloudflare DNS record summary."""
    try:
        with httpx.Client(timeout=10) as client:
            r = client.get(f"https://api.cloudflare.com/client/v4/zones/{CLOUDFLARE_ZONE_ID}/dns_records",
                           headers={"Authorization": f"Bearer {CLOUDFLARE_TOKEN}"},
                           params={"per_page": 100})
            r.raise_for_status()
            records = r.json().get("result", [])
            proxied = sum(1 for rec in records if rec.get("proxied"))
            types = {}
            for rec in records:
                t = rec.get("type", "?")
                types[t] = types.get(t, 0) + 1
            return {"total": len(records), "proxied": proxied, "dns_only": len(records) - proxied, "types": types}
    except Exception as e:
        return {"error": str(e)}


@router.get("/network/authentik")
def authentik_info():
    """Authentik users, sessions, and recent events."""
    try:
        with httpx.Client(timeout=10, verify=False) as client:
            headers = {"Authorization": f"Bearer {AUTHENTIK_TOKEN}"}

            # Users
            ur = client.get(f"{AUTHENTIK_URL}/api/v3/core/users/", headers=headers, params={"page_size": 20})
            users = []
            if ur.status_code == 200:
                for u in ur.json().get("results", []):
                    if u.get("username", "").startswith("ak-"):
                        continue  # Skip service accounts
                    users.append({
                        "username": u.get("username", "?"),
                        "last_login": u.get("last_login", "")[:19] if u.get("last_login") else "never",
                        "active": u.get("is_active", False),
                    })

            # Sessions
            sr = client.get(f"{AUTHENTIK_URL}/api/v3/core/authenticated_sessions/", headers=headers)
            session_count = sr.json().get("pagination", {}).get("count", 0) if sr.status_code == 200 else 0

            # Recent events (skip noisy secret_rotate)
            er = client.get(f"{AUTHENTIK_URL}/api/v3/events/events/", headers=headers,
                            params={"page_size": 20, "ordering": "-created"})
            events = []
            if er.status_code == 200:
                for e in er.json().get("results", []):
                    action = e.get("action", "?")
                    if action in ("secret_rotate",):
                        continue
                    user = e.get("user", {}).get("username") or e.get("context", {}).get("username", "system")
                    events.append({
                        "action": action,
                        "user": user,
                        "created": e.get("created", "?")[:19],
                    })
                    if len(events) >= 5:
                        break

            return {
                "users": users,
                "active_sessions": session_count,
                "recent_events": events,
            }
    except Exception as e:
        return {"error": str(e)}


@router.get("/network/gitea")
def gitea_activity():
    """Recent Gitea commits and open PRs."""
    try:
        with httpx.Client(timeout=10) as client:
            # Recent commits
            cr = client.get(f"{GITEA_URL}/api/v1/repos/vish/homelab/commits",
                            headers={"Authorization": f"token {GITEA_TOKEN}"},
                            params={"limit": 5, "sha": "main"})
            commits = []
            if cr.status_code == 200:
                for c in cr.json()[:5]:
                    commits.append({
                        "sha": c.get("sha", "?")[:7],
                        "message": c.get("commit", {}).get("message", "?").split("\n")[0][:80],
                        "date": c.get("commit", {}).get("committer", {}).get("date", "?")[:10],
                        "author": c.get("commit", {}).get("author", {}).get("name", "?"),
                    })

            # Open PRs
            pr = client.get(f"{GITEA_URL}/api/v1/repos/vish/homelab/pulls",
                            headers={"Authorization": f"token {GITEA_TOKEN}"},
                            params={"state": "open", "limit": 5})
            prs = []
            if pr.status_code == 200:
                for p in pr.json():
                    prs.append({
                        "number": p.get("number"),
                        "title": p.get("title", "?"),
                        "user": p.get("user", {}).get("login", "?"),
                    })

            return {"commits": commits, "open_prs": prs}
    except Exception as e:
        return {"error": str(e)}
93
dashboard/api/routers/olares.py
Normal file
@@ -0,0 +1,93 @@
"""Olares K3s pod listing and GPU status."""

import subprocess
from fastapi import APIRouter, Query

router = APIRouter(tags=["olares"])


def _ssh_olares(cmd: str, timeout: int = 10) -> str:
    """Run a command on olares via SSH."""
    result = subprocess.run(
        ["ssh", "-o", "ConnectTimeout=3", "olares", cmd],
        capture_output=True, text=True, timeout=timeout,
    )
    return result.stdout if result.returncode == 0 else ""


@router.get("/olares/pods")
def olares_pods(namespace: str | None = Query(None)):
    """List K3s pods on olares."""
    if namespace:
        cmd = f"kubectl get pods -n {namespace} -o wide --no-headers"
    else:
        cmd = "kubectl get pods -A -o wide --no-headers"

    output = _ssh_olares(cmd, timeout=15)
    if not output:
        return []

    pods = []
    for line in output.strip().split("\n"):
        parts = line.split()
        if not parts:
            continue
        if namespace:
            # No namespace column when -n is used
            if len(parts) >= 7:
                pods.append({
                    "namespace": namespace,
                    "name": parts[0],
                    "ready": parts[1],
                    "status": parts[2],
                    "restarts": parts[3],
                    "age": parts[4],
                    "ip": parts[5] if len(parts) > 5 else "",
                    "node": parts[6] if len(parts) > 6 else "",
                })
        else:
            # Has namespace column
            if len(parts) >= 8:
                pods.append({
                    "namespace": parts[0],
                    "name": parts[1],
                    "ready": parts[2],
                    "status": parts[3],
                    "restarts": parts[4],
                    "age": parts[5],
                    "ip": parts[6] if len(parts) > 6 else "",
                    "node": parts[7] if len(parts) > 7 else "",
                })
    return pods


@router.get("/olares/gpu")
def olares_gpu():
    """GPU status from olares."""
    output = _ssh_olares(
        "nvidia-smi --query-gpu=name,temperature.gpu,power.draw,power.limit,"
        "memory.used,memory.total,utilization.gpu --format=csv,noheader,nounits"
    )
    if not output:
        return {"available": False}

    parts = [p.strip() for p in output.strip().split(",")]

    def _float(val: str) -> float | None:
        try:
            return float(val)
        except (ValueError, TypeError):
            return None

    if len(parts) >= 7:
        return {
            "available": True,
            "name": parts[0],
            "temp_c": _float(parts[1]),
            "power_draw_w": _float(parts[2]),
            "power_limit_w": _float(parts[3]),
            "memory_used_mb": _float(parts[4]),
            "memory_total_mb": _float(parts[5]),
            "utilization_pct": _float(parts[6]),
        }
    return {"available": False}
628
dashboard/api/routers/overview.py
Normal file
@@ -0,0 +1,628 @@
|
||||
"""Overview stats and SSE activity stream."""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sqlite3
|
||||
from datetime import date, datetime, timezone
|
||||
from fastapi import APIRouter
|
||||
from sse_starlette.sse import EventSourceResponse
|
||||
import httpx
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from lib_bridge import (
|
||||
portainer_list_containers, ENDPOINTS, ollama_available,
|
||||
GMAIL_DB, DVISH_DB, PROTON_DB, RESTART_DB, LOG_DIR, OLLAMA_URL,
|
||||
prom_query,
|
||||
)
|
||||
from log_parser import get_recent_events, tail_logs, get_new_lines
|
||||
|
||||
router = APIRouter(tags=["overview"])
|
||||
|
||||
|
||||
def _count_today_emails(db_path: Path) -> int:
|
||||
"""Count emails processed today from a processed.db file."""
|
||||
if not db_path.exists():
|
||||
return 0
|
||||
try:
|
||||
today = date.today().isoformat()
|
||||
conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
|
||||
cur = conn.execute(
|
||||
"SELECT COUNT(*) FROM processed WHERE processed_at LIKE ?",
|
||||
(f"{today}%",),
|
||||
)
|
||||
count = cur.fetchone()[0]
|
||||
conn.close()
|
||||
return count
|
||||
except Exception:
|
||||
return 0
|
||||
|
||||
|
||||
def _count_unhealthy(db_path: Path) -> int:
|
||||
"""Count unhealthy containers from stack-restart.db."""
|
||||
if not db_path.exists():
|
||||
return 0
|
||||
try:
|
||||
conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
|
||||
cur = conn.execute("SELECT COUNT(*) FROM unhealthy_tracking")
|
||||
count = cur.fetchone()[0]
|
||||
conn.close()
|
||||
return count
|
||||
except Exception:
|
||||
return 0
|
||||
|
||||
|
||||
def _gpu_info() -> dict:
|
||||
"""Get GPU info from olares via SSH."""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["ssh", "-o", "ConnectTimeout=3", "olares",
|
||||
"nvidia-smi --query-gpu=temperature.gpu,power.draw,power.limit,"
|
||||
"memory.used,memory.total,utilization.gpu --format=csv,noheader,nounits"],
|
||||
capture_output=True, text=True, timeout=10,
|
||||
)
|
||||
if result.returncode != 0:
|
||||
return {"available": False}
|
||||
parts = [p.strip() for p in result.stdout.strip().split(",")]
|
||||
|
||||
def _f(v):
|
||||
try:
|
||||
return float(v)
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
if len(parts) >= 6:
|
||||
return {
|
||||
"available": True,
|
||||
"temp_c": _f(parts[0]),
|
||||
"power_draw_w": _f(parts[1]),
|
||||
"power_limit_w": _f(parts[2]),
|
||||
"memory_used_mb": _f(parts[3]),
|
||||
"memory_total_mb": _f(parts[4]),
|
||||
"utilization_pct": _f(parts[5]),
|
||||
}
|
||||
except Exception:
|
||||
pass
|
||||
return {"available": False}
|
||||
|
||||
|
||||
@router.get("/stats/overview")
|
||||
def stats_overview():
|
||||
"""Aggregate overview stats."""
|
||||
# Container counts
|
||||
container_counts = {}
|
||||
total = 0
|
||||
for ep_name in ENDPOINTS:
|
||||
try:
|
||||
containers = portainer_list_containers(ep_name)
|
||||
running = sum(1 for c in containers if c.get("State") == "running")
|
||||
container_counts[ep_name] = {"total": len(containers), "running": running}
|
||||
total += len(containers)
|
||||
except Exception:
|
||||
container_counts[ep_name] = {"total": 0, "running": 0, "error": True}
|
||||
|
||||
# GPU
|
||||
gpu = _gpu_info()
|
||||
|
||||
# Email counts
|
||||
email_today = {
|
||||
"gmail": _count_today_emails(GMAIL_DB),
|
||||
"dvish": _count_today_emails(DVISH_DB),
|
||||
"proton": _count_today_emails(PROTON_DB),
|
||||
}
|
||||
email_today["total"] = sum(email_today.values())
|
||||
|
||||
# Unhealthy
|
||||
unhealthy = _count_unhealthy(RESTART_DB)
|
||||
|
||||
# Ollama
|
||||
ollama_up = ollama_available(OLLAMA_URL)
|
||||
|
||||
return {
|
||||
"containers": {"total": total, "by_endpoint": container_counts},
|
||||
"gpu": gpu,
|
||||
"email_today": email_today,
|
||||
"unhealthy_count": unhealthy,
|
||||
"ollama_available": ollama_up,
|
||||
}
|
||||
|
||||
|
||||
@router.get("/activity")
|
||||
async def activity_stream():
|
||||
"""SSE stream of today's automation events."""
|
||||
|
||||
async def event_generator():
|
||||
# Send initial batch
|
||||
events = get_recent_events(LOG_DIR)
|
||||
yield {"event": "init", "data": json.dumps(events)}
|
||||
|
||||
# Poll for new events
|
||||
positions = tail_logs(LOG_DIR)
|
||||
while True:
|
||||
await asyncio.sleep(5)
|
||||
new_events, positions = get_new_lines(LOG_DIR, positions)
|
||||
if new_events:
|
||||
yield {"event": "update", "data": json.dumps(new_events)}
|
||||
|
||||
return EventSourceResponse(event_generator())
|
||||
|
||||
|
||||
@router.post("/actions/pause-organizers")
|
||||
def pause_organizers():
|
||||
"""Pause all email organizer cron jobs."""
|
||||
result = subprocess.run(
|
||||
["/home/homelab/organized/repos/homelab/scripts/gmail-organizer-ctl.sh", "stop"],
|
||||
capture_output=True, text=True, timeout=10,
|
||||
)
|
||||
return {"success": result.returncode == 0, "output": result.stdout.strip()}
|
||||
|
||||
|
||||
@router.post("/actions/resume-organizers")
|
||||
def resume_organizers():
|
||||
"""Resume all email organizer cron jobs."""
|
||||
result = subprocess.run(
|
||||
["/home/homelab/organized/repos/homelab/scripts/gmail-organizer-ctl.sh", "start"],
|
||||
capture_output=True, text=True, timeout=10,
|
||||
)
|
||||
return {"success": result.returncode == 0, "output": result.stdout.strip()}
|
||||
|
||||
|
||||
@router.get("/actions/organizer-status")
|
||||
def organizer_status():
|
||||
"""Check if organizers are running or paused."""
|
||||
result = subprocess.run(
|
||||
["/home/homelab/organized/repos/homelab/scripts/gmail-organizer-ctl.sh", "status"],
|
||||
capture_output=True, text=True, timeout=10,
|
||||
)
|
||||
return {"output": result.stdout.strip()}
|
||||
|
||||
|
||||
@router.get("/calendar")
|
||||
def get_calendar_events():
|
||||
"""Fetch upcoming events from Baikal CalDAV."""
|
||||
import re
|
||||
from datetime import datetime, timezone
|
||||
|
||||
BAIKAL_URL = "http://192.168.0.200:12852/dav.php/calendars/vish/default/"
|
||||
BAIKAL_USER = "vish"
|
||||
BAIKAL_PASS = "REDACTED_PASSWORD"
|
||||
|
||||
today = datetime.now(timezone.utc).strftime("%Y%m%dT000000Z")
|
||||
body = f'''<?xml version="1.0" encoding="UTF-8"?>
|
||||
<c:calendar-query xmlns:d="DAV:" xmlns:c="urn:ietf:params:xml:ns:caldav">
|
||||
<d:prop><d:getetag/><c:calendar-data/></d:prop>
|
||||
<c:filter>
|
||||
<c:comp-filter name="VCALENDAR">
|
||||
<c:comp-filter name="VEVENT">
|
||||
<c:time-range start="{today}"/>
|
||||
</c:comp-filter>
|
||||
</c:comp-filter>
|
||||
</c:filter>
|
||||
</c:calendar-query>'''
|
||||
|
||||
try:
|
||||
auth = httpx.DigestAuth(BAIKAL_USER, BAIKAL_PASS)
|
||||
with httpx.Client(timeout=10) as client:
|
||||
r = client.request("REPORT", BAIKAL_URL, content=body,
|
||||
headers={"Content-Type": "application/xml", "Depth": "1"}, auth=auth)
|
||||
r.raise_for_status()
|
||||
|
||||
# Parse iCal events
|
||||
summaries = re.findall(r'SUMMARY:(.*?)(?:\r?\n)', r.text)
|
||||
starts = re.findall(r'DTSTART[^:]*:(.*?)(?:\r?\n)', r.text)
|
||||
locations = re.findall(r'LOCATION:(.*?)(?:\r?\n)', r.text)
|
||||
|
||||
events = []
|
||||
now = datetime.now(timezone.utc)
|
||||
for i, (start, summary) in enumerate(zip(starts, summaries)):
|
||||
# Parse date — handle both date and datetime formats
|
||||
try:
|
||||
if len(start) == 8:
|
||||
dt = datetime.strptime(start, "%Y%m%d").replace(tzinfo=timezone.utc)
|
||||
else:
|
||||
clean = start.replace("Z", "")
|
||||
dt = datetime.strptime(clean[:15], "%Y%m%dT%H%M%S").replace(tzinfo=timezone.utc)
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
# Only future events
|
||||
if dt < now:
|
||||
continue
|
||||
|
||||
# Clean up summary (unescape iCal)
|
||||
clean_summary = summary.replace("\\,", ",").replace("\\;", ";").replace("&", "&")
|
||||
|
||||
events.append({
|
||||
"summary": clean_summary,
|
||||
"start": dt.isoformat(),
|
||||
"date": dt.strftime("%b %d"),
|
||||
"time": dt.strftime("%I:%M %p") if len(start) > 8 else "All day",
|
||||
"location": locations[i].replace("\\,", ",").replace("\\n", ", ") if i < len(locations) else None,
|
||||
})
|
||||
|
||||
# Sort by date, limit to next 8
|
||||
events.sort(key=lambda e: e["start"])
|
||||
return {"events": events[:8], "total": len(events)}
|
||||
except Exception as e:
|
||||
return {"events": [], "error": str(e)}
|
||||
|
||||
|
||||
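# Illustrative sketch of the two DTSTART shapes the parser above handles; the
# sample values are made up. All-day events carry a bare date, timed events a
# UTC datetime ending in "Z".
def _example_parse_dtstart():
    from datetime import datetime, timezone
    for start in ("20260405", "20260405T183000Z"):
        if len(start) == 8:
            dt = datetime.strptime(start, "%Y%m%d").replace(tzinfo=timezone.utc)
        else:
            dt = datetime.strptime(start.replace("Z", "")[:15], "%Y%m%dT%H%M%S").replace(tzinfo=timezone.utc)
        print(start, "->", dt.isoformat())

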
def _search_repo_docs(query: str, max_chars: int = 2000) -> str:
    """Search repo docs/scripts for relevant snippets. Lightweight keyword match."""
    import re
    repo = Path("/app/scripts").parent if Path("/app/scripts").exists() else Path(__file__).parent.parent.parent.parent
    search_dirs = [repo / "docs" / "services" / "individual", repo / "scripts", repo / "docs"]

    keywords = [w.lower() for w in re.findall(r'\w{3,}', query) if w.lower() not in {
        "the", "how", "what", "does", "can", "are", "this", "that", "have",
        "many", "much", "about", "from", "with", "your", "there", "which",
    }]
    if not keywords:
        return ""
    # Add aliases so related terms find each other
    aliases = {"tailscale": "headscale", "headscale": "tailscale", "gpu": "nvidia",
               "jellyfin": "olares", "containers": "portainer", "dns": "adguard"}
    extra = [aliases[k] for k in keywords if k in aliases]
    keywords = list(set(keywords + extra))

    scored = []
    for search_dir in search_dirs:
        if not search_dir.exists():
            continue
        for f in search_dir.rglob("*.md"):
            try:
                text = f.read_text(errors="ignore")[:8000]
                score = sum(text.lower().count(kw) for kw in keywords)
                if score > 0:
                    scored.append((score, f, text))
            except Exception:
                continue
        for f in search_dir.rglob("*.py"):
            if f.name.startswith("__"):
                continue
            try:
                # Only read the docstring/header, not full scripts
                text = f.read_text(errors="ignore")[:1000]
                score = sum(text.lower().count(kw) for kw in keywords)
                if score > 0:
                    scored.append((score, f, text))
            except Exception:
                continue

    if not scored:
        return ""

    scored.sort(key=lambda x: -x[0])
    snippets = []
    total = 0
    for _, path, text in scored[:2]:  # max 2 files
        # Trim to relevant section - find paragraphs with keywords
        lines = text.split("\n")
        relevant = []
        for i, line in enumerate(lines):
            if any(kw in line.lower() for kw in keywords):
                start = max(0, i - 2)
                end = min(len(lines), i + 5)
                relevant.extend(lines[start:end])
        snippet = "\n".join(dict.fromkeys(relevant))[:1000]  # dedup, cap at 1K
        if not snippet.strip():
            snippet = text[:500]
        snippets.append(f"[{path.name}]\n{snippet}")
        total += len(snippet)
        if total >= max_chars:
            break

    return "\n\n".join(snippets)


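# Hedged example of what the helper above is used for (it feeds /chat below).
# The query string is hypothetical and the output depends entirely on which
# docs exist under the repo at runtime.
def _example_search_docs():
    snippet = _search_repo_docs("how is headscale configured?", max_chars=500)
    print(snippet or "(no matching docs found)")

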
@router.post("/chat")
|
||||
def chat_with_ollama(body: dict):
|
||||
"""Chat with Ollama using live homelab context + repo docs."""
|
||||
message = body.get("message", "")
|
||||
if not message:
|
||||
return {"error": "No message provided"}
|
||||
|
||||
# Gather live context from multiple sources
|
||||
context_parts = []
|
||||
try:
|
||||
overview = stats_overview()
|
||||
containers = overview.get("containers", {})
|
||||
gpu = overview.get("gpu", {})
|
||||
context_parts.append(
|
||||
f"Containers: {containers.get('total', '?')} total across endpoints: "
|
||||
+ ", ".join(f"{k} ({v.get('total','?')} containers, {v.get('running','?')} running)"
|
||||
for k, v in containers.get("by_endpoint", {}).items())
|
||||
)
|
||||
if gpu.get("available"):
|
||||
context_parts.append(
|
||||
f"GPU: {gpu.get('name','RTX 5090')}, {gpu.get('temp_c','?')}°C, "
|
||||
f"{gpu.get('memory_used_mb','?')}/{gpu.get('memory_total_mb','?')} MB VRAM, "
|
||||
f"{gpu.get('utilization_pct','?')}% util"
|
||||
)
|
||||
email_data = overview.get("email_today", {})
|
||||
if isinstance(email_data, dict):
|
||||
context_parts.append(f"Emails today: {email_data.get('total', 0)} (dvish: {email_data.get('dvish', 0)}, proton: {email_data.get('proton', 0)})")
|
||||
context_parts.append(f"Ollama: {'online' if overview.get('ollama_available') else 'offline'}")
|
||||
context_parts.append(f"Unhealthy containers: {overview.get('unhealthy_count', 0)}")
|
||||
except Exception:
|
||||
context_parts.append("(could not fetch live stats)")
|
||||
|
||||
    # Fetch Headscale nodes if question mentions network/tailscale/headscale/nodes
    msg_lower = message.lower()
    if any(kw in msg_lower for kw in ["tailscale", "headscale", "node", "mesh", "vpn", "network"]):
        try:
            import json as _json
            hs_result = subprocess.run(
                ["ssh", "-o", "ConnectTimeout=3", "calypso",
                 "/usr/local/bin/docker exec headscale headscale nodes list -o json"],
                capture_output=True, text=True, timeout=10,
            )
            if hs_result.returncode == 0:
                nodes = _json.loads(hs_result.stdout)
                online = [n for n in nodes if n.get("online")]
                node_names = ", ".join(n.get("givenName") or n.get("name", "?") for n in nodes)
                context_parts.append(f"Headscale/Tailscale: {len(nodes)} nodes ({len(online)} online): {node_names}")
            else:
                context_parts.append("Headscale: 26 nodes (could not fetch live list, but documented as 26)")
        except Exception:
            context_parts.append("Headscale: 26 nodes (documented, could not fetch live)")

    # Fetch Jellyfin status if question mentions media/jellyfin/streaming
    if any(kw in msg_lower for kw in ["jellyfin", "media", "stream", "movie", "tv", "playing"]):
        try:
            from routers.media import jellyfin_status
            jf = jellyfin_status()
            libs = ", ".join(f"{lib['name']} ({lib['type']})" for lib in jf.get("libraries", []))
            active = jf.get("active_sessions", [])
            playing = ", ".join(f"{s['title']} by {s['user']}" for s in active) if active else "nothing"
            context_parts.append(f"Jellyfin v{jf.get('version','?')}: libraries={libs}. Now playing: {playing}")
        except Exception:
            pass

    # Fetch AdGuard stats if question mentions dns/adguard/blocked
    if any(kw in msg_lower for kw in ["dns", "adguard", "blocked", "queries", "domain"]):
        try:
            from routers.network import adguard_stats
            ag = adguard_stats()
            context_parts.append(f"AdGuard DNS: {ag.get('total_queries', '?')} total queries, {ag.get('blocked', '?')} blocked, {ag.get('avg_time', '?')}s avg response")
        except Exception:
            pass

    system_context = (
        "You are a homelab assistant. You have direct access to the following live infrastructure data:\n\n"
        + "\n".join(f"- {p}" for p in context_parts)
        + "\n\n"
        "Homelab hosts: Atlantis (Synology NAS, media/arr stack), Calypso (Synology, AdGuard DNS, Headscale, Authentik SSO), "
        "Olares (K3s, RTX 5090, Jellyfin, Ollama), NUC (lightweight services), RPi5 (Uptime Kuma), "
        "homelab-vm (Prometheus, Grafana, dashboard), Guava (TrueNAS), Seattle (remote VM), matrix-ubuntu (NPM, CrowdSec).\n\n"
        "Services: Sonarr, Radarr, SABnzbd, Deluge, Prowlarr, Bazarr, Lidarr, Tdarr, Audiobookshelf, LazyLibrarian on Atlantis. "
        "Jellyfin + Ollama on Olares with GPU transcoding. 3 email auto-organizers (Gmail x2 + Proton). "
        "11 Ollama-powered automation scripts. Gitea CI with AI PR reviewer.\n\n"
        "IMPORTANT: Answer using the LIVE DATA above, not general knowledge. The container counts are REAL numbers from Portainer right now. "
        "When asked 'how many containers on atlantis' answer with the exact number from the live data (e.g. 59). Be concise."
    )

    # Search repo docs for relevant context (max 2K chars)
    doc_context = _search_repo_docs(message, max_chars=2000)
    if doc_context:
        system_context += f"\n\nRelevant documentation:\n{doc_context}"

    prompt = f"{system_context}\n\nUser: {message}\nAssistant:"

    try:
        from lib_bridge import ollama_available as _ollama_check
        if not _ollama_check():
            return {"response": "Ollama is currently offline. Try again later."}
        import sys as _sys
        scripts_dir = str(Path("/app/scripts") if Path("/app/scripts").exists() else Path(__file__).parent.parent.parent / "scripts")
        if scripts_dir not in _sys.path:
            _sys.path.insert(0, scripts_dir)
        from lib.ollama import ollama_generate
        response = ollama_generate(prompt, num_predict=800, timeout=90)
        return {"response": response}
    except Exception as e:
        return {"error": str(e)}


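# Hedged sketch of calling the chat endpoint from a script; the base URL and
# router prefix are assumptions, and the sample question is made up.
def _example_chat(base: str = "http://localhost:8888"):
    import httpx
    r = httpx.post(f"{base}/chat", json={"message": "how many containers are on atlantis?"}, timeout=120)
    data = r.json()
    print(data.get("response") or data.get("error"))

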
# ---------------------------------------------------------------------------
# Health score
# ---------------------------------------------------------------------------

@router.get("/health-score")
def health_score():
    """Calculate aggregate system health score 0-100."""
    score = 100
    details = []

    try:
        overview = stats_overview()
        containers = overview.get("containers", {})
        by_ep = containers.get("by_endpoint", {})

        # Container health (40 points)
        not_running = sum(ep.get("total", 0) - ep.get("running", 0) for ep in by_ep.values())
        if not_running > 0:
            penalty = min(40, not_running * 4)
            score -= penalty
            details.append(f"-{penalty}: {not_running} containers not running")
        else:
            details.append("+40: all containers running")

        # Unhealthy containers (20 points)
        unhealthy = overview.get("unhealthy_count", 0)
        if unhealthy > 0:
            penalty = min(20, unhealthy * 10)
            score -= penalty
            details.append(f"-{penalty}: {unhealthy} unhealthy containers")
        else:
            details.append("+20: no unhealthy containers")

        # GPU available (10 points)
        gpu = overview.get("gpu", {})
        if not gpu.get("available"):
            score -= 10
            details.append("-10: GPU unavailable")
        else:
            details.append("+10: GPU online")

        # Ollama available (10 points)
        if not overview.get("ollama_available"):
            score -= 10
            details.append("-10: Ollama offline")
        else:
            details.append("+10: Ollama online")

        # Backup status (10 points)
        backup_log = Path("/app/logs" if Path("/app/logs").exists() else "/tmp") / "gmail-backup-daily.log"
        if backup_log.exists():
            with open(backup_log) as f:
                content = f.read()
            if "ERROR" in content[-2000:]:
                score -= 10
                details.append("-10: backup has errors")
            else:
                details.append("+10: backup OK")
        else:
            score -= 5
            details.append("-5: no backup log found")

        # Config drift (10 points)
        drift_log = Path("/app/logs" if Path("/app/logs").exists() else "/tmp") / "config-drift.log"
        if drift_log.exists():
            with open(drift_log) as f:
                lines = f.readlines()
            last_lines = "".join(lines[-20:])
            if "drifts" in last_lines.lower() and "no drifts" not in last_lines.lower():
                score -= 10
                details.append("-10: config drift detected")
            else:
                details.append("+10: no drift")
        else:
            details.append("+10: no drift (no log)")
    except Exception as e:
        details.append(f"Error calculating: {e}")

    return {
        "score": max(0, min(100, score)),
        "grade": "A" if score >= 90 else "B" if score >= 80 else "C" if score >= 70 else "D" if score >= 60 else "F",
        "details": details,
    }


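# Worked example of the weighting above, with hypothetical inputs: two stopped
# containers and one unhealthy container, everything else healthy.
def _example_health_score_math():
    score = 100 - min(40, 2 * 4) - min(20, 1 * 10)  # 100 - 8 - 10 = 82
    grade = "A" if score >= 90 else "B" if score >= 80 else "C" if score >= 70 else "D" if score >= 60 else "F"
    return score, grade  # (82, "B")

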
# ---------------------------------------------------------------------------
# Quick actions
# ---------------------------------------------------------------------------

@router.post("/actions/restart-jellyfin")
def restart_jellyfin():
    """Restart Jellyfin on Olares."""
    result = subprocess.run(
        ["ssh", "-o", "ConnectTimeout=3", "olares",
         "kubectl rollout restart deployment/jellyfin -n jellyfin-vishinator"],
        capture_output=True, text=True, timeout=15)
    return {"success": result.returncode == 0, "output": result.stdout.strip() or result.stderr.strip()}


@router.post("/actions/restart-ollama")
def restart_ollama():
    """Restart Ollama on Olares."""
    result = subprocess.run(
        ["ssh", "-o", "ConnectTimeout=3", "olares",
         "kubectl rollout restart deployment/ollama -n ollamaserver-shared"],
        capture_output=True, text=True, timeout=15)
    return {"success": result.returncode == 0, "output": result.stdout.strip() or result.stderr.strip()}


@router.post("/actions/run-backup")
def run_backup():
    """Trigger daily Gmail backup."""
    result = subprocess.run(
        ["/home/homelab/organized/repos/homelab/scripts/gmail-backup-daily.sh"],
        capture_output=True, text=True, timeout=300)
    return {"success": result.returncode == 0, "output": result.stdout.strip()[-500:]}


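# Hedged sketch of triggering the backup action above and skimming its output.
# The base URL is an assumption; the endpoint blocks for up to five minutes,
# so the client timeout is set a little above that.
def _example_run_backup(base: str = "http://localhost:8888"):
    import httpx
    result = httpx.post(f"{base}/actions/run-backup", timeout=320).json()
    print("ok" if result.get("success") else "failed", result.get("output", "")[-200:])

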
# ---------------------------------------------------------------------------
# Automation timeline
# ---------------------------------------------------------------------------

@router.get("/automation-timeline")
def automation_timeline():
    """When each automation last ran."""
    log_dir = Path("/app/logs") if Path("/app/logs").exists() else Path("/tmp")

    automations = {
        "Email (lz)": "gmail-organizer.log",
        "Email (dvish)": "gmail-organizer-dvish.log",
        "Email (proton)": "proton-organizer.log",
        "Stack Restart": "stack-restart.log",
        "Backup": "gmail-backup-daily.log",
        "Backup Validator": "backup-validator.log",
        "Disk Predictor": "disk-predictor.log",
        "Config Drift": "config-drift.log",
        "Receipt Tracker": "receipt-tracker.log",
        "Changelog": "changelog-generator.log",
        "Email Digest": "email-digest.log",
    }

    timeline = []
    for name, filename in automations.items():
        path = log_dir / filename
        if path.exists():
            mtime = os.path.getmtime(path)
            last_modified = datetime.fromtimestamp(mtime, tz=timezone.utc).isoformat()
            # Get last line with a timestamp
            with open(path) as f:
                lines = f.readlines()
            last_run = None
            for line in reversed(lines[-50:]):
                if line[:4].isdigit():
                    last_run = line[:19]
                    break
            timeline.append({"name": name, "last_run": last_run, "last_modified": last_modified, "exists": True})
        else:
            timeline.append({"name": name, "exists": False})

    return timeline


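# Hedged sketch of how a caller might flag stale automations from the timeline
# above; the 26-hour cutoff is an arbitrary example, not a documented threshold.
def _example_stale_automations(hours: int = 26):
    from datetime import datetime, timezone, timedelta
    cutoff = datetime.now(timezone.utc) - timedelta(hours=hours)
    stale = []
    for entry in automation_timeline():
        if not entry.get("exists") or datetime.fromisoformat(entry["last_modified"]) < cutoff:
            stale.append(entry["name"])
    return stale

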
# ---------------------------------------------------------------------------
# Disk usage (via Prometheus)
# ---------------------------------------------------------------------------

@router.get("/disk-usage")
def disk_usage():
    """Disk usage from Prometheus."""
    try:
        avail = prom_query('node_filesystem_avail_bytes{fstype!~"tmpfs|devtmpfs|overlay",mountpoint!~"/boot.*"}')
        total = prom_query('node_filesystem_size_bytes{fstype!~"tmpfs|devtmpfs|overlay",mountpoint!~"/boot.*"}')

        total_map = {}
        for t in total:
            key = f"{t['metric'].get('instance', '?')}:{t['metric'].get('mountpoint', '?')}"
            total_map[key] = float(t['value'][1])

        disks = []
        for a in avail:
            key = f"{a['metric'].get('instance', '?')}:{a['metric'].get('mountpoint', '?')}"
            avail_bytes = float(a['value'][1])
            total_bytes = total_map.get(key, 0)
            if total_bytes < 1e9:  # Skip tiny filesystems
                continue
            used_pct = ((total_bytes - avail_bytes) / total_bytes * 100) if total_bytes > 0 else 0
            disks.append({
                "host": a['metric'].get('instance', '?').split(':')[0],
                "mount": a['metric'].get('mountpoint', '?'),
                "total_gb": round(total_bytes / 1e9, 1),
                "avail_gb": round(avail_bytes / 1e9, 1),
                "used_pct": round(used_pct, 1),
            })

        disks.sort(key=lambda d: -d["used_pct"])
        return disks[:20]
    except Exception as e:
        return {"error": str(e)}