Sanitized mirror from private repository - 2026-04-05 08:31:50 UTC
This commit is contained in:
0
dashboard/api/routers/__init__.py
Normal file
0
dashboard/api/routers/__init__.py
Normal file
146
dashboard/api/routers/automations.py
Normal file
146
dashboard/api/routers/automations.py
Normal file
@@ -0,0 +1,146 @@
|
||||
"""Automation status: email organizers, stack restarts, backup, drift."""
|
||||
|
||||
import sqlite3
|
||||
from datetime import date
|
||||
from pathlib import Path
|
||||
from fastapi import APIRouter
|
||||
|
||||
import sys
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from lib_bridge import GMAIL_DB, DVISH_DB, PROTON_DB, RESTART_DB, LOG_DIR
|
||||
|
||||
router = APIRouter(tags=["automations"])
|
||||
|
||||
|
||||
def _query_email_db(db_path: Path, name: str) -> dict:
|
||||
"""Query a processed.db for today's category counts and sender_cache stats."""
|
||||
if not db_path.exists():
|
||||
return {"name": name, "exists": False}
|
||||
|
||||
today = date.today().isoformat()
|
||||
try:
|
||||
conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
|
||||
conn.row_factory = sqlite3.Row
|
||||
|
||||
# Today's category counts
|
||||
cur = conn.execute(
|
||||
"SELECT category, COUNT(*) as cnt FROM processed "
|
||||
"WHERE processed_at LIKE ? GROUP BY category",
|
||||
(f"{today}%",),
|
||||
)
|
||||
categories = {row["category"]: row["cnt"] for row in cur}
|
||||
|
||||
# Total processed today
|
||||
cur = conn.execute(
|
||||
"SELECT COUNT(*) FROM processed WHERE processed_at LIKE ?",
|
||||
(f"{today}%",),
|
||||
)
|
||||
total_today = cur.fetchone()[0]
|
||||
|
||||
# Sender cache stats
|
||||
cur = conn.execute("SELECT COUNT(*) FROM sender_cache")
|
||||
cache_size = cur.fetchone()[0]
|
||||
|
||||
cur = conn.execute(
|
||||
"SELECT category, COUNT(*) as cnt FROM sender_cache GROUP BY category"
|
||||
)
|
||||
cache_by_category = {row["category"]: row["cnt"] for row in cur}
|
||||
|
||||
conn.close()
|
||||
return {
|
||||
"name": name,
|
||||
"exists": True,
|
||||
"today_total": total_today,
|
||||
"today_categories": categories,
|
||||
"sender_cache_size": cache_size,
|
||||
"sender_cache_categories": cache_by_category,
|
||||
}
|
||||
except Exception as e:
|
||||
return {"name": name, "exists": True, "error": str(e)}
|
||||
|
||||
|
||||
@router.get("/automations/email")
|
||||
def email_status():
|
||||
"""Email organizer status for all 3 accounts."""
|
||||
accounts = [
|
||||
_query_email_db(GMAIL_DB, "gmail"),
|
||||
_query_email_db(DVISH_DB, "dvish"),
|
||||
_query_email_db(PROTON_DB, "proton"),
|
||||
]
|
||||
return {"accounts": accounts}
|
||||
|
||||
|
||||
@router.get("/automations/restarts")
|
||||
def restart_status():
|
||||
"""Recent unhealthy container tracking entries."""
|
||||
if not RESTART_DB.exists():
|
||||
return {"entries": [], "count": 0}
|
||||
|
||||
try:
|
||||
conn = sqlite3.connect(f"file:{RESTART_DB}?mode=ro", uri=True)
|
||||
conn.row_factory = sqlite3.Row
|
||||
cur = conn.execute(
|
||||
"SELECT * FROM unhealthy_tracking ORDER BY last_checked DESC LIMIT 50"
|
||||
)
|
||||
entries = [dict(row) for row in cur]
|
||||
conn.close()
|
||||
return {"entries": entries, "count": len(entries)}
|
||||
except Exception as e:
|
||||
return {"entries": [], "count": 0, "error": str(e)}
|
||||
|
||||
|
||||
@router.get("/automations/backup")
|
||||
def backup_status():
|
||||
"""Parse today's backup log for status."""
|
||||
log_file = LOG_DIR / "gmail-backup-daily.log"
|
||||
if not log_file.exists():
|
||||
return {"status": "no_log", "entries": []}
|
||||
|
||||
today = date.today().isoformat()
|
||||
entries = []
|
||||
has_error = False
|
||||
|
||||
try:
|
||||
with open(log_file, "r", errors="replace") as f:
|
||||
for line in f:
|
||||
if today in line:
|
||||
entries.append(line.strip())
|
||||
if "ERROR" in line.upper():
|
||||
has_error = True
|
||||
except OSError:
|
||||
return {"status": "read_error", "entries": []}
|
||||
|
||||
return {
|
||||
"status": "error" if has_error else ("ok" if entries else "no_entries_today"),
|
||||
"entries": entries[-20:], # Last 20 today entries
|
||||
"has_errors": has_error,
|
||||
}
|
||||
|
||||
|
||||
@router.get("/automations/drift")
|
||||
def drift_status():
|
||||
"""Parse config-drift.log for last result."""
|
||||
log_file = LOG_DIR / "config-drift.log"
|
||||
if not log_file.exists():
|
||||
return {"status": "no_log", "last_result": None}
|
||||
|
||||
try:
|
||||
with open(log_file, "r", errors="replace") as f:
|
||||
lines = f.readlines()
|
||||
|
||||
# Find the last meaningful result
|
||||
for line in reversed(lines):
|
||||
line = line.strip()
|
||||
if "No drifts found" in line:
|
||||
return {"status": "clean", "last_result": "No drifts found", "drifts": 0}
|
||||
if "drift" in line.lower():
|
||||
# Try to extract count
|
||||
import re
|
||||
m = re.search(r"(\d+)\s+drifts?", line)
|
||||
count = int(m.group(1)) if m else -1
|
||||
return {"status": "drifted", "last_result": line, "drifts": count}
|
||||
|
||||
return {"status": "unknown", "last_result": lines[-1].strip() if lines else None}
|
||||
except OSError:
|
||||
return {"status": "read_error", "last_result": None}
|
||||
63
dashboard/api/routers/containers.py
Normal file
63
dashboard/api/routers/containers.py
Normal file
@@ -0,0 +1,63 @@
|
||||
"""Container listing, logs, and management."""
|
||||
|
||||
from fastapi import APIRouter, Query, HTTPException
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from lib_bridge import (
|
||||
portainer_list_containers,
|
||||
portainer_get_container_logs,
|
||||
portainer_restart_container,
|
||||
ENDPOINTS,
|
||||
)
|
||||
|
||||
router = APIRouter(tags=["containers"])
|
||||
|
||||
|
||||
@router.get("/containers")
|
||||
def list_containers(endpoint: str | None = None):
|
||||
"""List all containers across endpoints, optional endpoint filter."""
|
||||
targets = [endpoint] if endpoint and endpoint in ENDPOINTS else list(ENDPOINTS)
|
||||
results = []
|
||||
for ep in targets:
|
||||
try:
|
||||
containers = portainer_list_containers(ep)
|
||||
for c in containers:
|
||||
names = c.get("Names", [])
|
||||
name = names[0].lstrip("/") if names else c.get("Id", "")[:12]
|
||||
results.append({
|
||||
"id": c.get("Id", "")[:12],
|
||||
"name": name,
|
||||
"image": c.get("Image", ""),
|
||||
"state": c.get("State", ""),
|
||||
"status": c.get("Status", ""),
|
||||
"endpoint": ep,
|
||||
})
|
||||
except Exception as e:
|
||||
results.append({"endpoint": ep, "error": str(e)})
|
||||
return results
|
||||
|
||||
|
||||
@router.get("/containers/{container_id}/logs")
|
||||
def container_logs(container_id: str, endpoint: str = Query(...)):
|
||||
"""Get container logs. Requires endpoint query param."""
|
||||
if endpoint not in ENDPOINTS:
|
||||
raise HTTPException(400, f"Unknown endpoint: {endpoint}")
|
||||
try:
|
||||
logs = portainer_get_container_logs(endpoint, container_id)
|
||||
return {"container_id": container_id, "endpoint": endpoint, "logs": logs}
|
||||
except Exception as e:
|
||||
raise HTTPException(502, f"Failed to get logs: {e}")
|
||||
|
||||
|
||||
@router.post("/containers/{container_id}/restart")
|
||||
def restart_container(container_id: str, endpoint: str = Query(...)):
|
||||
"""Restart a container. Requires endpoint query param."""
|
||||
if endpoint not in ENDPOINTS:
|
||||
raise HTTPException(400, f"Unknown endpoint: {endpoint}")
|
||||
success = portainer_restart_container(endpoint, container_id)
|
||||
if not success:
|
||||
raise HTTPException(502, "Restart failed")
|
||||
return {"status": "restarted", "container_id": container_id, "endpoint": endpoint}
|
||||
61
dashboard/api/routers/expenses.py
Normal file
61
dashboard/api/routers/expenses.py
Normal file
@@ -0,0 +1,61 @@
|
||||
"""Expenses CSV reader and summary."""
|
||||
|
||||
import csv
|
||||
from collections import defaultdict
|
||||
from fastapi import APIRouter, Query
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from lib_bridge import EXPENSES_CSV
|
||||
|
||||
router = APIRouter(tags=["expenses"])
|
||||
|
||||
|
||||
def _read_expenses() -> list[dict]:
    """Read every expense row from the CSV; empty list when the file is absent."""
    if not EXPENSES_CSV.exists():
        return []
    with open(EXPENSES_CSV, "r", newline="") as handle:
        return [row for row in csv.DictReader(handle)]
|
||||
|
||||
|
||||
@router.get("/expenses")
|
||||
def list_expenses(month: str | None = Query(None, description="Filter by YYYY-MM")):
|
||||
"""List expenses, optionally filtered by month."""
|
||||
expenses = _read_expenses()
|
||||
if month:
|
||||
expenses = [e for e in expenses if e.get("date", "").startswith(month)]
|
||||
return expenses
|
||||
|
||||
|
||||
@router.get("/expenses/summary")
|
||||
def expenses_summary(month: str | None = Query(None, description="Filter by YYYY-MM")):
|
||||
"""Monthly total, count, top 10 vendors by amount."""
|
||||
expenses = _read_expenses()
|
||||
if month:
|
||||
expenses = [e for e in expenses if e.get("date", "").startswith(month)]
|
||||
|
||||
if not expenses:
|
||||
return {"total": 0, "count": 0, "top_vendors": [], "month": month}
|
||||
|
||||
total = 0.0
|
||||
vendor_totals = defaultdict(float)
|
||||
for e in expenses:
|
||||
try:
|
||||
amount = float(e.get("amount", 0))
|
||||
except (ValueError, TypeError):
|
||||
amount = 0.0
|
||||
total += amount
|
||||
vendor = e.get("vendor", "unknown")
|
||||
vendor_totals[vendor] += amount
|
||||
|
||||
top_vendors = sorted(vendor_totals.items(), key=lambda x: x[1], reverse=True)[:10]
|
||||
|
||||
return {
|
||||
"total": round(total, 2),
|
||||
"count": len(expenses),
|
||||
"top_vendors": [{"vendor": v, "amount": round(a, 2)} for v, a in top_vendors],
|
||||
"month": month,
|
||||
}
|
||||
57
dashboard/api/routers/logs.py
Normal file
57
dashboard/api/routers/logs.py
Normal file
@@ -0,0 +1,57 @@
|
||||
"""Unified log viewer routes."""
|
||||
|
||||
from fastapi import APIRouter, Query
|
||||
from pathlib import Path
|
||||
|
||||
router = APIRouter(tags=["logs"])
|
||||
|
||||
LOG_DIR = Path("/app/logs") if Path("/app/logs").exists() else Path("/tmp")
|
||||
|
||||
LOG_FILES = {
|
||||
"stack-restart": "stack-restart.log",
|
||||
"backup": "backup-validator.log",
|
||||
"gmail-lz": "gmail-organizer.log",
|
||||
"gmail-dvish": "gmail-organizer-dvish.log",
|
||||
"proton": "proton-organizer.log",
|
||||
"receipt": "receipt-tracker.log",
|
||||
"drift": "config-drift.log",
|
||||
"digest": "email-digest.log",
|
||||
"disk": "disk-predictor.log",
|
||||
"changelog": "changelog-generator.log",
|
||||
}
|
||||
|
||||
|
||||
@router.get("/logs")
|
||||
def list_logs():
|
||||
"""List available log files with sizes."""
|
||||
result = []
|
||||
for name, filename in LOG_FILES.items():
|
||||
path = LOG_DIR / filename
|
||||
if path.exists():
|
||||
stat = path.stat()
|
||||
result.append({
|
||||
"name": name,
|
||||
"filename": filename,
|
||||
"size_bytes": stat.st_size,
|
||||
"modified": stat.st_mtime,
|
||||
})
|
||||
return result
|
||||
|
||||
|
||||
@router.get("/logs/{log_name}")
|
||||
def get_log(log_name: str, tail: int = Query(200, le=2000), search: str = Query(None)):
|
||||
"""Get log file contents."""
|
||||
if log_name not in LOG_FILES:
|
||||
return {"error": f"Unknown log: {log_name}", "lines": []}
|
||||
path = LOG_DIR / LOG_FILES[log_name]
|
||||
if not path.exists():
|
||||
return {"lines": [], "total": 0}
|
||||
|
||||
with open(path) as f:
|
||||
all_lines = f.readlines()
|
||||
|
||||
if search:
|
||||
all_lines = [l for l in all_lines if search.lower() in l.lower()]
|
||||
|
||||
lines = all_lines[-tail:]
|
||||
return {"lines": [l.rstrip() for l in lines], "total": len(all_lines)}
|
||||
250
dashboard/api/routers/media.py
Normal file
250
dashboard/api/routers/media.py
Normal file
@@ -0,0 +1,250 @@
|
||||
"""Jellyfin + Arr suite media endpoints."""
|
||||
|
||||
import json
|
||||
import subprocess
|
||||
from fastapi import APIRouter
|
||||
import httpx
|
||||
|
||||
router = APIRouter(tags=["media"])
|
||||
|
||||
JELLYFIN_API_KEY = "REDACTED_API_KEY" # pragma: allowlist secret
|
||||
SONARR_URL = "http://192.168.0.200:8989"
|
||||
SONARR_KEY = "REDACTED_SONARR_API_KEY" # pragma: allowlist secret
|
||||
RADARR_URL = "http://192.168.0.200:7878"
|
||||
RADARR_KEY = "REDACTED_RADARR_API_KEY" # pragma: allowlist secret
|
||||
SABNZBD_URL = "http://192.168.0.200:8080"
|
||||
SABNZBD_KEY = "6ae289de5a4f45f7a0124b43ba9c3dea" # pragma: allowlist secret
|
||||
|
||||
|
||||
def _jellyfin(path: str) -> dict:
    """Call Jellyfin API via SSH+kubectl to bypass Olares auth sidecar.

    Args:
        path: Jellyfin REST path such as "/System/Info"; may already carry
            a query string.

    Returns:
        Parsed JSON response, or {} on any SSH/kubectl/JSON failure.
    """
    # Pick the right separator for paths that already have a query string.
    sep = "&" if "?" in path else "?"
    url = f"http://localhost:8096{path}{sep}api_key={JELLYFIN_API_KEY}"
    try:
        # curl runs inside the jellyfin container, so the request hits
        # localhost:8096 directly and skips the auth sidecar.
        # NOTE(review): `url` is single-quoted in the remote shell command;
        # a path containing a single quote would break the quoting —
        # assumed to only be called with fixed internal paths.
        result = subprocess.run(
            ["ssh", "-o", "ConnectTimeout=3", "olares",
             f"kubectl exec -n jellyfin-vishinator deploy/jellyfin -c jellyfin -- curl -s '{url}'"],
            capture_output=True, text=True, timeout=15,
        )
        return json.loads(result.stdout) if result.returncode == 0 else {}
    except Exception:
        # Best-effort: timeout or invalid JSON yields an empty dict.
        return {}
|
||||
|
||||
|
||||
@router.get("/jellyfin/status")
|
||||
def jellyfin_status():
|
||||
"""Jellyfin server status: version, libraries, sessions."""
|
||||
info = _jellyfin("/System/Info")
|
||||
libraries = _jellyfin("/Library/VirtualFolders")
|
||||
sessions = _jellyfin("/Sessions")
|
||||
|
||||
active = []
|
||||
idle_count = 0
|
||||
if isinstance(sessions, list):
|
||||
for s in sessions:
|
||||
if s.get("NowPlayingItem"):
|
||||
active.append({
|
||||
"user": s.get("UserName", ""),
|
||||
"client": s.get("Client", ""),
|
||||
"device": s.get("DeviceName", ""),
|
||||
"now_playing": s["NowPlayingItem"].get("Name", ""),
|
||||
"type": s["NowPlayingItem"].get("Type", ""),
|
||||
})
|
||||
else:
|
||||
idle_count += 1
|
||||
|
||||
return {
|
||||
"version": info.get("Version", "unknown"),
|
||||
"server_name": info.get("ServerName", "unknown"),
|
||||
"libraries": [{"name": lib.get("Name"), "type": lib.get("CollectionType", "")}
|
||||
for lib in libraries] if isinstance(libraries, list) else [],
|
||||
"active_sessions": active,
|
||||
"idle_sessions": idle_count,
|
||||
}
|
||||
|
||||
|
||||
@router.get("/sonarr/queue")
|
||||
async def sonarr_queue():
|
||||
"""Sonarr download queue."""
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=10) as client:
|
||||
resp = await client.get(
|
||||
f"{SONARR_URL}/api/v3/queue",
|
||||
headers={"X-Api-Key": SONARR_KEY},
|
||||
)
|
||||
return resp.json()
|
||||
except Exception as e:
|
||||
return {"error": str(e)}
|
||||
|
||||
|
||||
@router.get("/radarr/queue")
|
||||
async def radarr_queue():
|
||||
"""Radarr download queue."""
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=10) as client:
|
||||
resp = await client.get(
|
||||
f"{RADARR_URL}/api/v3/queue",
|
||||
headers={"X-Api-Key": RADARR_KEY},
|
||||
)
|
||||
return resp.json()
|
||||
except Exception as e:
|
||||
return {"error": str(e)}
|
||||
|
||||
|
||||
@router.get("/sabnzbd/queue")
|
||||
async def sabnzbd_queue():
|
||||
"""SABnzbd download queue."""
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=10) as client:
|
||||
resp = await client.get(
|
||||
f"{SABNZBD_URL}/api",
|
||||
params={"apikey": SABNZBD_KEY, "output": "json", "mode": "queue"},
|
||||
)
|
||||
return resp.json()
|
||||
except Exception as e:
|
||||
return {"error": str(e)}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Prowlarr (indexer manager)
|
||||
# ---------------------------------------------------------------------------
|
||||
PROWLARR_URL = "http://192.168.0.200:9696"
|
||||
PROWLARR_KEY = "58b5963e008243cf8cc4fae5276e68af" # pragma: allowlist secret
|
||||
|
||||
|
||||
@router.get("/prowlarr/stats")
|
||||
async def prowlarr_stats():
|
||||
"""Prowlarr indexer status."""
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=10) as client:
|
||||
r = await client.get(
|
||||
f"{PROWLARR_URL}/api/v1/indexer",
|
||||
headers={"X-Api-Key": PROWLARR_KEY},
|
||||
)
|
||||
r.raise_for_status()
|
||||
indexers = r.json()
|
||||
enabled = [i for i in indexers if i.get("enable")]
|
||||
return {
|
||||
"total": len(indexers),
|
||||
"enabled": len(enabled),
|
||||
"indexers": [
|
||||
{"name": i["name"], "protocol": i.get("protocol", "?")}
|
||||
for i in enabled[:10]
|
||||
],
|
||||
}
|
||||
except Exception as e:
|
||||
return {"total": 0, "enabled": 0, "error": str(e)}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Bazarr (subtitles)
|
||||
# ---------------------------------------------------------------------------
|
||||
BAZARR_URL = "http://192.168.0.200:6767"
|
||||
BAZARR_KEY = "REDACTED_BAZARR_API_KEY" # pragma: allowlist secret
|
||||
|
||||
|
||||
@router.get("/bazarr/status")
|
||||
async def bazarr_status():
|
||||
"""Bazarr subtitle status."""
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=10) as client:
|
||||
r = await client.get(
|
||||
f"{BAZARR_URL}/api/system/status",
|
||||
headers={"X-Api-Key": BAZARR_KEY},
|
||||
)
|
||||
r.raise_for_status()
|
||||
status = r.json().get("data", r.json())
|
||||
w = await client.get(
|
||||
f"{BAZARR_URL}/api/badges",
|
||||
headers={"X-Api-Key": BAZARR_KEY},
|
||||
)
|
||||
badges = w.json() if w.status_code == 200 else {}
|
||||
return {
|
||||
"version": status.get("bazarr_version", "?"),
|
||||
"sonarr_signalr": badges.get("sonarr_signalr", "?"),
|
||||
"radarr_signalr": badges.get("radarr_signalr", "?"),
|
||||
"wanted_episodes": badges.get("episodes", 0),
|
||||
"wanted_movies": badges.get("movies", 0),
|
||||
}
|
||||
except Exception as e:
|
||||
return {"error": str(e)}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Audiobookshelf
|
||||
# ---------------------------------------------------------------------------
|
||||
ABS_URL = "http://192.168.0.200:13378"
|
||||
ABS_TOKEN = "REDACTED_TOKEN" # pragma: allowlist secret
|
||||
|
||||
|
||||
@router.get("/audiobookshelf/stats")
|
||||
async def audiobookshelf_stats():
|
||||
"""Audiobookshelf library stats."""
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=10) as client:
|
||||
libs = await client.get(
|
||||
f"{ABS_URL}/api/libraries",
|
||||
headers={"Authorization": f"Bearer {ABS_TOKEN}"},
|
||||
)
|
||||
libs.raise_for_status()
|
||||
libraries = libs.json().get("libraries", [])
|
||||
result = []
|
||||
for lib in libraries:
|
||||
result.append({
|
||||
"name": lib.get("name", "?"),
|
||||
"type": lib.get("mediaType", "?"),
|
||||
"items": lib.get("stats", {}).get("totalItems", 0),
|
||||
})
|
||||
return {"libraries": result, "total": sum(l["items"] for l in result)}
|
||||
except Exception as e:
|
||||
return {"error": str(e)}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Deluge (torrent client)
|
||||
# ---------------------------------------------------------------------------
|
||||
DELUGE_URL = "http://192.168.0.200:8112"
|
||||
|
||||
|
||||
@router.get("/deluge/status")
|
||||
async def deluge_status():
|
||||
"""Deluge torrent client status."""
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=10) as client:
|
||||
login = await client.post(
|
||||
f"{DELUGE_URL}/json",
|
||||
json={"method": "auth.login", "params": ["deluge"], "id": 1},
|
||||
)
|
||||
if login.status_code != 200:
|
||||
return {"available": False}
|
||||
stats = await client.post(
|
||||
f"{DELUGE_URL}/json",
|
||||
json={
|
||||
"method": "web.update_ui",
|
||||
"params": [
|
||||
["name", "state", "progress", "download_payload_rate",
|
||||
"upload_payload_rate"],
|
||||
{},
|
||||
],
|
||||
"id": 2,
|
||||
},
|
||||
)
|
||||
data = stats.json().get("result", {})
|
||||
torrents = data.get("torrents", {})
|
||||
active = [
|
||||
t for t in torrents.values()
|
||||
if t.get("state") in ("Downloading", "Seeding")
|
||||
]
|
||||
return {
|
||||
"available": True,
|
||||
"total": len(torrents),
|
||||
"active": len(active),
|
||||
"downloading": len(
|
||||
[t for t in torrents.values() if t.get("state") == "Downloading"]
|
||||
),
|
||||
"seeding": len(
|
||||
[t for t in torrents.values() if t.get("state") == "Seeding"]
|
||||
),
|
||||
}
|
||||
except Exception as e:
|
||||
return {"available": False, "error": str(e)}
|
||||
64
dashboard/api/routers/network.py
Normal file
64
dashboard/api/routers/network.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""Network / Headscale / AdGuard routes."""
|
||||
|
||||
from fastapi import APIRouter
|
||||
import subprocess
|
||||
import json
|
||||
import httpx
|
||||
|
||||
router = APIRouter(tags=["network"])
|
||||
|
||||
ADGUARD_URL = "http://192.168.0.250:9080"
|
||||
ADGUARD_USER = "vish"
|
||||
ADGUARD_PASS = "REDACTED_PASSWORD"
|
||||
|
||||
|
||||
def _adguard_get(path):
    """GET an AdGuard Home control endpoint after logging in.

    Args:
        path: Control API path, e.g. "/stats".

    Returns:
        Parsed JSON body, or {} when the response body is empty.

    Raises:
        httpx.HTTPStatusError: when the GET returns a 4xx/5xx.
    """
    with httpx.Client(timeout=10) as client:
        # Login first so the session cookie is stored on this client.
        # NOTE(review): the login response is not checked — a failed login
        # only surfaces later as an auth error on the GET; confirm intended.
        client.post(f"{ADGUARD_URL}/control/login", json={"name": ADGUARD_USER, "password": ADGUARD_PASS})
        r = client.get(f"{ADGUARD_URL}/control{path}")
        r.raise_for_status()
        return r.json() if r.content else {}
|
||||
|
||||
|
||||
@router.get("/network/headscale")
|
||||
def headscale_nodes():
|
||||
"""List Headscale nodes."""
|
||||
result = subprocess.run(
|
||||
["ssh", "-o", "ConnectTimeout=3", "calypso",
|
||||
"docker exec headscale headscale nodes list -o json"],
|
||||
capture_output=True, text=True, timeout=15,
|
||||
)
|
||||
if result.returncode != 0:
|
||||
return {"nodes": [], "error": result.stderr.strip()}
|
||||
nodes = json.loads(result.stdout)
|
||||
return {"nodes": [
|
||||
{"name": n.get("givenName") or n.get("name", "?"),
|
||||
"ip": (n.get("ipAddresses") or ["?"])[0],
|
||||
"online": n.get("online", False),
|
||||
"last_seen": n.get("lastSeen", "")}
|
||||
for n in nodes
|
||||
]}
|
||||
|
||||
|
||||
@router.get("/network/adguard")
|
||||
def adguard_stats():
|
||||
"""Get AdGuard DNS stats."""
|
||||
try:
|
||||
stats = _adguard_get("/stats")
|
||||
return {
|
||||
"total_queries": stats.get("num_dns_queries", 0),
|
||||
"blocked": stats.get("num_blocked_filtering", 0),
|
||||
"avg_time": stats.get("avg_processing_time", 0),
|
||||
}
|
||||
except Exception as e:
|
||||
return {"error": str(e)}
|
||||
|
||||
|
||||
@router.get("/network/adguard/rewrites")
|
||||
def adguard_rewrites():
|
||||
"""List AdGuard DNS rewrites."""
|
||||
try:
|
||||
data = _adguard_get("/rewrite/list")
|
||||
return [{"domain": r.get("domain", ""), "answer": r.get("answer", "")} for r in (data or [])]
|
||||
except Exception as e:
|
||||
return {"error": str(e)}
|
||||
93
dashboard/api/routers/olares.py
Normal file
93
dashboard/api/routers/olares.py
Normal file
@@ -0,0 +1,93 @@
|
||||
"""Olares K3s pod listing and GPU status."""
|
||||
|
||||
import subprocess
|
||||
from fastapi import APIRouter, Query
|
||||
|
||||
router = APIRouter(tags=["olares"])
|
||||
|
||||
|
||||
def _ssh_olares(cmd: str, timeout: int = 10) -> str:
    """Run a command on olares via SSH.

    Args:
        cmd: Remote shell command to execute.
        timeout: Seconds to wait for the whole SSH invocation.

    Returns:
        Command stdout, or "" on non-zero exit, timeout, or failure to
        spawn ssh locally.
    """
    try:
        result = subprocess.run(
            ["ssh", "-o", "ConnectTimeout=3", "olares", cmd],
            capture_output=True, text=True, timeout=timeout,
        )
    except (subprocess.TimeoutExpired, OSError):
        # Fix: a timeout previously escaped to callers as an HTTP 500;
        # callers already treat "" as "olares unreachable".
        return ""
    return result.stdout if result.returncode == 0 else ""
|
||||
|
||||
|
||||
@router.get("/olares/pods")
|
||||
def olares_pods(namespace: str | None = Query(None)):
|
||||
"""List K3s pods on olares."""
|
||||
if namespace:
|
||||
cmd = f"kubectl get pods -n {namespace} -o wide --no-headers"
|
||||
else:
|
||||
cmd = "kubectl get pods -A -o wide --no-headers"
|
||||
|
||||
output = _ssh_olares(cmd, timeout=15)
|
||||
if not output:
|
||||
return []
|
||||
|
||||
pods = []
|
||||
for line in output.strip().split("\n"):
|
||||
parts = line.split()
|
||||
if not parts:
|
||||
continue
|
||||
if namespace:
|
||||
# No namespace column when -n is used
|
||||
if len(parts) >= 7:
|
||||
pods.append({
|
||||
"namespace": namespace,
|
||||
"name": parts[0],
|
||||
"ready": parts[1],
|
||||
"status": parts[2],
|
||||
"restarts": parts[3],
|
||||
"age": parts[4],
|
||||
"ip": parts[5] if len(parts) > 5 else "",
|
||||
"node": parts[6] if len(parts) > 6 else "",
|
||||
})
|
||||
else:
|
||||
# Has namespace column
|
||||
if len(parts) >= 8:
|
||||
pods.append({
|
||||
"namespace": parts[0],
|
||||
"name": parts[1],
|
||||
"ready": parts[2],
|
||||
"status": parts[3],
|
||||
"restarts": parts[4],
|
||||
"age": parts[5],
|
||||
"ip": parts[6] if len(parts) > 6 else "",
|
||||
"node": parts[7] if len(parts) > 7 else "",
|
||||
})
|
||||
return pods
|
||||
|
||||
|
||||
@router.get("/olares/gpu")
|
||||
def olares_gpu():
|
||||
"""GPU status from olares."""
|
||||
output = _ssh_olares(
|
||||
"nvidia-smi --query-gpu=name,temperature.gpu,power.draw,power.limit,"
|
||||
"memory.used,memory.total,utilization.gpu --format=csv,noheader,nounits"
|
||||
)
|
||||
if not output:
|
||||
return {"available": False}
|
||||
|
||||
parts = [p.strip() for p in output.strip().split(",")]
|
||||
|
||||
def _float(val: str) -> float | None:
|
||||
try:
|
||||
return float(val)
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
if len(parts) >= 7:
|
||||
return {
|
||||
"available": True,
|
||||
"name": parts[0],
|
||||
"temp_c": _float(parts[1]),
|
||||
"power_draw_w": _float(parts[2]),
|
||||
"power_limit_w": _float(parts[3]),
|
||||
"memory_used_mb": _float(parts[4]),
|
||||
"memory_total_mb": _float(parts[5]),
|
||||
"utilization_pct": _float(parts[6]),
|
||||
}
|
||||
return {"available": False}
|
||||
194
dashboard/api/routers/overview.py
Normal file
194
dashboard/api/routers/overview.py
Normal file
@@ -0,0 +1,194 @@
|
||||
"""Overview stats and SSE activity stream."""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import subprocess
|
||||
import sqlite3
|
||||
from datetime import date
|
||||
from fastapi import APIRouter
|
||||
from sse_starlette.sse import EventSourceResponse
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from lib_bridge import (
|
||||
portainer_list_containers, ENDPOINTS, ollama_available,
|
||||
GMAIL_DB, DVISH_DB, PROTON_DB, RESTART_DB, LOG_DIR, OLLAMA_URL,
|
||||
)
|
||||
from log_parser import get_recent_events, tail_logs, get_new_lines
|
||||
|
||||
router = APIRouter(tags=["overview"])
|
||||
|
||||
|
||||
def _count_today_emails(db_path: Path) -> int:
|
||||
"""Count emails processed today from a processed.db file."""
|
||||
if not db_path.exists():
|
||||
return 0
|
||||
try:
|
||||
today = date.today().isoformat()
|
||||
conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
|
||||
cur = conn.execute(
|
||||
"SELECT COUNT(*) FROM processed WHERE processed_at LIKE ?",
|
||||
(f"{today}%",),
|
||||
)
|
||||
count = cur.fetchone()[0]
|
||||
conn.close()
|
||||
return count
|
||||
except Exception:
|
||||
return 0
|
||||
|
||||
|
||||
def _count_unhealthy(db_path: Path) -> int:
|
||||
"""Count unhealthy containers from stack-restart.db."""
|
||||
if not db_path.exists():
|
||||
return 0
|
||||
try:
|
||||
conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
|
||||
cur = conn.execute("SELECT COUNT(*) FROM unhealthy_tracking")
|
||||
count = cur.fetchone()[0]
|
||||
conn.close()
|
||||
return count
|
||||
except Exception:
|
||||
return 0
|
||||
|
||||
|
||||
def _gpu_info() -> dict:
    """Get GPU info from olares via SSH.

    Returns:
        {"available": False} on any failure, otherwise metrics parsed
        from nvidia-smi's CSV output.
    """
    try:
        result = subprocess.run(
            ["ssh", "-o", "ConnectTimeout=3", "olares",
             "nvidia-smi --query-gpu=temperature.gpu,power.draw,power.limit,"
             "memory.used,memory.total,utilization.gpu --format=csv,noheader,nounits"],
            capture_output=True, text=True, timeout=10,
        )
        if result.returncode != 0:
            return {"available": False}

        # Fix: parse only the first CSV line — multi-GPU hosts emit one
        # line per GPU and splitting the whole output merged the fields.
        lines = result.stdout.strip().splitlines()
        if not lines:
            return {"available": False}
        parts = [p.strip() for p in lines[0].split(",")]

        def _f(v):
            # Best-effort float; None for blank/garbled fields.
            try:
                return float(v)
            except (ValueError, TypeError):
                return None

        if len(parts) >= 6:
            return {
                "available": True,
                "temp_c": _f(parts[0]),
                "power_draw_w": _f(parts[1]),
                "power_limit_w": _f(parts[2]),
                "memory_used_mb": _f(parts[3]),
                "memory_total_mb": _f(parts[4]),
                "utilization_pct": _f(parts[5]),
            }
    except Exception:
        pass
    return {"available": False}
|
||||
|
||||
|
||||
@router.get("/stats/overview")
|
||||
def stats_overview():
|
||||
"""Aggregate overview stats."""
|
||||
# Container counts
|
||||
container_counts = {}
|
||||
total = 0
|
||||
for ep_name in ENDPOINTS:
|
||||
try:
|
||||
containers = portainer_list_containers(ep_name)
|
||||
running = sum(1 for c in containers if c.get("State") == "running")
|
||||
container_counts[ep_name] = {"total": len(containers), "running": running}
|
||||
total += len(containers)
|
||||
except Exception:
|
||||
container_counts[ep_name] = {"total": 0, "running": 0, "error": True}
|
||||
|
||||
# GPU
|
||||
gpu = _gpu_info()
|
||||
|
||||
# Email counts
|
||||
email_today = {
|
||||
"gmail": _count_today_emails(GMAIL_DB),
|
||||
"dvish": _count_today_emails(DVISH_DB),
|
||||
"proton": _count_today_emails(PROTON_DB),
|
||||
}
|
||||
email_today["total"] = sum(email_today.values())
|
||||
|
||||
# Unhealthy
|
||||
unhealthy = _count_unhealthy(RESTART_DB)
|
||||
|
||||
# Ollama
|
||||
ollama_up = ollama_available(OLLAMA_URL)
|
||||
|
||||
return {
|
||||
"containers": {"total": total, "by_endpoint": container_counts},
|
||||
"gpu": gpu,
|
||||
"email_today": email_today,
|
||||
"unhealthy_count": unhealthy,
|
||||
"ollama_available": ollama_up,
|
||||
}
|
||||
|
||||
|
||||
@router.get("/activity")
|
||||
async def activity_stream():
|
||||
"""SSE stream of today's automation events."""
|
||||
|
||||
async def event_generator():
|
||||
# Send initial batch
|
||||
events = get_recent_events(LOG_DIR)
|
||||
yield {"event": "init", "data": json.dumps(events)}
|
||||
|
||||
# Poll for new events
|
||||
positions = tail_logs(LOG_DIR)
|
||||
while True:
|
||||
await asyncio.sleep(5)
|
||||
new_events, positions = get_new_lines(LOG_DIR, positions)
|
||||
if new_events:
|
||||
yield {"event": "update", "data": json.dumps(new_events)}
|
||||
|
||||
return EventSourceResponse(event_generator())
|
||||
|
||||
|
||||
@router.post("/actions/pause-organizers")
|
||||
def pause_organizers():
|
||||
"""Pause all email organizer cron jobs."""
|
||||
result = subprocess.run(
|
||||
["/home/homelab/organized/repos/homelab/scripts/gmail-organizer-ctl.sh", "stop"],
|
||||
capture_output=True, text=True, timeout=10,
|
||||
)
|
||||
return {"success": result.returncode == 0, "output": result.stdout.strip()}
|
||||
|
||||
|
||||
@router.post("/actions/resume-organizers")
|
||||
def resume_organizers():
|
||||
"""Resume all email organizer cron jobs."""
|
||||
result = subprocess.run(
|
||||
["/home/homelab/organized/repos/homelab/scripts/gmail-organizer-ctl.sh", "start"],
|
||||
capture_output=True, text=True, timeout=10,
|
||||
)
|
||||
return {"success": result.returncode == 0, "output": result.stdout.strip()}
|
||||
|
||||
|
||||
@router.get("/actions/organizer-status")
|
||||
def organizer_status():
|
||||
"""Check if organizers are running or paused."""
|
||||
result = subprocess.run(
|
||||
["/home/homelab/organized/repos/homelab/scripts/gmail-organizer-ctl.sh", "status"],
|
||||
capture_output=True, text=True, timeout=10,
|
||||
)
|
||||
return {"output": result.stdout.strip()}
|
||||
|
||||
|
||||
@router.post("/chat")
|
||||
def chat_with_ollama(body: dict):
|
||||
"""Send a message to the local Ollama LLM."""
|
||||
prompt = body.get("message", "")
|
||||
if not prompt:
|
||||
return {"error": "No message provided"}
|
||||
try:
|
||||
import sys as _sys
|
||||
_sys.path.insert(0, str(Path("/app/scripts") if Path("/app/scripts").exists() else Path(__file__).parent.parent.parent / "scripts"))
|
||||
from lib.ollama import ollama_generate
|
||||
response = ollama_generate(prompt, num_predict=500, timeout=60)
|
||||
return {"response": response}
|
||||
except Exception as e:
|
||||
return {"error": str(e)}
|
||||
Reference in New Issue
Block a user