"use client";

import { usePoll } from "@/lib/use-poll";
import type { OverviewStats } from "@/lib/types";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { StatusBadge } from "./status-badge";

/**
 * Dashboard card showing LLM / GPU status.
 *
 * Polls the overview-stats endpoint every 60 s and renders the GPU name,
 * VRAM usage, temperature, power draw and utilization, plus an Ollama
 * status badge when Ollama stats are present. Falls back to "Loading..."
 * before the first response and "GPU not available" when no GPU is
 * reported.
 *
 * NOTE(review): the original JSX markup was lost to extraction garbling —
 * every tag was stripped. The structure below is reconstructed from the
 * imported components and the surviving text/expressions; confirm the
 * exact tags, classNames and StatusBadge props against the design system.
 */
export function OllamaCard() {
  // `OverviewStats` is imported but appeared unused in the garbled source —
  // the generic argument was almost certainly stripped along with the JSX
  // tags (`<...>` spans). Restored here; confirm against usePoll's signature.
  const { data } = usePoll<OverviewStats>("/api/stats/overview", 60000);
  const gpu = data?.gpu;
  const ollama = data?.ollama;

  return (
    <Card>
      <CardHeader>
        <CardTitle>LLM / GPU</CardTitle>
        {/* NOTE(review): StatusBadge props reconstructed — verify against ./status-badge. */}
        {ollama && <StatusBadge status={ollama.status} />}
      </CardHeader>
      <CardContent>
        {!data ? (
          <p>Loading...</p>
        ) : gpu?.available ? (
          <>
            {gpu.name && <p>{gpu.name}</p>}
            {/* `!= null` (not truthiness) so a reading of 0 still renders. */}
            {gpu.vram_used_mb != null && gpu.vram_total_mb != null && (
              <div>
                <span>VRAM</span>
                <span>
                  {(gpu.vram_used_mb / 1024).toFixed(1)} /{" "}
                  {(gpu.vram_total_mb / 1024).toFixed(1)} GB
                </span>
              </div>
            )}
            {gpu.temp_c != null && (
              <div>
                <span>Temperature</span>
                <span>{gpu.temp_c}°C</span>
              </div>
            )}
            {gpu.power_w != null && (
              <div>
                <span>Power</span>
                <span>
                  {gpu.power_w}W {gpu.power_limit_w ? ` / ${gpu.power_limit_w}W` : ""}
                </span>
              </div>
            )}
            {gpu.utilization_pct != null && (
              <div>
                <span>Utilization</span>
                <span>{gpu.utilization_pct}%</span>
              </div>
            )}
          </>
        ) : (
          <p>GPU not available</p>
        )}
      </CardContent>
    </Card>
  );
}