#!/bin/bash
# Reactive Resume v5 GitOps Deployment Script
# Usage: ./deploy.sh [action]
# Actions: deploy, restart, stop, logs, status
#
# Orchestrates deployment of the Reactive Resume stack to a remote Synology
# host over SSH. All docker/docker-compose invocations run remotely via sudo.

# -e: abort on unhandled command failure; -u: error on unset variables;
# -o pipefail: a pipeline fails if any stage fails (matters for the
# `docker ps | grep` health checks below).
set -euo pipefail

readonly COMPOSE_FILE="docker-compose.yml"
readonly REMOTE_HOST="Vish@192.168.0.250"
readonly SSH_PORT="62000"
readonly REMOTE_PATH="/volume1/docker/rxv5"
readonly SERVICE_NAME="reactive-resume-v5"

# Colors for output
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m' # No Color

# Timestamped informational message (stdout).
log() {
  echo -e "${BLUE}[$(date +'%Y-%m-%d %H:%M:%S')] $1${NC}"
}

# Success marker (stdout).
success() {
  echo -e "${GREEN}✅ $1${NC}"
}

# Non-fatal warning. Diagnostics go to stderr so captured stdout stays clean.
warning() {
  echo -e "${YELLOW}⚠️ $1${NC}" >&2
}

# Fatal error: print to stderr and terminate the script.
error() {
  echo -e "${RED}❌ $1${NC}" >&2
  exit 1
}

# Verify SSH connectivity to the remote host. Fatal on failure.
check_ssh() {
  if ! ssh -q -p "$SSH_PORT" "$REMOTE_HOST" exit; then
    error "Cannot connect to $REMOTE_HOST"
  fi
}

# Full prerequisites for actions that ship the compose file: the local
# docker-compose.yml must exist AND the remote host must be reachable.
check_prerequisites() {
  if [[ ! -f "$COMPOSE_FILE" ]]; then
    error "docker-compose.yml not found in current directory"
  fi
  check_ssh
}

# One-time setup of Nginx Proxy Manager on the remote host.
setup_npm() {
  log "Setting up Nginx Proxy Manager..."

  # Create NPM directories
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "mkdir -p /volume1/homes/Vish/npm/{data,letsencrypt}"

  # Stop existing NPM if running (|| true: absence of the container is fine)
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "sudo /usr/local/bin/docker stop nginx-proxy-manager 2>/dev/null || true"
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "sudo /usr/local/bin/docker rm nginx-proxy-manager 2>/dev/null || true"

  # Start NPM with correct port mapping
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "sudo /usr/local/bin/docker run -d \
    --name nginx-proxy-manager \
    --restart unless-stopped \
    -p 8880:80 \
    -p 8443:443 \
    -p 81:81 \
    -v /volume1/homes/Vish/npm/data:/data \
    -v /volume1/homes/Vish/npm/letsencrypt:/etc/letsencrypt \
    jc21/nginx-proxy-manager:latest"

  success "NPM started on ports 8880/8443"
  warning "Make sure your router forwards port 80→8880 and 443→8443"
}

# Pull the LLM model into the remote Ollama container. Best-effort: a
# failed pull is downgraded to a warning with manual-recovery instructions.
setup_ollama() {
  log "Setting up Ollama AI model..."

  # Wait for Ollama to be ready
  log "Waiting for Ollama service to start..."
  sleep 30

  # Pull the required model
  log "Pulling llama3.2:3b model (this may take a while)..."
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "sudo /usr/local/bin/docker exec Resume-OLLAMA-V5 ollama pull llama3.2:3b" || {
    warning "Failed to pull model automatically. You can pull it manually later with:"
    warning "docker exec Resume-OLLAMA-V5 ollama pull llama3.2:3b"
  }

  success "Ollama setup complete"
}

# Ship the compose file to the remote host and (re)start the stack.
deploy() {
  log "Deploying $SERVICE_NAME to $REMOTE_HOST..."

  # Create required directories
  log "Creating required directories..."
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "mkdir -p $REMOTE_PATH/{db,seaweedfs,ollama}"

  # Copy compose file (stream over ssh stdin; avoids needing scp/sftp)
  log "Copying docker-compose.yml to $REMOTE_HOST:$REMOTE_PATH/"
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "cat > $REMOTE_PATH/docker-compose.yml" < "$COMPOSE_FILE"

  # Deploy services
  log "Starting services..."
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "cd $REMOTE_PATH && sudo /usr/local/bin/docker-compose up -d"

  # Wait for services to be healthy
  log "Waiting for services to be healthy..."
  sleep 30

  # Setup Ollama model
  setup_ollama

  # Check status
  if ssh -p "$SSH_PORT" "$REMOTE_HOST" "sudo /usr/local/bin/docker ps | grep -q 'Resume.*V5'"; then
    success "$SERVICE_NAME deployed successfully!"
    log "Local access: http://192.168.0.250:9751"
    log "External access: https://rx.vish.gg"
    log "Ollama API: http://192.168.0.250:11434"
    warning "Make sure NPM is configured for external access"
  else
    warning "Services started but may not be fully healthy yet. Check logs with: ./deploy.sh logs"
  fi
}

restart() {
  log "Restarting $SERVICE_NAME..."
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "cd $REMOTE_PATH && sudo /usr/local/bin/docker-compose restart"
  success "$SERVICE_NAME restarted!"
}

stop() {
  log "Stopping $SERVICE_NAME..."
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "cd $REMOTE_PATH && sudo /usr/local/bin/docker-compose down"
  success "$SERVICE_NAME stopped!"
}

# Tail the main application container's logs (follows; Ctrl-C to exit).
logs() {
  log "Showing logs for Resume-ACCESS-V5..."
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "sudo /usr/local/bin/docker logs Resume-ACCESS-V5 --tail 50 -f"
}

# Show remote container status and probe the application HTTP endpoint.
status() {
  log "Checking status of $SERVICE_NAME services..."
  echo
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "sudo /usr/local/bin/docker ps --format 'table {{.Names}}\t{{.Image}}\t{{.Status}}\t{{.Ports}}' | grep -E 'Resume.*V5|NAMES'"
  echo

  # Check if application is responding
  if curl -s -f http://192.168.0.250:9751 > /dev/null; then
    success "Application is responding at http://192.168.0.250:9751"
  else
    warning "Application may not be responding"
  fi
}

# Pull latest images remotely, then run the full deploy flow.
update() {
  log "Updating $SERVICE_NAME (pull latest images and redeploy)..."
  ssh -p "$SSH_PORT" "$REMOTE_HOST" "cd $REMOTE_PATH && sudo /usr/local/bin/docker-compose pull"
  deploy
}

# Main script logic. Default action is "deploy". Read-only actions
# (logs/status) only need SSH reachability, not a local compose file.
case "${1:-deploy}" in
  deploy)
    check_prerequisites
    deploy
    ;;
  restart)
    check_prerequisites
    restart
    ;;
  stop)
    check_prerequisites
    stop
    ;;
  logs)
    check_ssh
    logs
    ;;
  status)
    check_ssh
    status
    ;;
  update)
    check_prerequisites
    update
    ;;
  setup-npm)
    check_ssh
    setup_npm
    ;;
  setup-ollama)
    check_ssh
    setup_ollama
    ;;
  *)
    echo "Usage: $0 [deploy|restart|stop|logs|status|update|setup-npm|setup-ollama]"
    echo
    echo "Commands:"
    echo "  deploy       - Deploy/update the service (default)"
    echo "  restart      - Restart all services"
    echo "  stop         - Stop all services"
    echo "  logs         - Show application logs"
    echo "  status       - Show service status"
    echo "  update       - Pull latest images and redeploy"
    echo "  setup-npm    - Setup Nginx Proxy Manager"
    echo "  setup-ollama - Setup Ollama AI model"
    exit 1
    ;;
esac