Skip to content

Instantly share code, notes, and snippets.

@laurentperrinet
Last active May 12, 2026 13:28
Show Gist options
  • Select an option

  • Save laurentperrinet/114f6eb80c9bf505963f143e2e679583 to your computer and use it in GitHub Desktop.

Select an option

Save laurentperrinet/114f6eb80c9bf505963f143e2e679583 to your computer and use it in GitHub Desktop.
My routine script to upgrade Open-WebUI on my local box
#!/usr/bin/env bash
# Upgrade the local Open-WebUI (bundled-Ollama) Docker deployment:
# pull the latest image, recreate the container, then refresh models.
#
# Strict mode: exit on error (-e), on unset variables (-u), and fail a
# pipeline if any stage fails (pipefail) — not just the last one.
set -euo pipefail
echo "=== Open-WebUI Upgrade Script ==="
echo "Starting upgrade process..."
echo
# Function to check if docker is available.
# Prints a confirmation with the docker version on success; on failure,
# prints a diagnostic and exits the whole script with status 1.
check_docker() {
  if ! command -v docker >/dev/null 2>&1; then
    echo "❌ Error: Docker is not installed or not in PATH"
    # 'command -v' replaces non-portable 'which' for the diagnostic too.
    echo "Docker path: $(command -v docker 2>/dev/null || echo 'Not found')"
    exit 1
  fi
  echo "βœ… Docker is available ($(docker --version))"
}
# Function to list current Ollama models.
# Runs 'ollama list' inside the open-webui container; prints the model
# table on success, or a warning when the container/models are unavailable.
list_ollama_models() {
  echo "πŸ” Checking current Ollama models..."
  local status=0
  docker exec open-webui ollama list 2>/dev/null || status=$?
  if [ "$status" -eq 0 ]; then
    echo "βœ… Successfully listed Ollama models"
  else
    echo "⚠️ No Ollama container found or models unavailable"
  fi
  echo
}
# Check docker availability
check_docker

# List current models before update
list_ollama_models

# Pull the latest image; abort the whole upgrade if the registry pull fails.
echo "πŸ“₯ Pulling the latest Open-WebUI image..."
if ! docker pull ghcr.io/open-webui/open-webui:ollama; then
  echo "❌ Pull failed, aborting"
  exit 1
fi
echo "βœ… Successfully pulled the latest image"
echo
# Stop and remove the old container so the new image can take its place.
# Explicit if/else instead of 'cmd && ok || fail' (ShellCheck SC2015):
# with &&/||, the fallback message would also run if the success echo
# itself failed; the intent here is a true either/or.
echo "⏹️ Stopping Open-WebUI container..."
if docker stop open-webui 2>/dev/null; then
  echo "βœ… Container stopped"
else
  echo "ℹ️ Container was not running"
fi
echo "πŸ—‘οΈ Removing old Open-WebUI container..."
if docker rm open-webui 2>/dev/null; then
  echo "βœ… Old container removed"
else
  echo "ℹ️ No existing container to remove"
fi
echo
# Start a new container with the updated image
echo "πŸš€ Starting new Open-WebUI container..."
echo " Configuration:"
echo " - Port mapping: 3000:8080"
echo " - Data volume: open-webui-data"
echo " - GPU access: all GPUs enabled"
echo " - Ollama volume: ollama"
echo " - Restart policy: always"

# Collect the run options in an array so each flag stays on its own line.
run_args=(
  -d
  --name open-webui
  -p 3000:8080
  -v open-webui-data:/app/backend/data
  --gpus=all
  -v ollama:/root/.ollama
  --restart always
  ghcr.io/open-webui/open-webui:ollama
)

if docker run "${run_args[@]}"; then
  echo "βœ… New container started successfully!"
  echo
  echo "πŸ“Š Container status:"
  docker ps -f name=open-webui --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}"
else
  echo "❌ Failed to start new container"
  exit 1
fi
echo
echo
echo "πŸ”„ Updating Ollama models..."
echo "This will pull the latest versions of your models. This may take some time depending on model sizes."
echo
# Update common Ollama models (you can modify this list based on your needs)
MODELS_TO_UPDATE="qwen3.6:latest wizardlm-uncensored:latest qwen3-coder:480b-cloud gpt-oss:120b-cloud"
MODEL_UPDATE_SUCCESS=0
MODEL_UPDATE_TOTAL=0
# Intentional unquoted expansion: word-splitting iterates the
# space-separated model names (none contain whitespace).
for model in $MODELS_TO_UPDATE; do
  MODEL_UPDATE_TOTAL=$((MODEL_UPDATE_TOTAL + 1))
  echo "πŸ“₯ Updating $model..."
  if docker exec open-webui ollama pull "$model" >/dev/null 2>&1; then
    echo "βœ… $model updated successfully"
    MODEL_UPDATE_SUCCESS=$((MODEL_UPDATE_SUCCESS + 1))
  else
    echo "⚠️ Failed to update $model (may not be installed or network issue)"
  fi
done
echo
echo "πŸ“‹ Final Ollama model list:"
if docker exec open-webui ollama list >/dev/null 2>&1; then
  docker exec open-webui ollama list
  echo "βœ… Model list retrieved successfully"
else
  echo "⚠️ Could not retrieve model list"
fi
echo
echo "πŸŽ‰ Upgrade completed!"
echo "πŸ“ˆ Models: $MODEL_UPDATE_SUCCESS/$MODEL_UPDATE_TOTAL updated successfully"
echo "🌐 Open-WebUI is now running on http://localhost:3000"
echo "πŸ’‘ Note: If you have custom models, you may need to update them manually"
# Ollama runs inside the 'open-webui' container (see the exec calls above),
# so the manual hint must target that container, not a standalone 'ollama'.
echo " Run: docker exec open-webui ollama pull MODEL_NAME"
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment