Last active
May 12, 2026 13:28
-
-
Save laurentperrinet/114f6eb80c9bf505963f143e2e679583 to your computer and use it in GitHub Desktop.
My routine script to upgrade openwebui on my local box
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
#
# Upgrade the local Open-WebUI (with bundled Ollama) Docker container:
# pull the latest image, recreate the container, then refresh the models.
#
# Strict mode: exit on any error (-e), error on unset variables (-u),
# and fail a pipeline when any stage fails (pipefail).
set -euo pipefail

echo "=== Open-WebUI Upgrade Script ==="
echo "Starting upgrade process..."
echo
#######################################
# Verify that the docker CLI is available before doing anything else.
# Outputs: docker version line on stdout, diagnostics on stderr.
# Exits:   1 if docker cannot be found on PATH.
#######################################
check_docker() {
  # 'command -v' is the portable check; 'which' is deprecated for this.
  if ! command -v docker >/dev/null 2>&1; then
    echo "[ERROR] Docker is not installed or not in PATH" >&2
    exit 1
  fi
  echo "[OK] Docker is available ($(docker --version))"
}
#######################################
# List the models installed in the "open-webui" container's bundled
# Ollama, if the container is running; warn otherwise.
# Returns: always 0, so a missing container does not abort the script.
#######################################
list_ollama_models() {
  echo "Checking current Ollama models..."
  if docker exec open-webui ollama list 2>/dev/null; then
    echo "[OK] Successfully listed Ollama models"
  else
    echo "[WARN] No Ollama container found or models unavailable"
  fi
  echo
}
# --- Pre-flight checks --------------------------------------------------
check_docker
# List current models before the update so the user can compare afterward.
list_ollama_models

# --- Pull the latest image ----------------------------------------------
echo "Pulling the latest Open-WebUI image..."
if docker pull ghcr.io/open-webui/open-webui:ollama; then
  echo "[OK] Successfully pulled the latest image"
else
  echo "[ERROR] Pull failed, aborting" >&2
  exit 1
fi
echo
# --- Stop and remove the old container ----------------------------------
# Both steps tolerate an absent/stopped container: explicit if/else keeps
# the non-fatal fallback readable (and avoids the 'a && b || c' pitfall).
echo "Stopping Open-WebUI container..."
if docker stop open-webui 2>/dev/null; then
  echo "[OK] Container stopped"
else
  echo "[INFO] Container was not running"
fi

echo "Removing old Open-WebUI container..."
if docker rm open-webui 2>/dev/null; then
  echo "[OK] Old container removed"
else
  echo "[INFO] No existing container to remove"
fi
echo
# --- Start a new container with the updated image -----------------------
echo "Starting new Open-WebUI container..."
echo "  Configuration:"
echo "    - Port mapping: 3000:8080"
echo "    - Data volume: open-webui-data"
echo "    - GPU access: all GPUs enabled"
echo "    - Ollama volume: ollama"
echo "    - Restart policy: always"
if docker run -d \
    --name open-webui \
    -p 3000:8080 \
    -v open-webui-data:/app/backend/data \
    --gpus=all \
    -v ollama:/root/.ollama \
    --restart always \
    ghcr.io/open-webui/open-webui:ollama; then
  echo "[OK] New container started successfully!"
  echo
  echo "Container status:"
  docker ps -f name=open-webui --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}"
else
  echo "[ERROR] Failed to start new container" >&2
  exit 1
fi
echo
# --- Refresh Ollama models ----------------------------------------------
# (The original script printed this banner twice; deduplicated here.)
echo "Updating Ollama models..."
echo "This will pull the latest versions of your models. This may take some time depending on model sizes."
echo

# Models to refresh after the upgrade (edit this list to match yours).
# An array avoids relying on unquoted word-splitting of a flat string.
MODELS_TO_UPDATE=(
  "qwen3.6:latest"
  "wizardlm-uncensored:latest"
  "qwen3-coder:480b-cloud"
  "gpt-oss:120b-cloud"
)
MODEL_UPDATE_SUCCESS=0
MODEL_UPDATE_TOTAL=0

for model in "${MODELS_TO_UPDATE[@]}"; do
  MODEL_UPDATE_TOTAL=$((MODEL_UPDATE_TOTAL + 1))
  echo "Updating $model..."
  # Pull output is suppressed; a failure is non-fatal (model may simply
  # not be installed, or the network may be down).
  if docker exec open-webui ollama pull "$model" >/dev/null 2>&1; then
    echo "[OK] $model updated successfully"
    MODEL_UPDATE_SUCCESS=$((MODEL_UPDATE_SUCCESS + 1))
  else
    echo "[WARN] Failed to update $model (may not be installed or network issue)"
  fi
done
echo
# --- Summary ------------------------------------------------------------
echo "Final Ollama model list:"
# Run 'ollama list' once and show its output directly (the original ran
# it twice: once discarded for the check, once again for display).
if docker exec open-webui ollama list 2>/dev/null; then
  echo "[OK] Model list retrieved successfully"
else
  echo "[WARN] Could not retrieve model list"
fi
echo
echo "Upgrade completed!"
echo "Models: $MODEL_UPDATE_SUCCESS/$MODEL_UPDATE_TOTAL updated successfully"
echo "Open-WebUI is now running on http://localhost:3000"
echo "Note: If you have custom models, you may need to update them manually"
# The container is named "open-webui" everywhere in this script; the
# original hint wrongly said "docker exec ollama".
echo "  Run: docker exec open-webui ollama pull MODEL_NAME"
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment