Last active
March 21, 2026 21:27
-
-
Save carterhudson/3a288adb2ec921996c07a5ed6fcbe918 to your computer and use it in GitHub Desktop.
SlothServ bootstrap — one-command media server setup for macOS
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash
#
# SlothServ Bootstrap
# Sets up the entire media server stack on a fresh macOS machine.
#
# Usage:
#   bash bootstrap.sh                        # Interactive — prompts for everything
#   bash bootstrap.sh --import config.json   # Headless — reads saved credentials
#   bash bootstrap.sh --export config.json   # Snapshot — dumps live stack to file
#
# Everything is prompted interactively — no hardcoded providers, indexers,
# or VPN services. Bring your own Newznab indexer, download client, and VPN.
#
set -euo pipefail
IMPORT_FILE=""
EXPORT_FILE=""
while [[ $# -gt 0 ]]; do
  case "$1" in
    --import|--export)
      # Fail with a clear message when the filename is missing; otherwise
      # "shift 2" past the end of $@ aborts under set -e with no hint.
      [[ $# -ge 2 ]] || { echo "Missing file argument for $1" >&2; exit 1; }
      if [[ "$1" == "--import" ]]; then IMPORT_FILE="$2"; else EXPORT_FILE="$2"; fi
      shift 2
      ;;
    *) echo "Unknown option: $1"; exit 1 ;;
  esac
done
# ─── Helpers ──────────────────────────────────────────────────────────
# ANSI color codes used by the log helpers below; NC resets styling.
RED='\033[0;31m' GREEN='\033[0;32m' YELLOW='\033[1;33m'
CYAN='\033[0;36m' BOLD='\033[1m' NC='\033[0m'
# Leveled log helpers: colored tag + message to stdout.
info() { echo -e "${CYAN}[info]${NC} $*"; }
ok()   { echo -e "${GREEN}[ok]${NC} $*"; }
warn() { echo -e "${YELLOW}[warn]${NC} $*"; }
err()  { echo -e "${RED}[error]${NC} $*"; }
step() { echo -e "\n${BOLD}━━━ $* ━━━${NC}"; }
# prompt VAR TEXT [secret] — loop until a non-empty value is entered, then
# assign it to the variable named VAR (indirect assignment via printf -v).
# secret=true suppresses terminal echo while typing (read -s).
prompt() {
  local var_name="$1" prompt_text="$2" secret="${3:-false}" value=""
  while [[ -z "$value" ]]; do
    if [[ "$secret" == "true" ]]; then
      read -rsp "$prompt_text: " value; echo
    else
      read -rp "$prompt_text: " value
    fi
  done
  printf -v "$var_name" '%s' "$value"
}
# prompt_default VAR TEXT DEFAULT — one-shot prompt; empty input keeps DEFAULT.
prompt_default() {
  local var_name="$1" prompt_text="$2" default="$3" value=""
  read -rp "$prompt_text [$default]: " value
  printf -v "$var_name" '%s' "${value:-$default}"
}
# confirm TEXT — returns 0 (yes) only when the reply starts with y or Y.
confirm() {
  local reply=""
  read -rp "$1 [y/N]: " reply
  [[ "$reply" =~ ^[Yy] ]]
}
# ─── Export mode (early exit) ─────────────────────────────────────────
# --export delegates to the installed export-config.py and exits; nothing
# else in this script runs.
if [[ -n "$EXPORT_FILE" ]]; then
  MEDIA_SERVER_DIR="${HOME}/media-server"
  EXPORT_SCRIPT="$MEDIA_SERVER_DIR/scripts/setup/export-config.py"
  if [[ ! -f "$EXPORT_SCRIPT" ]]; then
    err "export-config.py not found. Is SlothServ installed?"
    exit 1
  fi
  python3 "$EXPORT_SCRIPT" -o "$EXPORT_FILE"
  exit 0
fi
# ─── Import mode (load saved config) ─────────────────────────────────
# Pulls saved credentials out of the JSON snapshot so most interactive
# prompts in step 3 can be skipped.
IMPORTED=false
if [[ -n "$IMPORT_FILE" ]]; then
  if [[ ! -f "$IMPORT_FILE" ]]; then
    err "Import file not found: $IMPORT_FILE"; exit 1
  fi
  IMPORTED=true
  info "Loading config from $IMPORT_FILE"
  # NOTE(review): $IMPORT_FILE is interpolated directly into the Python
  # source strings below, so a path containing a single quote breaks these
  # one-liners — confirm, or pass the path via sys.argv instead.
  TZ=$(python3 -c "import json; d=json.load(open('$IMPORT_FILE')); print(d['general']['timezone'])")
  NZBDAV_API_KEY=$(python3 -c "import json; d=json.load(open('$IMPORT_FILE')); print(d['nzbdav']['api_key'])")
  NZBDAV_WEBDAV_PASS_OBSCURED=$(python3 -c "import json; d=json.load(open('$IMPORT_FILE')); print(d['nzbdav']['webdav_password_obscured'])")
  INDEXER_NAME=$(python3 -c "import json; d=json.load(open('$IMPORT_FILE')); print(d['indexer']['name'])")
  INDEXER_URL=$(python3 -c "import json; d=json.load(open('$IMPORT_FILE')); print(d['indexer']['url'])")
  INDEXER_API_KEY=$(python3 -c "import json; d=json.load(open('$IMPORT_FILE')); print(d['indexer']['api_key'])")
  # Category IDs use .get() with the same defaults as the interactive path.
  INDEXER_TV_CATS=$(python3 -c "import json; d=json.load(open('$IMPORT_FILE')); print(d['indexer'].get('tv_categories','5030,5040'))")
  INDEXER_ANIME_CATS=$(python3 -c "import json; d=json.load(open('$IMPORT_FILE')); print(d['indexer'].get('anime_categories','5070'))")
  INDEXER_MOVIE_CATS=$(python3 -c "import json; d=json.load(open('$IMPORT_FILE')); print(d['indexer'].get('movie_categories','2000,2010,2020,2030,2040,2045,2050,2060'))")
  USE_VPN=$(python3 -c "import json; d=json.load(open('$IMPORT_FILE')); print('true' if d['vpn']['enabled'] else 'false')")
  ok "Loaded: indexer=$INDEXER_NAME, vpn=$USE_VPN, tz=$TZ"
fi
# ─── 1. Preflight ────────────────────────────────────────────────────
step "1/8 Preflight"
# Hard requirement: macOS. Apple Silicon is recommended but not enforced.
if [[ "$(uname)" != "Darwin" ]]; then err "macOS only."; exit 1; fi
[[ "$(uname -m)" != "arm64" ]] && warn "Optimized for Apple Silicon — continuing anyway"
ok "macOS detected"
# ─── 2. Prerequisites ────────────────────────────────────────────────
step "2/8 Prerequisites"
if ! command -v brew &>/dev/null; then
  info "Installing Homebrew..."
  /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
  eval "$(/opt/homebrew/bin/brew shellenv)"
else ok "Homebrew"; fi
for pkg in colima docker docker-compose; do
  if ! brew list "$pkg" &>/dev/null; then
    info "Installing $pkg..."; brew install "$pkg"
  else ok "$pkg"; fi
done
# Both ship with macOS. xmllint is required in step 6 to read the
# Sonarr/Radarr API keys — previously it was never checked here.
for tool in sqlite3 xmllint; do
  command -v "$tool" &>/dev/null && ok "$tool" || { err "$tool not found"; exit 1; }
done
# ─── 3. Configuration ────────────────────────────────────────────────
step "3/8 Configuration"
MEDIA_SERVER_DIR="${HOME}/media-server"
# Containers run as the invoking user so host-mounted files stay writable.
PUID="$(id -u)"
PGID="$(id -g)"
if $IMPORTED; then
  # Imported runs only ask for what a snapshot cannot carry: the install
  # directory and a fresh (one-time, short-lived) Plex claim token.
  prompt_default MEDIA_SERVER_DIR "Install directory" "$MEDIA_SERVER_DIR"
  echo ""
  echo -e "${BOLD}Plex${NC} (claim tokens are one-time — you always need a fresh one)"
  echo " Get one at https://plex.tv/claim (expires in 4 min)"
  prompt PLEX_CLAIM "Plex claim token"
  VPN_SOURCE="imported"
else
  echo ""
  echo -e "${BOLD}General${NC}"
  prompt_default MEDIA_SERVER_DIR "Install directory" "$MEDIA_SERVER_DIR"
  prompt_default TZ "Timezone" "America/New_York"
  echo ""
  echo -e "${BOLD}Plex${NC}"
  echo " Get a claim token at https://plex.tv/claim (expires in 4 min)"
  prompt PLEX_CLAIM "Plex claim token"
  echo ""
  echo -e "${BOLD}Download client (NzbDAV)${NC}"
  echo " NzbDAV is configured via its web UI after first boot."
  echo " Choose a WebDAV password now — you'll enter it in the UI later."
  # The password is only used locally to build rclone.conf (step 4).
  prompt NZBDAV_WEBDAV_PASS "WebDAV password (choose one)" true
  echo ""
  echo -e "${BOLD}Usenet indexer${NC}"
  echo " Any Newznab-compatible indexer works (NzbGEEK, NZBFinder, DrunkenSlug, etc.)"
  prompt INDEXER_NAME "Indexer name (e.g. NzbGEEK)"
  prompt INDEXER_URL "Indexer Newznab URL (e.g. https://api.nzbgeek.info/)"
  prompt INDEXER_API_KEY "Indexer API key" true
  # Newznab numeric category IDs, passed through to configure.py later.
  prompt_default INDEXER_TV_CATS "TV categories (comma-separated Newznab IDs)" "5030,5040"
  prompt_default INDEXER_ANIME_CATS "Anime categories (blank to skip)" "5070"
  prompt_default INDEXER_MOVIE_CATS "Movie categories" "2000,2010,2020,2030,2040,2045,2050,2060"
  echo ""
  echo -e "${BOLD}VPN${NC} (routes Usenet traffic through a WireGuard tunnel)"
  USE_VPN=false
  if confirm "Set up a VPN?"; then
    USE_VPN=true
    # VPN_SOURCE drives step 5: "mozilla" auto-generates a server pool
    # from the Mozilla VPN CLI; "manual" installs a user-supplied wg0.conf.
    MOZVPN="/Applications/Mozilla VPN.app/Contents/MacOS/Mozilla VPN"
    HAS_MOZVPN=false
    [[ -x "$MOZVPN" ]] && HAS_MOZVPN=true
    if $HAS_MOZVPN && confirm " Mozilla VPN detected — auto-generate server pool?"; then
      VPN_SOURCE="mozilla"
    else
      VPN_SOURCE="manual"
      echo " Provide a standard WireGuard config ([Interface] + [Peer])."
      prompt WG_CONF_PATH " Path to wg0.conf"
    fi
  fi
fi
# ─── 4. Files ─────────────────────────────────────────────────────────
step "4/8 Directory structure & config files"
# One tree holds everything: per-service config, the media library,
# the rclone mount point, helper scripts, and logs.
mkdir -p "$MEDIA_SERVER_DIR"/{config/{plex,nzbdav,overseerr,sonarr,radarr,gluetun/wireguard},data/media/{tv,anime,movies},mnt,scripts,logs}
ok "Directories"
# With a VPN, NzbDAV shares gluetun's network namespace, so other
# containers must reach it via the "gluetun" hostname instead.
DL_CLIENT_HOST="nzbdav"
$USE_VPN && DL_CLIENT_HOST="gluetun"
# .env — consumed by docker-compose variable substitution.
# Heredoc is unquoted on purpose so the ${...} values expand here.
cat > "$MEDIA_SERVER_DIR/.env" << ENVEOF
PUID=${PUID}
PGID=${PGID}
TZ=${TZ}
MEDIA_ROOT=${MEDIA_SERVER_DIR}/data
PLEX_CLAIM=${PLEX_CLAIM}
NZBDAV_PORT=3000
SONARR_PORT=8989
RADARR_PORT=7878
OVERSEERR_PORT=5055
PLEX_PORT=32400
ENVEOF
ok ".env"
# docker-compose.yml — generated by the embedded Python below so that the
# gluetun/nzbdav wiring can differ between VPN and non-VPN installs.
COMPOSE_FILE="$MEDIA_SERVER_DIR/docker-compose.yml"
python3 - "$USE_VPN" "$COMPOSE_FILE" << 'PYCOMPOSE'
# argv[1] is "true"/"false" (VPN enabled); argv[2] is the output path.
# Each service below is a pre-indented YAML fragment; ${...} placeholders
# are left literal for docker-compose to substitute from .env.
import sys
use_vpn = sys.argv[1] == "true"
out_path = sys.argv[2]
plex = """  plex:
    image: lscr.io/linuxserver/plex:latest
    container_name: plex
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
      - TZ=${TZ}
      - PLEX_CLAIM=${PLEX_CLAIM}
      - VERSION=docker
    volumes:
      - ./config/plex:/config
      - ${MEDIA_ROOT}/media/tv:/tv
      - ${MEDIA_ROOT}/media/movies:/movies
      - ${MEDIA_ROOT}/media/anime:/anime
      - ./mnt:/mnt:rshared
    ports:
      - ${PLEX_PORT}:32400
    restart: unless-stopped
    depends_on:
      nzbdav_rclone:
        condition: service_started
"""
# VPN tunnel container; nzbdav publishes its port through this service.
gluetun = """  gluetun:
    image: qmcgaw/gluetun
    container_name: gluetun
    cap_add:
      - NET_ADMIN
    environment:
      - VPN_SERVICE_PROVIDER=custom
      - VPN_TYPE=wireguard
      - HEALTH_VPN_DURATION_INITIAL=10s
      - HEALTH_VPN_DURATION_ADDITION=5s
      - HEALTH_TARGET_ADDRESS=1.1.1.1:443
    healthcheck:
      test: /gluetun-entrypoint healthcheck
      interval: 30s
      retries: 3
      start_period: 15s
      timeout: 5s
    volumes:
      - ./config/gluetun/wireguard:/gluetun/wireguard
    ports:
      - ${NZBDAV_PORT}:3000
    restart: unless-stopped
"""
# nzbdav variant used when a VPN is enabled: no ports of its own, it
# rides gluetun's network namespace and waits for gluetun to be healthy.
nzbdav_vpn = """  nzbdav:
    image: nzbdav/nzbdav:alpha
    container_name: nzbdav
    network_mode: "service:gluetun"
    healthcheck:
      test: curl -f http://localhost:3000/health || exit 1
      interval: 1m
      retries: 3
      start_period: 5s
      timeout: 5s
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
    volumes:
      - ./config/nzbdav:/config
      - ./mnt:/mnt
      - ${MEDIA_ROOT}/media:/data/media
    restart: unless-stopped
    depends_on:
      gluetun:
        condition: service_healthy
"""
# nzbdav variant without a VPN: publishes its own port directly.
nzbdav_plain = """  nzbdav:
    image: nzbdav/nzbdav:alpha
    container_name: nzbdav
    healthcheck:
      test: curl -f http://localhost:3000/health || exit 1
      interval: 1m
      retries: 3
      start_period: 5s
      timeout: 5s
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
    volumes:
      - ./config/nzbdav:/config
      - ./mnt:/mnt
      - ${MEDIA_ROOT}/media:/data/media
    ports:
      - ${NZBDAV_PORT}:3000
    restart: unless-stopped
"""
# Services common to both layouts: the rclone FUSE mount of NzbDAV's
# WebDAV share, plus Sonarr, Radarr, and the request frontend (seerr).
rest = """  nzbdav_rclone:
    image: rclone/rclone:latest
    container_name: nzbdav_rclone
    restart: unless-stopped
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
      - TZ=${TZ}
    volumes:
      - ./mnt:/mnt:rshared
      - ./rclone.conf:/config/rclone/rclone.conf
    cap_add:
      - SYS_ADMIN
    security_opt:
      - apparmor:unconfined
    devices:
      - /dev/fuse:/dev/fuse:rwm
    depends_on:
      nzbdav:
        condition: service_healthy
        restart: true
    command: >
      mount nzbdav: /mnt/remote/nzbdav
      --contimeout=30s
      --uid=${PUID}
      --gid=${PGID}
      --allow-other
      --links
      --use-cookies
      --vfs-cache-mode=full
      --vfs-cache-max-size=20G
      --vfs-cache-max-age=24h
      --buffer-size=0M
      --vfs-read-ahead=512M
      --dir-cache-time=20s
  sonarr:
    image: lscr.io/linuxserver/sonarr:latest
    container_name: sonarr
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
      - TZ=${TZ}
    volumes:
      - ./config/sonarr:/config
      - ${MEDIA_ROOT}:/data
      - ./mnt:/mnt:rshared
    ports:
      - ${SONARR_PORT}:8989
    restart: unless-stopped
    depends_on:
      nzbdav_rclone:
        condition: service_started
  radarr:
    image: lscr.io/linuxserver/radarr:latest
    container_name: radarr
    environment:
      - PUID=${PUID}
      - PGID=${PGID}
      - TZ=${TZ}
    volumes:
      - ./config/radarr:/config
      - ${MEDIA_ROOT}:/data
      - ./mnt:/mnt:rshared
    ports:
      - ${RADARR_PORT}:7878
    restart: unless-stopped
    depends_on:
      nzbdav_rclone:
        condition: service_started
  seerr:
    image: ghcr.io/seerr-team/seerr:latest
    container_name: seerr
    init: true
    environment:
      - TZ=${TZ}
    volumes:
      - ./config/overseerr:/app/config
    ports:
      - ${OVERSEERR_PORT}:5055
    restart: unless-stopped
    depends_on:
      - plex
      - sonarr
      - radarr
"""
# Assemble the final file: plex always, then the VPN-specific pieces,
# then the shared tail.
with open(out_path, "w") as f:
    f.write("services:\n")
    f.write(plex)
    if use_vpn:
        f.write("\n" + gluetun)
        f.write("\n" + nzbdav_vpn)
    else:
        f.write("\n" + nzbdav_plain)
    f.write("\n" + rest)
PYCOMPOSE
ok "docker-compose.yml"
# rclone.conf — credentials for mounting NzbDAV's WebDAV share.
if $IMPORTED; then
  # The snapshot already carries the rclone-obscured password.
  RCLONE_OBSCURED="$NZBDAV_WEBDAV_PASS_OBSCURED"
else
  # Obscure the password with rclone's own container. Docker may not be
  # running yet at this point (Colima only starts in step 6), so fall
  # back to a placeholder — but tell the user instead of silently
  # writing a config that can never authenticate.
  RCLONE_OBSCURED=$(docker run --rm rclone/rclone obscure "$NZBDAV_WEBDAV_PASS" 2>/dev/null || echo "PLACEHOLDER")
  if [[ "$RCLONE_OBSCURED" == "PLACEHOLDER" ]]; then
    warn "Could not obscure the WebDAV password (is docker running?)."
    warn "Replace 'pass = PLACEHOLDER' in rclone.conf with the output of: rclone obscure <password>"
  fi
fi
cat > "$MEDIA_SERVER_DIR/rclone.conf" << RCLONEEOF
[nzbdav]
type = webdav
url = http://${DL_CLIENT_HOST}:3000/
vendor = other
user = admin
pass = ${RCLONE_OBSCURED}
RCLONEEOF
ok "rclone.conf"
# ─── 5. Colima & VPN ─────────────────────────────────────────────────
step "5/8 Colima & VPN"
COLIMA_CONF="$HOME/.colima/default/colima.yaml"
# First run: start/stop once so Colima writes its default config file.
if [[ ! -f "$COLIMA_CONF" ]]; then
  info "Initializing Colima..."
  colima start --vm-type vz --mount-type virtiofs --network-address 2>/dev/null || true
  colima stop 2>/dev/null || true
fi
if [[ -f "$COLIMA_CONF" ]]; then
  # Force the ssh port forwarder; grep failing (no such key) is harmless
  # here because it is the non-final command of the && list.
  grep -q "portForwarder:" "$COLIMA_CONF" && \
    sed -i '' 's/portForwarder: .*/portForwarder: ssh/' "$COLIMA_CONF"
  ok "Colima configured"
else
  warn "Colima config not found — configure manually"
fi
WG_DIR="$MEDIA_SERVER_DIR/config/gluetun/wireguard"
if $USE_VPN; then
  if [[ "${VPN_SOURCE:-}" == "imported" ]]; then
    info "Restoring VPN configs from import..."
    python3 -c "
import json, pathlib
d = json.load(open('$IMPORT_FILE'))
wg_dir = pathlib.Path('$WG_DIR')
wg_dir.mkdir(parents=True, exist_ok=True)
count = 0
for name, content in d['vpn'].get('wireguard_configs', {}).items():
    (wg_dir / name).write_text(content + '\n')
    count += 1
print(count)
" | while read -r count; do ok "$count VPN config(s) restored"; done
  elif [[ "${VPN_SOURCE:-}" == "mozilla" ]]; then
    info "Generating VPN server pool..."
    # Let the user pick a country or default to US
    echo ""
    echo " Available server regions (showing first 20):"
    "$MOZVPN" servers 2>&1 | grep "Country:" | head -20 | sed 's/^/ /'
    echo ""
    prompt_default VPN_COUNTRY " Country code for server pool" "us"
    # Grab up to 5 servers from that country. NOTE: mapfile requires
    # bash 4+, but macOS ships bash 3.2 — use a portable read loop.
    ALL_SERVERS=()
    while IFS= read -r server_line; do
      ALL_SERVERS+=("$server_line")
    done < <("$MOZVPN" servers 2>&1 \
      | grep -A 999 "code: ${VPN_COUNTRY})" \
      | grep "Server:" \
      | head -5 \
      | awk '{print $NF}')
    generated=0
    FIRST_CONF=""
    # ${arr[@]+...} guards the expansion of an empty array under set -u
    # (a fatal "unbound variable" on bash < 4.4).
    for server in ${ALL_SERVERS[@]+"${ALL_SERVERS[@]}"}; do
      # select can fail (e.g. logged out) — skip that server, don't abort.
      "$MOZVPN" select "$server" 2>/dev/null || continue
      sleep 1
      raw=$("$MOZVPN" wgconf 2>/dev/null || true)
      if [[ -n "$raw" ]]; then
        # Normalize to a gluetun-friendly config: single Address,
        # route everything through the tunnel.
        echo "$raw" \
          | sed 's/Address = \([^,]*\),.*/Address = \1/' \
          | sed 's/AllowedIPs = .*/AllowedIPs = 0.0.0.0\/0/' \
          > "$WG_DIR/${server}.conf"
        # Not ((generated++)): that evaluates to 0 on the first pass,
        # which set -e treats as failure and aborts the whole script.
        generated=$((generated + 1))
        if [[ -z "$FIRST_CONF" ]]; then FIRST_CONF="$WG_DIR/${server}.conf"; fi
      fi
    done
    if (( generated > 0 )); then
      # Seed the active tunnel from the first config that was actually
      # produced — the pool's first server may have been skipped.
      cp "$FIRST_CONF" "$WG_DIR/wg0.conf"
      ok "$generated VPN server configs generated"
    else
      warn "No configs produced — run bootstrap again after logging into Mozilla VPN"
    fi
  elif [[ "${VPN_SOURCE:-}" == "manual" ]]; then
    if [[ -f "$WG_CONF_PATH" ]]; then
      cp "$WG_CONF_PATH" "$WG_DIR/wg0.conf"
      ok "WireGuard config installed"
    else
      err "File not found: $WG_CONF_PATH"
      warn "Place wg0.conf in $WG_DIR manually"
    fi
  fi
else
  info "No VPN — NzbDAV connects directly"
fi
# ─── 6. Start services ───────────────────────────────────────────────
step "6/8 Starting services"
if ! colima status &>/dev/null; then
  info "Starting Colima..."
  colima start --vm-type vz --mount-type virtiofs --network-address
fi
ok "Colima running"
cd "$MEDIA_SERVER_DIR"
info "Pulling images..."
docker compose pull
info "Starting containers..."
docker compose up -d
# Sonarr/Radarr write config.xml (containing their API keys) on first
# boot; wait up to ~90s for both files to appear.
info "Waiting for services to initialize..."
for _ in $(seq 1 90); do
  [[ -f "$MEDIA_SERVER_DIR/config/sonarr/config.xml" ]] && \
    [[ -f "$MEDIA_SERVER_DIR/config/radarr/config.xml" ]] && break
  sleep 1
done
[[ ! -f "$MEDIA_SERVER_DIR/config/sonarr/config.xml" ]] && { err "Sonarr didn't start."; exit 1; }
# Published ports live on the Colima VM address, not necessarily
# localhost — ask colima for the running VM's IP (fallback: localhost).
VM_IP=$(colima ls --json 2>/dev/null | python3 -c "
import json, sys
for line in sys.stdin:
    info = json.loads(line)
    if info.get('status') == 'Running':
        a = info.get('address', '')
        if a: print(a); break
" 2>/dev/null || echo "")
[[ -z "$VM_IP" ]] && VM_IP="localhost"
info "API target: $VM_IP"
# Extract the per-service API keys written into each config.xml.
SONARR_KEY=$(xmllint --xpath '//ApiKey/text()' "$MEDIA_SERVER_DIR/config/sonarr/config.xml" 2>/dev/null)
RADARR_KEY=$(xmllint --xpath '//ApiKey/text()' "$MEDIA_SERVER_DIR/config/radarr/config.xml" 2>/dev/null)
# Poll both APIs (up to ~60s each). NOTE(review): this is best-effort —
# execution continues even if a service never answers here.
for port in 8989 7878; do
  for _ in $(seq 1 30); do
    key="$SONARR_KEY"; [[ "$port" == "7878" ]] && key="$RADARR_KEY"
    curl -sf "http://${VM_IP}:${port}/api/v3/system/status" -H "X-Api-Key: $key" &>/dev/null && break
    sleep 2
  done
done
ok "Sonarr & Radarr APIs ready"
# ─── 7. Configure Sonarr & Radarr ────────────────────────────────────
step "7/8 Configuring Sonarr & Radarr"
if $IMPORTED; then
  info "Using NzbDAV API key from import file"
  echo ""
  echo "NzbDAV still needs Usenet provider setup via its web UI."
  # -e so ${BOLD}/${NC} render as ANSI codes, not literal \033[1m text.
  echo -e "Open ${BOLD}http://localhost:3000${NC} and:"
  echo " 1. Set up your Usenet provider (Settings > Usenet)"
  echo " 2. Verify WebDAV password (Settings > WebDAV)"
  echo " 3. Set Rclone Mount Dir: /mnt/remote/nzbdav (Settings > SABnzbd)"
  echo " 4. Set Repairs Library Dir: /data/media (Settings > Repairs)"
  echo ""
  echo "Press Enter once NzbDAV is configured..."
  read -r
else
  echo ""
  echo "NzbDAV needs initial setup via its web UI before we can continue."
  # -e for the same reason as above: the color variables hold raw escapes.
  echo -e "Open ${BOLD}http://localhost:3000${NC} and:"
  echo " 1. Set up your Usenet provider (Settings > Usenet)"
  echo " 2. Set the WebDAV password (Settings > WebDAV)"
  echo " 3. Set Rclone Mount Dir: /mnt/remote/nzbdav (Settings > SABnzbd)"
  echo " 4. Set Repairs Library Dir: /data/media (Settings > Repairs)"
  echo " 5. Copy the API key from the UI"
  echo ""
  prompt NZBDAV_API_KEY "NzbDAV API key" true
fi
# Download configure.py if not present locally
CONFIGURE_PY="$MEDIA_SERVER_DIR/scripts/setup/configure.py"
if [[ ! -f "$CONFIGURE_PY" ]]; then
  mkdir -p "$(dirname "$CONFIGURE_PY")"
  curl -fsSL "https://gist.githubusercontent.com/carterhudson/3a288adb2ec921996c07a5ed6fcbe918/raw/configure.py" \
    -o "$CONFIGURE_PY"
fi
# Push root folders, custom formats, download client and indexer
# settings into both services through their REST APIs.
python3 "$CONFIGURE_PY" \
  --base-dir "$MEDIA_SERVER_DIR" \
  --vm-ip "$VM_IP" \
  --dl-host "$DL_CLIENT_HOST" \
  --dl-api-key "$NZBDAV_API_KEY" \
  --indexer-name "$INDEXER_NAME" \
  --indexer-url "$INDEXER_URL" \
  --indexer-api-key "$INDEXER_API_KEY" \
  --indexer-tv-cats "$INDEXER_TV_CATS" \
  --indexer-anime-cats "$INDEXER_ANIME_CATS" \
  --indexer-movie-cats "$INDEXER_MOVIE_CATS"
ok "Sonarr & Radarr configured"
# ─── 8. Watchdog & shell aliases ─────────────────────────────────────
step "8/8 Watchdog daemon & shell aliases"
info "Downloading watchdog package..."
WATCHDOG_DIR="$MEDIA_SERVER_DIR/scripts/watchdog"
mkdir -p "$WATCHDOG_DIR"
# Fetch the watchdog Python package module-by-module from its gist.
GIST_BASE="https://gist.githubusercontent.com/carterhudson/62d10dd336038131541a592c6af8e928/raw"
for f in __init__.py __main__.py config.py api.py sonarr.py radarr.py plex.py health.py vpn.py connectivity.py; do
  curl -fsSL "$GIST_BASE/$f" -o "$WATCHDOG_DIR/$f"
done
ok "watchdog package (10 modules)"
# launchd user agent: runs the watchdog at login and keeps it alive.
# The heredoc is unquoted on purpose so ${MEDIA_SERVER_DIR} expands.
mkdir -p "$HOME/Library/LaunchAgents"
PLIST_PATH="$HOME/Library/LaunchAgents/com.slothserv.watchdog.plist"
cat > "$PLIST_PATH" << PLISTEOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>Label</key>
    <string>com.slothserv.watchdog</string>
    <key>ProgramArguments</key>
    <array>
        <string>/usr/bin/python3</string>
        <string>${MEDIA_SERVER_DIR}/scripts/watchdog</string>
    </array>
    <key>RunAtLoad</key>
    <true/>
    <key>KeepAlive</key>
    <true/>
    <key>StandardOutPath</key>
    <string>${MEDIA_SERVER_DIR}/logs/watchdog-stdout.log</string>
    <key>StandardErrorPath</key>
    <string>${MEDIA_SERVER_DIR}/logs/watchdog-stderr.log</string>
</dict>
</plist>
PLISTEOF
# load fails harmlessly if the agent is already loaded.
launchctl load "$PLIST_PATH" 2>/dev/null || true
ok "Watchdog daemon"
# Append CLI aliases exactly once, guarded by the "# SlothServ" marker.
if ! grep -q "# SlothServ" "$HOME/.zshrc" 2>/dev/null; then
  cat >> "$HOME/.zshrc" << 'ALIASEOF'
# SlothServ media server shortcuts
alias mstatus="python3 ~/media-server/scripts/cli/status.py"
alias mstatus-json="python3 ~/media-server/scripts/cli/status.py --json"
alias msearch="python3 ~/media-server/scripts/cli/episode-search.py"
alias mretry="python3 ~/media-server/scripts/cli/retry-failed.py"
alias mlogs="tail -30 ~/media-server/logs/watchdog.log"
alias mrestart="cd ~/media-server && docker compose down && colima restart && docker compose up -d"
ALIASEOF
  ok "Shell aliases → ~/.zshrc"
else
  ok "Shell aliases already present"
fi
# ─── Done ─────────────────────────────────────────────────────────────
# Final summary: what the script cannot automate (Plex claim/library
# setup, NzbDAV↔Sonarr link, watchlist import) plus the CLI cheat sheet.
echo ""
echo -e "${GREEN}${BOLD}━━━ SlothServ is running! ━━━${NC}"
echo ""
echo "Remaining manual steps:"
echo " 1. Claim Plex at http://localhost:32400/web"
echo " 2. Create libraries: TV → /tv, Anime → /anime, Movies → /movies"
echo " 3. Set Anime library: audio=Japanese, subtitles=English (always on)"
echo " 4. In NzbDAV: configure Sonarr integration (host: http://sonarr:8989)"
echo " 5. In Sonarr & Radarr: Settings > Import Lists > Plex Watchlist"
echo " 6. Set customConnections in Plex Preferences.xml to your LAN IP"
$USE_VPN && echo -e "\n VPN active — watchdog auto-rotates servers on failure."
echo ""
echo "Commands: mstatus mlogs msearch mretry mrestart"
echo ""
echo "Backup your config for easy migration to another machine:"
echo " bash bootstrap.sh --export slothserv-config.json"
echo " bash bootstrap.sh --import slothserv-config.json"
echo ""
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #!/usr/bin/env python3 | |
| """ | |
| Programmatic first-run configuration for Sonarr and Radarr. | |
| Called by bootstrap.sh after containers are running. | |
| Usage: | |
| python3 configure.py \ | |
| --vm-ip <ip> \ | |
| --dl-host <hostname> \ | |
| --dl-api-key <key> \ | |
| --indexer-name <name> \ | |
| --indexer-url <url> \ | |
| --indexer-api-key <key> \ | |
| --indexer-tv-cats 5030,5040 \ | |
| --indexer-anime-cats 5070 \ | |
| --indexer-movie-cats 2000,2010,... | |
| """ | |
| import argparse | |
| import json | |
| import sys | |
| import urllib.request | |
| import urllib.error | |
| import xml.etree.ElementTree as ET | |
| from pathlib import Path | |
def api(base_url, api_key, method, path, data=None, quiet=False):
    """Issue one JSON request against a Sonarr/Radarr v3 API.

    Args:
        base_url: Service root, e.g. "http://localhost:8989".
        api_key: Value sent in the X-Api-Key header.
        method: HTTP verb ("GET", "POST", "PUT", ...).
        path: Endpoint path appended to base_url.
        data: JSON-serializable request body, or None for no body.
        quiet: Suppress the stderr line printed on HTTP errors.

    Returns:
        The decoded JSON response, or None for an empty body.

    Raises:
        urllib.error.HTTPError: re-raised after optional logging.
    """
    # `is not None` so falsy-but-valid payloads ({}, []) are still sent;
    # a bare truthiness check would silently drop them.
    body = json.dumps(data).encode() if data is not None else None
    req = urllib.request.Request(
        f"{base_url}{path}",
        data=body,
        headers={"X-Api-Key": api_key, "Content-Type": "application/json"},
        method=method,
    )
    try:
        with urllib.request.urlopen(req, timeout=30) as r:
            raw = r.read()
            return json.loads(raw) if raw else None
    except urllib.error.HTTPError as e:
        if not quiet:
            # Include (at most) the first 200 chars of the error body —
            # the *arr APIs usually explain the failure there.
            body_text = ""
            try:
                body_text = e.read().decode()[:200]
            except Exception:
                pass
            print(f" API error {e.code} on {method} {path}: {body_text}", file=sys.stderr)
        raise
def parse_csv_ints(csv_str):
    """Turn a comma-separated ID string like "5030,5040" into a list of
    ints; None, empty, or whitespace-only input yields an empty list."""
    if csv_str is None:
        return []
    tokens = (piece.strip() for piece in csv_str.split(","))
    return [int(token) for token in tokens if token]
# Release-title pattern matching common non-English dub / dual-audio tags
# (German, French, Italian, Spanish, Portuguese, Russian). Used below in a
# "Foreign Dub" custom format that is scored to -10000 so such releases
# are never grabbed.
FOREIGN_DUB_REGEX = (
    r"\b(GerDub|German\.?Dub|GERMAN\.DL|FrenchDub|French\.?Dub|VOSTFR"
    r"|ITA\.?Dub|Italian\.?Dub|SPA\.?Dub|Spanish\.?Dub|POR\.?Dub"
    r"|RUS\.?Dub|DUBBiT|DUAL\.?AUDIO\.?(?:GER|FRE|SPA|ITA|POR|RUS))\b"
)
def configure_service(service, base_url, api_key, args):
    """Apply first-run configuration to one *arr instance via its v3 API.

    Sets up root folders, custom formats (with punitive scores), extra
    quality profiles (Sonarr only), the NzbDAV download client, and the
    Newznab indexer. NOTE(review): assumes a fresh instance — the POSTs
    raise HTTPError if an object with the same name already exists.

    Args:
        service: "sonarr" or "radarr" (selects paths/categories).
        base_url: Service root URL.
        api_key: X-Api-Key for the service.
        args: Parsed CLI namespace (dl_host, dl_api_key, indexer_*).
    """
    is_sonarr = service == "sonarr"
    print(f"\n Configuring {service}...")
    # Root folders
    if is_sonarr:
        api(base_url, api_key, "POST", "/api/v3/rootfolder", {"path": "/data/media/tv"})
        api(base_url, api_key, "POST", "/api/v3/rootfolder", {"path": "/data/media/anime"})
        print(f" [ok] root folders: /data/media/tv, /data/media/anime")
    else:
        api(base_url, api_key, "POST", "/api/v3/rootfolder", {"path": "/data/media/movies"})
        print(f" [ok] root folder: /data/media/movies")
    # Custom formats: tag releases we never want (foreign dubs, wrong
    # original language); scored below to effectively ban them.
    foreign_dub = api(base_url, api_key, "POST", "/api/v3/customformat", {
        "name": "Foreign Dub",
        "specifications": [{
            "name": "Foreign Dub Tags",
            "implementation": "ReleaseTitleSpecification",
            "fields": [{"name": "value", "value": FOREIGN_DUB_REGEX}],
            "negate": False, "required": True,
        }],
    })
    # value -2 is the *arr "Original" language sentinel; negate=False +
    # this spec flags releases NOT in the title's original language.
    not_orig = api(base_url, api_key, "POST", "/api/v3/customformat", {
        "name": "Not Original Language",
        "specifications": [{
            "name": "Original Language",
            "implementation": "LanguageSpecification",
            "fields": [
                {"name": "value", "value": -2},
                {"name": "exceptLanguage", "value": False},
            ],
            "negate": False, "required": True,
        }],
    })
    if is_sonarr:
        # value 3 = season-pack release type; format kept at default score
        # (informational tag only — no score applied below).
        api(base_url, api_key, "POST", "/api/v3/customformat", {
            "name": "Season Pack",
            "specifications": [{
                "name": "Season Pack",
                "implementation": "ReleaseTypeSpecification",
                "fields": [{"name": "value", "value": 3}],
                "negate": False, "required": True,
            }],
        })
    fd_id = foreign_dub["id"]
    no_id = not_orig["id"]
    print(f" [ok] custom formats (Foreign Dub={fd_id}, Not Original Language={no_id})")
    # Apply scores to quality profiles: -10000 makes matching releases
    # unselectable in every existing profile.
    profiles = api(base_url, api_key, "GET", "/api/v3/qualityprofile") or []
    for p in profiles:
        changed = False
        for item in p.get("formatItems", []):
            if item.get("format", 0) in (fd_id, no_id):
                item["score"] = -10000
                changed = True
        if changed:
            api(base_url, api_key, "PUT", f"/api/v3/qualityprofile/{p['id']}", p)
    print(f" [ok] custom format scores applied")
    # Extra quality profiles for Sonarr: clone the stock HD-1080p profile
    # (re-fetched so it includes the scores applied above).
    if is_sonarr:
        profiles = api(base_url, api_key, "GET", "/api/v3/qualityprofile") or []
        hd_template = None
        for p in profiles:
            if p["name"] == "HD-1080p":
                hd_template = p
                break
        if hd_template:
            for name in ("HD 1080p+", "Anime HD 1080p+"):
                # Shallow copy is fine: the dict is only serialized for POST.
                clone = dict(hd_template)
                clone.pop("id", None)  # let the server assign a new id
                clone["name"] = name
                api(base_url, api_key, "POST", "/api/v3/qualityprofile", clone)
            print(f" [ok] quality profiles: HD 1080p+, Anime HD 1080p+")
    # Download client: NzbDAV speaks the SABnzbd API, so register it as one.
    cat_field = "tvCategory" if is_sonarr else "movieCategory"
    cat_value = "tv" if is_sonarr else "movies"
    api(base_url, api_key, "POST", "/api/v3/downloadclient", {
        "name": "NzbDAV",
        "implementation": "Sabnzbd",
        "configContract": "SabnzbdSettings",
        "enable": True,
        "fields": [
            {"name": "host", "value": args.dl_host},
            {"name": "port", "value": 3000},
            {"name": "apiKey", "value": args.dl_api_key},
            {"name": "useSsl", "value": False},
            {"name": cat_field, "value": cat_value},
        ],
    })
    print(f" [ok] download client: NzbDAV → {args.dl_host}")
    # Indexer: one Newznab entry; Sonarr additionally gets anime categories
    # and absolute-numbering fallback search when anime cats are provided.
    fields = [
        {"name": "baseUrl", "value": args.indexer_url},
        {"name": "apiPath", "value": "/api"},
        {"name": "apiKey", "value": args.indexer_api_key},
    ]
    if is_sonarr:
        fields.append({"name": "categories", "value": parse_csv_ints(args.indexer_tv_cats)})
        anime_cats = parse_csv_ints(args.indexer_anime_cats)
        if anime_cats:
            fields.append({"name": "animeCategories", "value": anime_cats})
            fields.append({"name": "animeStandardFormatSearch", "value": True})
    else:
        fields.append({"name": "categories", "value": parse_csv_ints(args.indexer_movie_cats)})
    api(base_url, api_key, "POST", "/api/v3/indexer", {
        "name": args.indexer_name,
        "implementation": "Newznab",
        "configContract": "NewznabSettings",
        "enable": True,
        "fields": fields,
    })
    print(f" [ok] indexer: {args.indexer_name}")
def main():
    """Parse CLI args, read both *arr API keys, and configure each service.

    Exits with a readable error if a service's config.xml has no ApiKey
    (previously this crashed with a bare AttributeError on .text).
    """
    parser = argparse.ArgumentParser(description="Configure Sonarr & Radarr")
    parser.add_argument("--base-dir", default=str(Path(__file__).resolve().parent.parent.parent))
    parser.add_argument("--vm-ip", default="localhost")
    parser.add_argument("--dl-host", required=True)
    parser.add_argument("--dl-api-key", required=True)
    parser.add_argument("--indexer-name", required=True)
    parser.add_argument("--indexer-url", required=True)
    parser.add_argument("--indexer-api-key", required=True)
    parser.add_argument("--indexer-tv-cats", default="5030,5040")
    parser.add_argument("--indexer-anime-cats", default="5070")
    parser.add_argument("--indexer-movie-cats", default="2000,2010,2020,2030,2040,2045,2050,2060")
    args = parser.parse_args()
    base = Path(args.base_dir)

    def read_api_key(name):
        # config.xml is written by the service on first boot; a missing or
        # empty ApiKey means it has not finished initializing.
        cfg = base / f"config/{name}/config.xml"
        node = ET.parse(str(cfg)).find("ApiKey")
        if node is None or not (node.text or "").strip():
            sys.exit(f"error: no ApiKey in {cfg} — is {name} initialized?")
        return node.text.strip()

    sonarr_key = read_api_key("sonarr")
    radarr_key = read_api_key("radarr")
    sonarr_url = f"http://{args.vm_ip}:8989"
    radarr_url = f"http://{args.vm_ip}:7878"
    configure_service("sonarr", sonarr_url, sonarr_key, args)
    configure_service("radarr", radarr_url, radarr_key, args)
    print("\n Sonarr & Radarr configured.")
    print(" Remaining: add Plex Watchlist import list in each UI (requires Plex OAuth).")
if __name__ == "__main__":
    main()
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #!/usr/bin/env python3 | |
| """ | |
| Export the running SlothServ stack's credentials and configuration | |
| into a portable JSON file that bootstrap.sh --import can consume. | |
| Usage: | |
| python3 export-config.py [-o slothserv-config.json] | |
| What it captures: | |
| - Timezone | |
| - NzbDAV API key + WebDAV password (obscured, from rclone.conf) | |
| - Indexer name, URL, API key, and category IDs (from Sonarr API) | |
| - VPN enabled flag + all WireGuard config files | |
| - Sonarr/Radarr custom format names (for reference, not re-applied) | |
| What it does NOT capture (must be re-entered on fresh install): | |
| - Plex claim token (one-time, expires in 4 minutes) | |
| - NzbDAV Usenet provider credentials (stored in NzbDAV's own DB) | |
| - Plex server identity / Preferences.xml | |
| """ | |
| import argparse | |
| import json | |
| import os | |
| import sys | |
| import urllib.request | |
| import urllib.error | |
| import xml.etree.ElementTree as ET | |
| from datetime import datetime, timezone | |
| from pathlib import Path | |
def api(base_url, api_key, path):
    """GET ``path`` from a *arr-style JSON API and return the decoded payload.

    Authenticates via the X-Api-Key header; raises urllib errors on HTTP or
    network failure (callers wrap in try/except as needed).
    """
    url = f"{base_url}{path}"
    headers = {"X-Api-Key": api_key, "Content-Type": "application/json"}
    request = urllib.request.Request(url, headers=headers)
    with urllib.request.urlopen(request, timeout=15) as response:
        raw = response.read()
    return json.loads(raw)
def get_field(fields, name):
    """Return the ``value`` of the first field dict whose ``name`` matches.

    ``fields`` is a *arr API "fields" list of dicts. A matching field without
    a ``value`` key, or no match at all, yields the empty string.
    """
    hits = (f.get("value", "") for f in fields if f.get("name") == name)
    return next(hits, "")
def _detect_vm_ip():
    """Return the address of the running Colima VM, or 'localhost' as fallback.

    Best-effort: any subprocess or parse failure silently falls back, since
    the export can still proceed against localhost.
    """
    import subprocess
    try:
        result = subprocess.run(
            ["/opt/homebrew/bin/colima", "ls", "--json"],
            capture_output=True, text=True, timeout=10,
            # Homebrew paths first so colima's own tooling resolves.
            env={**os.environ, "PATH": "/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin"},
        )
        if result.returncode == 0:
            # colima emits one JSON object per line, one per VM profile.
            for line in result.stdout.strip().splitlines():
                info = json.loads(line)
                if info.get("status") == "Running" and info.get("address", ""):
                    return info["address"]
    except Exception:
        pass  # best effort — fall back to localhost
    return "localhost"


def _load_env(env_path):
    """Parse KEY=VALUE pairs from a .env file, skipping blanks and # comments."""
    env_vars = {}
    if env_path.exists():
        for raw in env_path.read_text().splitlines():
            line = raw.strip()
            if line and not line.startswith("#") and "=" in line:
                key, value = line.split("=", 1)
                env_vars[key.strip()] = value.strip()
    return env_vars


def _read_sonarr_key(base):
    """Return Sonarr's API key from config.xml, exiting on failure (it is required)."""
    try:
        return ET.parse(str(base / "config/sonarr/config.xml")).find("ApiKey").text
    except Exception:
        print("Error: Cannot read Sonarr API key", file=sys.stderr)
        sys.exit(1)


def _indexer_from_sonarr(sonarr_url, sonarr_key):
    """Read the first Sonarr indexer's name/URL/key/categories; {} on any failure."""
    try:
        indexers = api(sonarr_url, sonarr_key, "/api/v3/indexer")
        if not indexers:
            return {}
        idx = indexers[0]
        fields = idx.get("fields", [])
        return {
            "name": idx.get("name", ""),
            "url": get_field(fields, "baseUrl"),
            "api_key": get_field(fields, "apiKey"),
            "tv_categories": ",".join(str(c) for c in (get_field(fields, "categories") or [])),
            "anime_categories": ",".join(str(c) for c in (get_field(fields, "animeCategories") or [])),
        }
    except Exception as e:
        print(f"Warning: Could not read indexer config: {e}", file=sys.stderr)
        return {}


def _movie_categories(base, vm_ip):
    """Return Radarr's first indexer's category list as a CSV string, or None.

    None means "could not be read" — the caller omits the key entirely in
    that case, matching the original best-effort behavior.
    """
    try:
        radarr_key = ET.parse(str(base / "config/radarr/config.xml")).find("ApiKey").text
        radarr_indexers = api(f"http://{vm_ip}:7878", radarr_key, "/api/v3/indexer")
        if radarr_indexers:
            fields = radarr_indexers[0].get("fields", [])
            return ",".join(str(c) for c in (get_field(fields, "categories") or []))
    except Exception:
        pass
    return None


def _nzbdav_key(sonarr_url, sonarr_key):
    """Return the NzbDAV API key from Sonarr's SABnzbd-type download client, or ''."""
    try:
        for client in api(sonarr_url, sonarr_key, "/api/v3/downloadclient"):
            if client.get("implementation") == "Sabnzbd":
                return get_field(client.get("fields", []), "apiKey")
    except Exception:
        pass
    return ""


def _rclone_password(rclone_conf):
    """Return the obscured WebDAV password from rclone.conf's 'pass =' line, or ''."""
    if rclone_conf.exists():
        for line in rclone_conf.read_text().splitlines():
            if line.strip().startswith("pass ="):
                return line.split("=", 1)[1].strip()
    return ""


def _wireguard_configs(wg_dir):
    """Collect non-placeholder WireGuard .conf files as {filename: contents}."""
    wg_configs = {}
    if wg_dir.is_dir():
        for conf in sorted(wg_dir.glob("*.conf")):
            content = conf.read_text().strip()
            # Skip the template stub the bootstrap drops in before real configs exist.
            if content and "PLACEHOLDER" not in content:
                wg_configs[conf.name] = content
    return wg_configs


def main():
    """Snapshot the live SlothServ stack into a portable JSON config file.

    Gathers timezone, NzbDAV/indexer credentials, and VPN state from the
    running services and on-disk config, writes them to --output, and prints
    a summary. Exits non-zero only if the Sonarr API key cannot be read.
    """
    parser = argparse.ArgumentParser(description="Export SlothServ config")
    parser.add_argument("-o", "--output", default="slothserv-config.json")
    # Default base dir: three levels above this script (repo root layout).
    parser.add_argument("--base-dir", default=str(Path(__file__).resolve().parent.parent.parent))
    args = parser.parse_args()
    base = Path(args.base_dir)

    vm_ip = _detect_vm_ip()
    tz = _load_env(base / ".env").get("TZ", "America/New_York")

    sonarr_key = _read_sonarr_key(base)
    sonarr_url = f"http://{vm_ip}:8989"

    indexer_config = _indexer_from_sonarr(sonarr_url, sonarr_key)
    movie_cats = _movie_categories(base, vm_ip)
    if movie_cats is not None:
        indexer_config["movie_categories"] = movie_cats

    nzbdav_api_key = _nzbdav_key(sonarr_url, sonarr_key)
    rclone_pass = _rclone_password(base / "rclone.conf")

    # VPN: gluetun's presence in docker-compose.yml is the enabled flag.
    compose_path = base / "docker-compose.yml"
    vpn_enabled = compose_path.exists() and "gluetun" in compose_path.read_text()
    wg_configs = _wireguard_configs(base / "config" / "gluetun" / "wireguard")

    export = {
        "version": 1,
        "exported_at": datetime.now(timezone.utc).isoformat(),
        "general": {
            "timezone": tz,
        },
        "nzbdav": {
            "api_key": nzbdav_api_key,
            "webdav_password_obscured": rclone_pass,
        },
        "indexer": indexer_config,
        "vpn": {
            "enabled": vpn_enabled,
            "wireguard_configs": wg_configs,
        },
    }

    output_path = Path(args.output)
    output_path.write_text(json.dumps(export, indent=2) + "\n")

    # Summary
    print(f"\nExported to: {output_path.resolve()}")
    print(f"  Timezone:    {tz}")
    print(f"  Indexer:     {indexer_config.get('name', '?')}")
    print(f"  NzbDAV key:  {'yes' if nzbdav_api_key else 'missing'}")
    print(f"  WebDAV pass: {'yes (obscured)' if rclone_pass else 'missing'}")
    print(f"  VPN:         {'enabled' if vpn_enabled else 'disabled'}")
    print(f"  WG configs:  {len(wg_configs)} file(s)")
    print()
    print("This file contains secrets. Keep it safe.")
    print("To restore on a new machine:")
    print(f"  bash bootstrap.sh --import {output_path.name}")


if __name__ == "__main__":
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment