diff --git a/setup-ai-stack.sh b/setup-ai-stack.sh
index 00ed9c0..809c9c5 100644
--- a/setup-ai-stack.sh
+++ b/setup-ai-stack.sh
@@ -195,31 +195,6 @@ clean_container() {
 # Handle INSTALL (no --update passed) ------------------------------------------------------------------------INSTALL SECTION BELOW
 
 
-# open-webui INSTALL
-if [[ "$1" != "--update" ]]; then
-  if check_container_running "open-webui"; then
-    echo "✅ Skipping Open WebUI + Ollama install (already running)."
-  else
-    read -p "➕ Install Open WebUI + Ollama? [y/N]: " openui
-    if [[ "$openui" =~ ^[Yy]$ ]]; then
-      echo "🧠 Setting up Open WebUI + Ollama..."
-      docker stop open-webui 2>/dev/null || true
-      docker rm open-webui 2>/dev/null || true
-      docker run -d -p 3000:8080 --gpus all \
-        -e OPENAI_API_BASE_URL=http://pipelines:9099 \
-        -e OPENAI_API_KEY=0p3n-w3bu! \
-        -e OLLAMA_MAX_LOADED_MODELS=2 \
-        -v ollama:/root/.ollama \
-        -v open-webui:/app/backend/data \
-        --name open-webui \
-        --network ai-stack-net \
-        --restart always \
-        ghcr.io/open-webui/open-webui:ollama
-      echo "🌐 Open WebUI running at http://localhost:3000"
-    fi
-  fi
-fi
-
 
 # jupyterlab INSTALL
 if [[ "$1" != "--update" ]]; then
@@ -489,6 +464,32 @@ if [[ "$1" != "--update" ]]; then
 fi
 
 
+# open-webui INSTALL
+if [[ "$1" != "--update" ]]; then
+  if check_container_running "open-webui"; then
+    echo "✅ Skipping Open WebUI + Ollama install (already running)."
+  else
+    read -rp "➕ Install Open WebUI + Ollama? [y/N]: " openui
+    if [[ "$openui" =~ ^[Yy]$ ]]; then
+      echo "🧠 Setting up Open WebUI + Ollama..."
+      docker stop open-webui 2>/dev/null || true
+      docker rm open-webui 2>/dev/null || true
+      docker run -d -p 3000:8080 --gpus all \
+        -e OPENAI_API_BASE_URL=http://pipelines:9099 \
+        -e 'OPENAI_API_KEY=0p3n-w3bu!' \
+        -e OLLAMA_MAX_LOADED_MODELS=2 \
+        -v ollama:/root/.ollama \
+        -v open-webui:/app/backend/data \
+        --name open-webui \
+        --network ai-stack-net \
+        --restart always \
+        ghcr.io/open-webui/open-webui:ollama
+      echo "🌐 Open WebUI running at http://localhost:3000"
+    fi
+  fi
+fi
+
+
 # pipelines INSTALL
 if [[ "$1" != "--update" ]]; then
   if check_container_running "pipelines"; then