Modified start commands for open-webui and localai to optimize GPU usage

This commit is contained in:
crowetic 2025-05-21 11:00:26 -07:00
parent 2c6cb28b56
commit a07cbe73a3

View File

@ -195,31 +195,6 @@ clean_container() {
# Handle INSTALL (no --update passed) ------------------------------------------------------------------------INSTALL SECTION BELOW
# open-webui INSTALL
if [[ "$1" != "--update" ]]; then
  if check_container_running "open-webui"; then
    echo "✅ Skipping Open WebUI + Ollama install (already running)."
  else
    # -r keeps read from mangling backslashes in the user's answer (SC2162)
    read -rp " Install Open WebUI + Ollama? [y/N]: " openui
    if [[ "$openui" =~ ^[Yy]$ ]]; then
      echo "🧠 Setting up Open WebUI + Ollama..."
      # Remove any stale container so the fresh run cannot collide on the name;
      # failures here are expected when no container exists.
      docker stop open-webui 2>/dev/null || true
      docker rm open-webui 2>/dev/null || true
      # OLLAMA_MAX_LOADED_MODELS=2 caps concurrently loaded models to limit GPU
      # memory pressure. Env values are quoted so '!' and ':' survive verbatim.
      docker run -d -p 3000:8080 --gpus all \
        -e OPENAI_API_BASE_URL="http://pipelines:9099" \
        -e OPENAI_API_KEY="0p3n-w3bu!" \
        -e OLLAMA_MAX_LOADED_MODELS=2 \
        -v ollama:/root/.ollama \
        -v open-webui:/app/backend/data \
        --name open-webui \
        --network ai-stack-net \
        --restart always \
        ghcr.io/open-webui/open-webui:ollama
      echo "🌐 Open WebUI running at http://localhost:3000"
    fi
  fi
fi
# jupyterlab INSTALL
if [[ "$1" != "--update" ]]; then
@ -489,6 +464,32 @@ if [[ "$1" != "--update" ]]; then
fi
# open-webui INSTALL
if [[ "$1" != "--update" ]]; then
  if check_container_running "open-webui"; then
    echo "✅ Skipping Open WebUI + Ollama install (already running)."
  else
    # -r keeps read from mangling backslashes in the user's answer (SC2162)
    read -rp " Install Open WebUI + Ollama? [y/N]: " openui
    if [[ "$openui" =~ ^[Yy]$ ]]; then
      echo "🧠 Setting up Open WebUI + Ollama..."
      # Remove any stale container so the fresh run cannot collide on the name;
      # failures here are expected when no container exists.
      docker stop open-webui 2>/dev/null || true
      docker rm open-webui 2>/dev/null || true
      # OLLAMA_MAX_LOADED_MODELS=2 caps concurrently loaded models to limit GPU
      # memory pressure. Env values are quoted so '!' and ':' survive verbatim.
      docker run -d -p 3000:8080 --gpus all \
        -e OPENAI_API_BASE_URL="http://pipelines:9099" \
        -e OPENAI_API_KEY="0p3n-w3bu!" \
        -e OLLAMA_MAX_LOADED_MODELS=2 \
        -v ollama:/root/.ollama \
        -v open-webui:/app/backend/data \
        --name open-webui \
        --network ai-stack-net \
        --restart always \
        ghcr.io/open-webui/open-webui:ollama
      echo "🌐 Open WebUI running at http://localhost:3000"
    fi
  fi
fi
# pipelines INSTALL
if [[ "$1" != "--update" ]]; then
if check_container_running "pipelines"; then