desperate

This commit is contained in:
crowetic 2025-05-19 18:55:56 -07:00
parent 769c52aea9
commit dffcd2d318

View File

@@ -524,7 +524,7 @@ if [[ "$1" != "--update" ]]; then
if check_container_running "localai"; then
echo "✅ Skipping LocalAI install (already running)."
else
-read -p " Install LocalAI (GPU-accelerated)? [y/N]: " localai_prompt
+read -p " Install LocalAI (GPU-custom)? [y/N]: " localai_prompt
if [[ "$localai_prompt" =~ ^[Yy]$ ]]; then
echo "🧠 Installing LocalAI (Custom created localAI container)..."
@@ -540,13 +540,10 @@ if [[ "$1" != "--update" ]]; then
-p 8080:8080 \
-v ~/ai-stack/localai/models:/models \
-v ~/ai-stack/localai/models:/app/models \
--v ~/ai-stack/localai/config:/app/configuration\
+-v ~/ai-stack/localai/config:/config \
-e ENABLE_BACKENDS=llama-cuda,whispercpp,stablediffusion,ollama \
-e INCLUDE_DEFAULT_MODELS=true \
-e AUTOLOAD_MODELS=true \
-e MODEL_PATH=/models \
-e CONFIG_PATH=/config \
-e LOG_LEVEL=debug \
--restart unless-stopped \
localai:custom