Added LocalAI container to setup and start scripts
Added a local Docker network to allow cross-container communication.
parent c4fe4a522f
commit d0cc730469
@@ -31,6 +31,7 @@ if [[ "$1" == "--update" ]]; then
 docker pull jupyter/scipy-notebook:latest
 docker run -d \
   --name jupyterlab \
+  --network ai-stack-net \
   --gpus all \
   -p 8888:8888 \
   -v jupyter_data:/home/jovyan/work \
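Note: every run command in this commit joins its container to ai-stack-net, which must exist before the first docker run. The setup script presumably creates it up front; a minimal, idempotent sketch of that step:

    docker network create ai-stack-net 2>/dev/null || true   # no-op if the network already exists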
@@ -48,6 +49,7 @@ if [[ "$1" == "--update" ]]; then
 docker pull ggerganov/whisper.cpp:latest
 docker run -d \
   --name whisper \
+  --network ai-stack-net \
   --gpus all \
   -v whisper_data:/app/data \
   ggerganov/whisper.cpp:latest
@@ -64,6 +66,7 @@ if [[ "$1" == "--update" ]]; then
 docker pull abraham-ai/automatic1111-webui:latest
 docker run -d \
   --name stable-diffusion \
+  --network ai-stack-net \
   --gpus all \
   -p 7860:7860 \
   -v sd_models:/data \
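After an update pass, network membership can be checked with Docker's built-in inspector (nothing assumed here beyond the network name used above):

    docker network inspect ai-stack-net --format '{{range .Containers}}{{.Name}} {{end}}'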
@@ -79,11 +82,31 @@ if [[ "$1" == "--update" ]]; then
 docker pull ghcr.io/cmdr2/comfyui:latest
 docker run -d --gpus all \
   --name comfyui \
+  --network ai-stack-net \
   -p 8188:8188 \
   -v comfyui_data:/workspace \
   ghcr.io/cmdr2/comfyui:latest
 echo "✅ ComfyUI updated and restarted."

+echo "🔁 Updating LocalAI..."
+docker stop localai 2>/dev/null || true
+docker rm localai 2>/dev/null || true
+
+mkdir -p ~/ai-stack/localai
+
+docker pull quay.io/go-skynet/local-ai:latest
+docker run -d \
+  --name localai \
+  --network ai-stack-net \
+  -p 8080:8080 \
+  -v ~/ai-stack/localai:/models \
+  -e MODELS_PATH=/models \
+  -e ENABLE_OLLAMA_BACKEND=true \
+  -v /var/run/docker.sock:/var/run/docker.sock \
+  quay.io/go-skynet/local-ai:latest
+
+echo "✅ LocalAI updated and restarted."
+

 echo "🔁 Updating Whisper.cpp (custom GPU build)..."
 WHISPER_DIR=~/ai-stack/whisper.cpp
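With each re-created container on ai-stack-net, Docker's embedded DNS resolves every service by its --name, so containers can call one another directly instead of going through host-published ports. A hypothetical check from inside the jupyterlab container, assuming curl is present in that image (/v1/models is LocalAI's OpenAI-compatible model listing):

    docker exec jupyterlab curl -s http://localai:8080/v1/models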
@@ -108,6 +131,7 @@ if [[ "$1" == "--update" ]]; then
 docker build -t a1111-webui .
 docker run -d --gpus all \
   --name stable-diffusion \
+  --network ai-stack-net \
   -p 7860:7860 \
   -v ~/ai-stack/sd-models:/app/models/Stable-diffusion \
   -v ~/ai-stack/stable-diffusion/outputs:/app/outputs \
@@ -127,6 +151,7 @@ if [[ "$1" == "--update" ]]; then
 docker build -t tg-webui .
 docker run -d --gpus all \
   --name tg-webui \
+  --network ai-stack-net \
   -p 7861:7861 \
   -v ~/ai-stack/tg-models:/app/models \
   tg-webui
@@ -145,6 +170,7 @@ if [[ "$1" == "--update" ]]; then
 docker build -t comfyui-custom .
 docker run -d --gpus all \
   --name comfyui \
+  --network ai-stack-net \
   -p 8188:8188 \
   -v ~/ai-stack/comfyui/workspace:/app/output \
   comfyui-custom
@@ -176,6 +202,7 @@ if [[ "$1" != "--update" ]]; then
   -v ollama:/root/.ollama \
   -v open-webui:/app/backend/data \
   --name open-webui \
+  --network ai-stack-net \
   --restart always \
   ghcr.io/open-webui/open-webui:ollama
 echo "🌐 Open WebUI running at http://localhost:3000"
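A hedged aside: since the :ollama tag bundles an Ollama server inside the Open WebUI container, other containers on ai-stack-net could in principle reach it by name, assuming the bundled server listens on all interfaces on its default port 11434. A throwaway-container sketch of that check (/api/tags is Ollama's local model listing):

    docker run --rm --network ai-stack-net curlimages/curl -s http://open-webui:11434/api/tags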
@@ -195,6 +222,7 @@ if [[ "$1" != "--update" ]]; then
 docker rm jupyterlab 2>/dev/null || true
 docker run -d \
   --name jupyterlab \
+  --network ai-stack-net \
   --gpus all \
   -p 8888:8888 \
   -v jupyter_data:/home/jovyan/work \
@@ -243,7 +271,7 @@ EOF

 echo "✅ Whisper.cpp built successfully."
 echo "To run it:"
-echo "docker run --rm --gpus all -v ~/ai-stack/audio:/audio -v ~/ai-stack/whisper.cpp/models:/opt/whisper.cpp/models whispercpp-gpu ./main -m models/ggml-large.bin -f /audio/test.wav"
+echo "docker run --rm --gpus all --network ai-stack-net -v ~/ai-stack/audio:/audio -v ~/ai-stack/whisper.cpp/models:/opt/whisper.cpp/models whispercpp-gpu ./main -m models/ggml-large.bin -f /audio/test.wav"
 fi
 fi
 fi
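The echoed command expects models/ggml-large.bin to already exist under ~/ai-stack/whisper.cpp/models. whisper.cpp ships a download helper in its models/ directory; a sketch, assuming the checkout lives at the WHISPER_DIR path used above and using the model name implied by ggml-large.bin:

    cd ~/ai-stack/whisper.cpp
    bash ./models/download-ggml-model.sh large   # fetches ggml-large.bin into models/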
@@ -297,6 +325,7 @@ EOF
 echo "🚀 Running Stable Diffusion WebUI on port 7860..."
 docker run -d --gpus all \
   --name stable-diffusion \
+  --network ai-stack-net \
   -p 7860:7860 \
   -v ~/ai-stack/sd-models:/app/models/Stable-diffusion \
   -v ~/ai-stack/stable-diffusion/outputs:/app/outputs \
@@ -350,6 +379,7 @@ EOF
 echo "🚀 Running text-generation-webui on port 7861..."
 docker run -d --gpus all \
   --name tg-webui \
+  --network ai-stack-net \
   -p 7861:7861 \
   -v ~/ai-stack/tg-models:/app/models \
   tg-webui
@@ -403,6 +433,7 @@ EOF
 echo "🚀 Running ComfyUI on port 8188..."
 docker run -d --gpus all \
   --name comfyui \
+  --network ai-stack-net \
   -p 8188:8188 \
   -v ~/ai-stack/comfyui/workspace:/app/output \
   comfyui-custom
@@ -412,6 +443,33 @@ EOF
 fi
 fi

+if [[ "$1" != "--update" ]]; then
+  if check_container_running "localai"; then
+    echo "✅ Skipping LocalAI install (already running)."
+  else
+    read -p "➕ Install LocalAI (OpenAI-compatible API proxy)? [y/N]: " lai
+    if [[ "$lai" =~ ^[Yy]$ ]]; then
+      echo "🧠 Setting up LocalAI..."
+
+      docker stop localai 2>/dev/null || true
+      docker rm localai 2>/dev/null || true
+
+      mkdir -p ~/ai-stack/localai
+
+      docker run -d \
+        --name localai \
+        --network ai-stack-net \
+        -p 8080:8080 \
+        -v ~/ai-stack/localai:/models \
+        -e MODELS_PATH=/models \
+        -e ENABLE_OLLAMA_BACKEND=true \
+        -v /var/run/docker.sock:/var/run/docker.sock \
+        quay.io/go-skynet/local-ai:latest
+
+      echo "🌐 LocalAI running at http://localhost:8080"
+    fi
+  fi
+fi
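Once the localai container is up, a quick host-side smoke test against the OpenAI-compatible API; the model name below is a placeholder for whatever has been dropped into ~/ai-stack/localai:

    curl http://localhost:8080/v1/models
    curl http://localhost:8080/v1/chat/completions \
      -H "Content-Type: application/json" \
      -d '{"model": "ggml-gpt4all-j", "messages": [{"role": "user", "content": "Hello"}]}'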
@@ -23,7 +23,7 @@ start_container jupyterlab
 start_container comfyui
 start_container whisper
 start_container stable-diffusion
 start_container tg-webui
+start_container localai

 # Custom-built (may need rebuilding if deleted)
 start_container whispercpp-gpu
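start_container is defined earlier in the start script and is not shown in this diff; a plausible minimal shape, assuming it only needs to start containers the setup script already created:

    start_container() {
      local name="$1"
      # Start the named container if it exists (running or stopped).
      if docker ps -a --format '{{.Names}}' | grep -qx "$name"; then
        docker start "$name" >/dev/null && echo "▶️ Started $name"
      else
        echo "⚠️ $name not found; run the setup script first."
      fi
    }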