Switched back to the AIO Docker container for now, until we can figure out how to rebuild the more extensive container.
parent 9cf8d1b07c
commit 63a1527b8f
@@ -494,12 +494,11 @@ if [[ "$1" != "--update" ]]; then
      --name localai \
      --gpus all \
      -p 8080:8080 \
      -v ~/ai-stack/localai/models:/models \
      -v ~/ai-stack/localai/models:/build/models \
      -v ~/ai-stack/localai/config:/config \
      -e ENABLE_BACKENDS=llama-cuda,ollama \
      -e INCLUDE_DEFAULT_MODELS=true \
      -e AUTOLOAD_MODELS=true \
      -e MODEL_PATH=/models \
      --restart unless-stopped \
      localai/localai:latest-aio-gpu-nvidia-cuda-12
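For quick reference, below is a sketch of the full launch command implied by the hunk above. The docker run -d prefix, the mkdir step, and the health check are assumptions that do not appear in this diff; only the flags and the image tag are shown in the hunk.

    # Sketch only: `docker run -d`, the mkdir step, and the checks at the end
    # are assumptions; the hunk above shows just the flags and the image tag.
    mkdir -p ~/ai-stack/localai/models ~/ai-stack/localai/config

    docker run -d \
      --name localai \
      --gpus all \
      -p 8080:8080 \
      -v ~/ai-stack/localai/models:/models \
      -v ~/ai-stack/localai/models:/build/models \
      -v ~/ai-stack/localai/config:/config \
      -e ENABLE_BACKENDS=llama-cuda,ollama \
      -e INCLUDE_DEFAULT_MODELS=true \
      -e AUTOLOAD_MODELS=true \
      -e MODEL_PATH=/models \
      --restart unless-stopped \
      localai/localai:latest-aio-gpu-nvidia-cuda-12

    # Verify the container is up; LocalAI serves an OpenAI-compatible API on 8080.
    docker ps --filter name=localai
    curl http://localhost:8080/v1/models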