AI-Dev/Dockerfile

FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04
ENV DEBIAN_FRONTEND=noninteractive
WORKDIR /app
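# Minimal runtime dependencies: libgomp1/libgl1/ffmpeg for the inference backends, curl for the HEALTHCHECK below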
RUN apt-get update && apt-get install -y --no-install-recommends \
        libgomp1 libgl1 ffmpeg curl python3 ca-certificates && \
    rm -rf /var/lib/apt/lists/*
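# Prebuilt local-ai binary supplied in the build context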
COPY local-ai /usr/local/bin/local-ai
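# Runtime configuration passed to local-ai: model/config locations and which backends to enable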
ENV MODEL_PATH=/models
ENV CONFIG_PATH=/config
ENV ENABLE_BACKENDS=llama-cuda,whispercpp,stablediffusion,ollama
ENV AUTOLOAD_MODELS=true
ENV INCLUDE_DEFAULT_MODELS=true
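# Persist downloaded models and configuration outside the container filesystem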
VOLUME /models
VOLUME /config
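# Flag the container unhealthy if the readiness endpoint stops responding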
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
    CMD curl -f http://localhost:8080/readyz || exit 1
EXPOSE 8080
CMD ["/usr/local/bin/local-ai"]