diff --git a/templates/localai/.env.example b/templates/localai/.env.example
new file mode 100644
index 0000000..26e4a1f
--- /dev/null
+++ b/templates/localai/.env.example
@@ -0,0 +1 @@
+# https://github.com/mudler/LocalAI
diff --git a/templates/localai/compose.yml b/templates/localai/compose.yml
new file mode 100644
index 0000000..0ede738
--- /dev/null
+++ b/templates/localai/compose.yml
@@ -0,0 +1,30 @@
+services:
+  api:
+    image: localai/localai:latest-aio-cpu
+    # For GPU support, use one of:
+    # image: localai/localai:latest-aio-gpu-nvidia-cuda-12
+    # image: localai/localai:latest-aio-gpu-nvidia-cuda-11
+    # image: localai/localai:latest-aio-gpu-hipblas
+    # image: localai/localai:latest-aio-gpu-intel
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]
+      interval: 1m
+      timeout: 20m
+      retries: 5
+    ports:
+      - 8080:8080
+    environment:
+      - DEBUG=true
+    volumes:
+      - localai_models:/models:cached
+    # For NVIDIA GPUs, uncomment:
+    # deploy:
+    #   resources:
+    #     reservations:
+    #       devices:
+    #         - driver: nvidia
+    #           count: 1
+    #           capabilities: [gpu]
+
+volumes:
+  localai_models:
diff --git a/templates/localai/template.json b/templates/localai/template.json
new file mode 100644
index 0000000..f39b2a2
--- /dev/null
+++ b/templates/localai/template.json
@@ -0,0 +1,7 @@
+{
+  "name": "LocalAI",
+  "description": "Open Source alternative to OpenAI.",
+  "version": "1.0.0",
+  "author": "rjcndev",
+  "tags": ["AI", "LLM", "ChatGPT", "OpenAI"]
+}
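
Usage note: once the stack is started (docker compose up -d), LocalAI serves an OpenAI-compatible HTTP API on port 8080, the port mapped in compose.yml above. The sketch below is illustrative only and is not part of this template; it assumes the openai Python package as the client and that the AIO image has preloaded at least one model, so it lists models first instead of hard-coding a name.

    # Minimal sketch: talk to the LocalAI container started by this template.
    # Assumes the `openai` Python package (pip install openai) and the default
    # 8080:8080 port mapping from compose.yml. Model names depend on what the
    # AIO image preloads, so query /v1/models rather than guessing.
    from openai import OpenAI

    # LocalAI does not require an API key by default; the client just needs a placeholder.
    client = OpenAI(base_url="http://localhost:8080/v1", api_key="not-needed")

    # List whatever models the AIO image exposes.
    models = [m.id for m in client.models.list().data]
    print("available models:", models)

    # Send a chat request to the first listed model (arbitrary choice for the demo).
    resp = client.chat.completions.create(
        model=models[0],
        messages=[{"role": "user", "content": "Say hello from LocalAI."}],
    )
    print(resp.choices[0].message.content)

The container's own healthcheck already polls /readyz, so waiting for the service to report healthy before sending the first request avoids failures while models are still being downloaded on first boot.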