diff --git a/hosts/isengard.nix b/hosts/isengard.nix
index 742183d..fee4d23 100644
--- a/hosts/isengard.nix
+++ b/hosts/isengard.nix
@@ -8,12 +8,16 @@
     # Secrets
     ../modules/agenix.nix
 
+    # Nvidia
+    ../modules/nvidia.nix
+
     # Containers
     ../modules/containers/ntfy.nix
     ../modules/containers/mindwtr.nix
     ../modules/containers/vaultwarden.nix
     ../modules/containers/nextcloud.nix
     ../modules/containers/actualbudget.nix
+    ../modules/containers/ai.nix
   ];
 
   # Disks
diff --git a/modules/containers/ai.nix b/modules/containers/ai.nix
new file mode 100644
index 0000000..922297d
--- /dev/null
+++ b/modules/containers/ai.nix
@@ -0,0 +1,47 @@
+{ config, pkgs, ... }:
+
+let
+  workDir = "/srv/containers/ai";
+in
+{
+  # Persistent state directories for the containers below.
+  # NOTE(review): mode 2700 sets the setgid bit but grants group `admin`
+  # no permissions at all — confirm 2770 (or plain 0700) was not intended.
+  systemd.tmpfiles.rules = [
+    "d ${workDir}/ollama 2700 root admin"
+    "d ${workDir}/open-webui 2700 root admin"
+  ];
+
+  # Expose the NVIDIA GPU to containers via CDI (requires Docker >= 25).
+  hardware.nvidia-container-toolkit.enable = true;
+  virtualisation.docker.daemon.settings.features.cdi = true;
+
+  virtualisation.oci-containers.containers = {
+    ollama = {
+      image = "ollama/ollama:latest";
+      volumes = [
+        "${workDir}/ollama:/root/.ollama"
+      ];
+      extraOptions = [
+        # CDI device request. `--gpus=all` would go through the legacy
+        # nvidia Docker runtime, which this host does not configure.
+        "--device=nvidia.com/gpu=all"
+      ];
+    };
+
+    open-webui = {
+      image = "ghcr.io/open-webui/open-webui:main";
+      ports = [ "0.0.0.0:10005:8080" ];
+      environment = {
+        # NOTE(review): container-name DNS only works on a user-defined
+        # Docker network, not the default bridge — verify both containers
+        # share a network (e.g. via `--network=...` in extraOptions).
+        OLLAMA_BASE_URL = "http://ollama:11434";
+      };
+      volumes = [
+        "${workDir}/open-webui:/app/backend/data"
+      ];
+      dependsOn = [ "ollama" ];
+    };
+  };
+}