# NixOS module: local AI stack (Ollama + Open WebUI) as OCI containers with NVIDIA GPU access.
{ config, pkgs, ... }:

let
  # Host directory that backs all container bind-mounted volumes.
  workDir = "/srv/containers/ai";
in
{
  # Pre-create the volume directories at boot.
  # Mode 2770 = setgid + rwxrwx---: members of group `admin` can manage the
  # data and newly created files inherit the group. The previous mode 2700
  # set the setgid bit but gave the group no permissions at all, which made
  # the group assignment pointless.
  systemd.tmpfiles.rules = [
    "d ${workDir}/ollama 2770 root admin"
    "d ${workDir}/open-webui 2770 root admin"
  ];

  # Expose NVIDIA GPUs to containers via the Container Device Interface (CDI).
  hardware.nvidia-container-toolkit.enable = true;
  virtualisation.docker.daemon.settings.features.cdi = true;

  virtualisation.oci-containers.containers = {
    ollama = {
      image = "ollama/ollama:latest";
      volumes = [
        # Persist models and Ollama state on the host.
        "${workDir}/ollama:/root/.ollama"
      ];
      extraOptions = [
        # CDI device request, matching the CDI feature enabled above.
        # (The legacy `--gpus=all` flag goes through the nvidia runtime
        # and bypasses CDI.)
        "--device=nvidia.com/gpu=all"
      ];
    };

    open-webui = {
      image = "ghcr.io/open-webui/open-webui:main";
      # Expose the web UI on host port 10005 (container listens on 8080).
      ports = [ "0.0.0.0:10005:8080" ];
      environment = {
        # NOTE(review): resolving `ollama` by container name requires both
        # containers to share a user-defined Docker network; on the default
        # bridge this hostname will not resolve — verify the deployment or
        # attach both containers to a common network.
        OLLAMA_BASE_URL = "http://ollama:11434";
      };
      volumes = [
        # Persist Open WebUI accounts, chats, and settings on the host.
        "${workDir}/open-webui:/app/backend/data"
      ];
      # Start order only; does not create a shared network.
      dependsOn = [ "ollama" ];
    };
  };
}