From 243520badbee58ce2033fe0337c922510a488aee Mon Sep 17 00:00:00 2001
From: KP64
Date: Thu, 12 Dec 2024 14:16:07 +0100
Subject: [PATCH] feat: ai ollama module

---
 hosts/kg/configuration.nix | 16 +++++++++++---
 services/ai/default.nix    |  1 +
 services/ai/ollama.nix     | 45 ++++++++++++++++++++++++++++++++++++++
 services/default.nix       |  1 +
 4 files changed, 60 insertions(+), 3 deletions(-)
 create mode 100644 services/ai/default.nix
 create mode 100644 services/ai/ollama.nix

diff --git a/hosts/kg/configuration.nix b/hosts/kg/configuration.nix
index 320acdd..6bd76df 100644
--- a/hosts/kg/configuration.nix
+++ b/hosts/kg/configuration.nix
@@ -203,9 +203,19 @@
     physicalConnections = [ (topology.mkConnectionRev "router" "wifi") ];
   };
 
-  services.xserver.xkb = {
-    layout = "de";
-    variant = "";
+  services = {
+    ai.ollama = {
+      enable = true;
+      acceleration = "cuda";
+      models = [
+        "llama3.2"
+        "llama3.1:8b"
+      ];
+    };
+    xserver.xkb = {
+      layout = "de";
+      variant = "";
+    };
   };
 
   time.timeZone = "Europe/Berlin";
diff --git a/services/ai/default.nix b/services/ai/default.nix
new file mode 100644
index 0000000..c5cbb37
--- /dev/null
+++ b/services/ai/default.nix
@@ -0,0 +1 @@
+{ imports = [ ./ollama.nix ]; }
diff --git a/services/ai/ollama.nix b/services/ai/ollama.nix
new file mode 100644
index 0000000..48b599c
--- /dev/null
+++ b/services/ai/ollama.nix
@@ -0,0 +1,45 @@
+{ config, lib, ... }:
+let
+  cfg = config.services.ai.ollama;
+in
+{
+  options.services.ai.ollama = {
+    enable = lib.mkEnableOption "Ollama";
+
+    acceleration = lib.mkOption {
+      readOnly = true;
+      type = lib.types.enum [
+        "cpu"
+        "cuda"
+        "rocm"
+      ];
+      example = "cpu";
+      description = "The hardware acceleration backend to use.";
+    };
+
+    models = lib.mkOption {
+      readOnly = true;
+      type = with lib.types; nonEmptyListOf nonEmptyStr;
+      example = [
+        "llama3.1"
+        "llama3.2:1b"
+      ];
+      description = "The models to be automatically downloaded.";
+    };
+  };
+
+  config = lib.mkMerge [
+    (lib.mkIf cfg.enable {
+      services.ollama = {
+        enable = true;
+        openFirewall = true;
+        inherit (cfg) acceleration;
+        loadModels = cfg.models;
+      };
+    })
+
+    (lib.mkIf config.system.impermanence.enable {
+      environment.persistence."/persist".directories = lib.optional cfg.enable "/var/lib/private/ollama";
+    })
+  ];
+}
diff --git a/services/default.nix b/services/default.nix
index 34e040e..c5c5931 100644
--- a/services/default.nix
+++ b/services/default.nix
@@ -1,5 +1,6 @@
 {
   imports = [
+    ./ai
     ./gaming
     ./networking
   ];
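
Note for reviewers: a minimal sketch (not part of the patch) of how another host could consume the new services.ai.ollama options. The AMD-GPU host and the model choice below are hypothetical; since neither acceleration nor models declares a default, a host that enables the module must set both.

    # Hypothetical host configuration.nix fragment (illustration only):
    # an AMD-GPU machine enabling the same module with ROCm acceleration.
    {
      services.ai.ollama = {
        enable = true;
        acceleration = "rocm";      # forwarded to services.ollama.acceleration
        models = [ "llama3.2:1b" ]; # forwarded to services.ollama.loadModels
      };
    }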