This repository has no description.

chore: add ollama for local ai (#42)

authored by tghanken.tngl.sh and committed by GitHub d032eb9f c31ca361

Changed files
+18 -1
machines
hosts
desktops
modules
desktop
apps
+1 -1
machines/hosts/desktops/inwin-tower/configuration.nix
··· 14 14 ./hardware-configuration.nix 15 15 16 16 # Include any additional apps desired 17 - ../../../modules/desktop/apps/aider.nix 17 + ../../../modules/desktop/apps/ollama.nix 18 18 ../../../modules/desktop/apps/jetbrains.nix 19 19 ../../../modules/desktop/apps/steam.nix 20 20 ];
+5
machines/hosts/desktops/inwin-tower/devices.nix
··· 167 167 type = "zfs_fs"; 168 168 mountpoint = "/mnt/steam"; 169 169 }; 170 + ollama = { 171 + type = "zfs_fs"; 172 + mountpoint = "/mnt/ollama"; 173 + options.mountpoint = "legacy"; 174 + }; 170 175 reserved = { 171 176 type = "zfs_fs"; 172 177 options.refreservation = "10G";
+12
machines/modules/desktop/apps/ollama.nix
··· 1 + {pkgs, ...}: { 2 + users.users.ollama = { 3 + isNormalUser = false; 4 + description = "Ollama"; 5 + }; 6 + services.ollama = { 7 + enable = true; 8 + home = "/mnt/ollama"; 9 + acceleration = "cuda"; 10 + user = "ollama"; 11 + }; 12 + }