diff --git a/configurations/nixos/horizon/configuration.nix b/configurations/nixos/horizon/configuration.nix
index f27f585..8448654 100644
--- a/configurations/nixos/horizon/configuration.nix
+++ b/configurations/nixos/horizon/configuration.nix
@@ -23,6 +23,29 @@
     hardware = {
       directAccess.enable = true;
     };
+
+    ai = {
+      enable = true;
+      models = {
+        "Llama 3.1 8B" = {
+          model = "llama3.1:8b";
+          roles = ["chat" "edit" "apply"];
+          apiBase = "http://twilight:11434";
+        };
+
+        "qwen2.5-coder:1.5b-base" = {
+          model = "qwen2.5-coder:1.5b-base";
+          roles = ["autocomplete"];
+          apiBase = "http://twilight:11434";
+        };
+
+        "nomic-embed-text:latest" = {
+          model = "nomic-embed-text:latest";
+          roles = ["embed"];
+          apiBase = "http://twilight:11434";
+        };
+      };
+    };
   };
 
   environment.systemPackages = [
diff --git a/configurations/nixos/twilight/configuration.nix b/configurations/nixos/twilight/configuration.nix
index 82ca9b7..10f2606 100644
--- a/configurations/nixos/twilight/configuration.nix
+++ b/configurations/nixos/twilight/configuration.nix
@@ -83,6 +83,7 @@
   services = {
     ollama = {
       enable = true;
+      exposePort = true;
       loadModels = [
         # conversation models
diff --git a/modules/nixos-modules/ai.nix b/modules/nixos-modules/ai.nix
index 646e1b5..d8cd63d 100644
--- a/modules/nixos-modules/ai.nix
+++ b/modules/nixos-modules/ai.nix
@@ -40,7 +40,7 @@
   };
 
   config = {
-    # TODO: configure ollama to download any modules listed in options.host.ai.models.{name}.model if options.host.ai.models.{name}.apiBase is the default value
-    # TODO: if we have any models that have a non null options.host.ai.models.{name}.apiBase then set services.ollama.enable to a lib.mkAfter true
+    # TODO: configure ollama to download any models listed in options.host.ai.models.{name}.model if options.host.ai.models.{name}.apiBase is localhost
+    # TODO: if we have any models that have a non-localhost options.host.ai.models.{name}.apiBase then set services.ollama.enable to a lib.mkAfter true
   };
 }
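
The two TODOs in modules/nixos-modules/ai.nix boil down to partitioning host.ai.models by apiBase and wiring the local ollama service from the result. Below is a minimal sketch of that logic, implementing each TODO as written; it assumes the option layout shown in the diff, and the helper names (isLocal, localModels, remoteModels) and the exact localhost URLs are illustrative guesses, not the repo's actual implementation.

{
  config,
  lib,
  ...
}: let
  cfg = config.host.ai;

  # Assumption: a model counts as local when its apiBase is unset or points
  # at this machine; the option's real default is not visible in the diff.
  isLocal = m: lib.elem m.apiBase [null "http://localhost:11434" "http://127.0.0.1:11434"];

  models = lib.attrValues cfg.models;
  localModels = lib.filter isLocal models;
  remoteModels = lib.filter (m: !(isLocal m)) models;
in {
  config = lib.mkIf cfg.enable {
    # First TODO: have the local ollama instance pull every model whose
    # apiBase is localhost.
    services.ollama.loadModels = map (m: m.model) localModels;

    # Second TODO, as written: when any model points at a non-localhost
    # apiBase, define services.ollama.enable as lib.mkAfter true.
    services.ollama.enable = lib.mkIf (remoteModels != []) (lib.mkAfter true);
  };
}

Under this sketch horizon, whose three models all point at twilight, would pull nothing locally, while twilight keeps declaring its served models through services.ollama.loadModels as it does today.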