set default api base for ai models to localhost

commit 8c36fe5a72
parent 0bd483147d
Author: Leyla Becker
Date: 2025-05-26 17:49:47 -05:00
2 changed files with 5 additions and 13 deletions

File 1 of 2:

@@ -51,14 +51,6 @@
     fprintd = {
      enable = true;
    };
-    ollama = {
-      enable = false;
-      loadModels = [
-        "deepseek-coder:1.3b"
-        "deepseek-r1:1.5b"
-      ];
-    };
     tailscale = {
      enable = true;
      authKeyFile = config.sops.secrets."vpn-keys/tailscale-authkey/horizon".path;

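With the per-host ollama block removed, the models it loaded would presumably be declared through the host.ai.models options changed in the second file. A minimal sketch of such a declaration, assuming each entry carries the model and apiBase attributes named in the module's option declarations and TODO comments; the entry names here are hypothetical, not taken from the repository:

host.ai.models = {
  # hypothetical entries; only the "model" and "apiBase" attributes are
  # attested by the option declarations and TODO comments below
  deepseek-coder = {
    model = "deepseek-coder:1.3b";
    # apiBase is omitted, so it falls back to the new default,
    # "http://localhost:11434"
  };
  deepseek-r1 = {
    model = "deepseek-r1:1.5b";
  };
};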
File 2 of 2:

@@ -16,10 +16,10 @@
       type = lib.types.str;
      default = "ollama";
    };
-    # apiBase = lib.mkOption {
-    #   type = lib.types.nullOr lib.types.str;
-    #   default = null;
-    # };
+    apiBase = lib.mkOption {
+      type = lib.types.str;
+      default = "http://localhost:11434";
+    };
     roles = lib.mkOption {
      type = lib.types.listOf (lib.types.enum [
        "chat"
@@ -40,7 +40,7 @@
   };
   config = {
-    # TODO: configure ollama to download any modules listed in options.host.ai.models.{name}.model if options.host.ai.models.{name}.apiBase is null
+    # TODO: configure ollama to download any models listed in options.host.ai.models.{name}.model if options.host.ai.models.{name}.apiBase is the default value
     # TODO: if we have any models that have a non null options.host.ai.models.{name}.apiBase then set services.ollama.enable to a lib.mkAfter true
   };
 }
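The two TODO comments describe behavior this commit does not yet implement. A minimal sketch of what that follow-up logic could look like, assuming the models live under config.host.ai.models as the option path in the comments suggests, and treating the new default apiBase as the marker for a locally served model; this is an illustration, not code from the repository:

{ config, lib, ... }:
let
  # assumption: a model whose apiBase still points at the new default is
  # meant to be served by the host's own ollama instance
  localModels = lib.filterAttrs
    (_name: m: m.apiBase == "http://localhost:11434")
    config.host.ai.models;
in
{
  config = lib.mkIf (localModels != { }) {
    # second TODO: force ollama on whenever any model is served locally
    services.ollama.enable = lib.mkAfter true;
    # first TODO: have ollama pull every locally served model
    services.ollama.loadModels =
      lib.mapAttrsToList (_name: m: m.model) localModels;
  };
}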