Started drafting the configuration options for Continue models

This commit is contained in:
Leyla Becker 2025-05-24 23:03:20 -05:00
parent 7b6344b419
commit 77ab4781ac
4 changed files with 154 additions and 52 deletions

View file

@ -0,0 +1,44 @@
# Declares per-machine AI model configuration (consumed by Continue / ollama).
{lib, ...}: {
  options.host = {
    ai = {
      enable = lib.mkEnableOption "should we use AI on this machine";
      models = lib.mkOption {
        # Default to no models so the option is safe to read when unset.
        default = {};
        description = "Set of AI models available on this machine, keyed by attribute name.";
        type = lib.types.attrsOf (lib.types.submodule ({name, ...}: {
          # Submodules declare their settings under `options` (plural);
          # `option` is silently ignored by the module system.
          options = {
            name = lib.mkOption {
              type = lib.types.str;
              # Defaults to the attrset key the model was declared under.
              default = name;
              description = "Display name of the model; defaults to the attribute name.";
            };
            # Each field below must be wrapped in lib.mkOption — a bare
            # attrset with a `type` key is not a valid option declaration.
            model = lib.mkOption {
              type = lib.types.str;
              description = "Model identifier (e.g. an ollama model tag).";
            };
            provider = lib.mkOption {
              type = lib.types.str;
              default = "ollama";
              description = "Backend provider serving this model.";
            };
            apiBase = lib.mkOption {
              # nullOr is required: a plain `str` type rejects the null default.
              type = lib.types.nullOr lib.types.str;
              default = null;
              description = "Base URL of a remote API, or null to serve the model locally.";
            };
            roles = lib.mkOption {
              # `lib.types.enumOf` does not exist; the combinator is
              # `lib.types.enum`, parenthesized as the argument to listOf.
              type = lib.types.listOf (lib.types.enum [
                "chat"
                "autocomplete"
                "embed"
                "rerank"
                "edit"
                "apply"
                "summarize"
              ]);
              default = [];
              description = "Roles this model may fill in Continue.";
            };
          };
        }));
      };
    };
  };
  config = {
    # TODO: configure ollama to download any models listed in options.host.ai.models.{name}.model if options.host.ai.models.{name}.apiBase is null
    # TODO: if we have any models that have a non null options.host.ai.models.{name}.apiBase then set services.ollama.enable to a lib.mkAfter true
    # NOTE(review): the TODO above looks inverted — a model with a NULL apiBase
    # is the one served locally and therefore needs ollama enabled; confirm intent.
  };
}

View file

@ -12,6 +12,7 @@
./impermanence.nix
./disko.nix
./ollama.nix
./continue.nix
./tailscale.nix
./server
];