nix-config/modules/nixos-modules/ai.nix

{lib, ...}: {
  options.host = {
    ai = {
      enable = lib.mkEnableOption "AI tooling on this machine";
      models = lib.mkOption {
        type = lib.types.attrsOf (lib.types.submodule ({name, ...}: {
          options = {
            name = lib.mkOption {
              type = lib.types.str;
              default = name;
            };
            model = lib.mkOption {
              type = lib.types.str;
            };
            provider = lib.mkOption {
              type = lib.types.str;
              default = "ollama";
            };
            apiBase = lib.mkOption {
              type = lib.types.str;
              default = "http://localhost:11434";
            };
            roles = lib.mkOption {
              type = lib.types.listOf (lib.types.enum [
                "chat"
                "autocomplete"
                "embed"
                "rerank"
                "edit"
                "apply"
                "summarize"
              ]);
              default = [];
            };
          };
        }));
        default = {};
      };
    };
  };
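
  # Example (hypothetical host config) of how a machine might set the options
  # declared above; the attribute name and model tag are illustrative only:
  #
  #   host.ai = {
  #     enable = true;
  #     models.coder = {
  #       model = "qwen2.5-coder:7b";
  #       roles = ["chat" "autocomplete"];
  #     };
  #   };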
  config = {
    # TODO: configure ollama to download any models listed in options.host.ai.models.{name}.model when options.host.ai.models.{name}.apiBase is localhost
    # TODO: if any model has a non-localhost options.host.ai.models.{name}.apiBase, set services.ollama.enable to lib.mkAfter true
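    # A rough, untested sketch of how these TODOs might look, assuming `config` is
    # added to the module arguments and that `services.ollama.loadModels` exists in
    # the pinned nixpkgs. It treats a model as local when its apiBase points at this
    # machine, and uses mkDefault so a host can still override the enable; the enable
    # condition is one reading of the second TODO and may need its predicate adjusted:
    #
    #   services.ollama = let
    #     isLocal = m: lib.hasInfix "localhost" m.apiBase || lib.hasInfix "127.0.0.1" m.apiBase;
    #     localModels = builtins.filter isLocal (builtins.attrValues config.host.ai.models);
    #   in lib.mkIf config.host.ai.enable {
    #     enable = lib.mkDefault (localModels != []);
    #     loadModels = map (m: m.model) localModels;
    #   };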
  };
}