set default api base for ai models to localhost
This commit is contained in:
parent
0bd483147d
commit
8c36fe5a72
2 changed files with 5 additions and 13 deletions
|
@ -16,10 +16,10 @@
|
|||
type = lib.types.str;
|
||||
default = "ollama";
|
||||
};
|
||||
# apiBase = lib.mkOption {
|
||||
# type = lib.types.nullOr lib.types.str;
|
||||
# default = null;
|
||||
# };
|
||||
apiBase = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = "http://localhost:11434";
|
||||
};
|
||||
roles = lib.mkOption {
|
||||
type = lib.types.listOf (lib.types.enum [
|
||||
"chat"
|
||||
|
@ -40,7 +40,7 @@
|
|||
};
|
||||
|
||||
config = {
|
||||
# TODO: configure ollama to download any modules listed in options.host.ai.models.{name}.model if options.host.ai.models.{name}.apiBase is null
|
||||
# TODO: configure ollama to download any models listed in options.host.ai.models.{name}.model if options.host.ai.models.{name}.apiBase is the default value
|
||||
# TODO: if we have any models that have a non-null options.host.ai.models.{name}.apiBase then set services.ollama.enable to a lib.mkAfter true
|
||||
};
|
||||
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue