generate .continue/config.yaml via configuration

Leyla Becker 2025-05-25 13:07:32 -05:00
parent 2c77cf2ed4
commit 393f468be2
6 changed files with 117 additions and 20 deletions

View file

@@ -83,6 +83,32 @@
     };
   };
+  user = {
+    continue = {
+      enable = true;
+      docs = {
+        "Continue Docs" = {
+          startUrl = "https://docs.continue.dev";
+        };
+        "Nixpkgs" = {
+          startUrl = "https://ryantm.github.io/nixpkgs/#preface";
+        };
+        "Nix Manual" = {
+          startUrl = "https://nixos.org/manual/nixos/stable/";
+        };
+        "Home manager Manual" = {
+          startUrl = "https://nix-community.github.io/home-manager/";
+        };
+        "Nix Docs" = {
+          startUrl = "https://nix.dev/index.html";
+        };
+        "Linux Man Page" = {
+          startUrl = "https://linux.die.net/man/";
+        };
+      };
+    };
+  };
   programs = {
     # Let Home Manager install and manage itself.
     home-manager.enable = true;
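
The docs entries above only set startUrl; the submodule declared in continue.nix further down in this commit fills in name from the attribute key, and the module then flattens the attrset with lib.attrValues before writing the YAML. A rough standalone sketch of that transformation, where the mapAttrs call stands in for the submodule's `default = name` and the file name and the two sample entries are illustrations only:

# docs-sketch.nix (hypothetical file name); evaluate with `nix eval -f ./docs-sketch.nix`
let
  lib = (import <nixpkgs> {}).lib;

  # The attribute key doubles as the display name, mirroring `default = name` in the submodule.
  docs = lib.mapAttrs (name: value: {inherit name;} // value) {
    "Continue Docs" = {startUrl = "https://docs.continue.dev";};
    "Nix Manual" = {startUrl = "https://nixos.org/manual/nixos/stable/";};
  };
in
  # continue.nix later does the same flattening: docs = lib.attrsets.attrValues config.user.continue.docs;
  lib.attrValues docs

This evaluates to a list of { name, startUrl } attrsets, which is what the generated config.yaml ends up containing under docs.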

View file

@@ -2,11 +2,12 @@
   lib,
   pkgs,
   inputs,
+  config,
   osConfig,
   ...
 }: let
   nix-development-enabled = osConfig.host.nix-development.enable;
-  ai-tooling-enabled = osConfig.services.ollama.enable;
+  ai-tooling-enabled = config.user.continue.enable && osConfig.host.ai.enable;
 in {
   nixpkgs = {
     overlays = [
@@ -14,9 +15,6 @@ in {
     ];
   };
-  # TODO: when ai-tooling is enabled configure ~/.continue/config.yaml to use remote hosted on defiant
-  # TODO: when ai-tooling is enabled configure ~/.continue/config.yaml to use use better models hosted on defiant
   programs = {
     bash.shellAliases = {
       code = "codium";
@@ -59,10 +57,8 @@ in {
           "expr" = "import <nixpkgs> {}";
         };
       })
-      (lib.mkIf osConfig.services.ollama.enable {
+      (lib.mkIf ai-tooling-enabled {
         "continue.telemetryEnabled" = false;
-        # builtins.elemAt osConfig.services.ollama.loadModels 0;
       })
     ];
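
The gate for the editor settings changes from `osConfig.services.ollama.enable` to `config.user.continue.enable && osConfig.host.ai.enable`, so the Continue-related settings now follow the new per-user option rather than whether Ollama happens to run locally. The list above is presumably fed to `lib.mkMerge`, where a `lib.mkIf` element simply drops out when its condition is false. A reduced, self-contained sketch of that behaviour; the option name `settings`, the `editor.formatOnSave` entry, and the hard-coded boolean are illustrative only:

# mkif-sketch.nix (hypothetical); run `nix eval --json -f ./mkif-sketch.nix config.settings`
let
  lib = (import <nixpkgs> {}).lib;
  ai-tooling-enabled = true; # stand-in for config.user.continue.enable && osConfig.host.ai.enable
in
  (lib.evalModules {
    modules = [
      {
        options.settings = lib.mkOption {
          type = lib.types.attrsOf lib.types.anything;
          default = {};
        };
        config.settings = lib.mkMerge [
          {"editor.formatOnSave" = true;} # unconditional fragment
          (lib.mkIf ai-tooling-enabled {
            # merged in only while the condition holds
            "continue.telemetryEnabled" = false;
          })
        ];
      }
    ];
  }).config.settings

With ai-tooling-enabled set to false the result is just the unconditional fragment; nothing has to be filtered by hand.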

View file

@@ -35,7 +35,7 @@
     # TODO: benchmark twilight against defiant and prune this list of models that are faster on defiant
     models = {
       # conversation models
-      "lamma3.1:8b" = {
+      "Llama 3.1 8B" = {
         model = "lamma3.1:8b";
         roles = ["chat" "edit" "apply"];
       };
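
The attribute key now carries the display name (the `default = name` in the options module below turns it into the `name` field), while `model` keeps the Ollama tag. Note that the tag in the unchanged line is still spelled `lamma3.1:8b`; Ollama publishes this model as `llama3.1:8b`, so that looks like a pre-existing typo rather than something this commit introduces. A corrected entry would presumably look like:

host.ai.models = {
  "Llama 3.1 8B" = {
    model = "llama3.1:8b"; # Ollama tag (assumed intent); the display name comes from the attribute key
    roles = ["chat" "edit" "apply"];
  };
};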

View file

@@ -1,3 +1,75 @@
-{...}: {
-  # TODO: enable option for continue.yaml for home based on options defined in osConfig.ai.models
+{
+  lib,
+  pkgs,
+  config,
+  osConfig,
+  ...
+}: let
+  ai-tooling-enabled = config.user.continue.enable && osConfig.host.ai.enable;
+in {
+  options = {
+    user.continue = {
+      enable = lib.mkEnableOption "should continue be enabled on this machine";
+      docs = lib.mkOption {
+        type = lib.types.attrsOf (lib.types.submodule ({name, ...}: {
+          options = {
+            name = lib.mkOption {
+              type = lib.types.str;
+              default = name;
+            };
+            startUrl = lib.mkOption {
+              type = lib.types.str;
+            };
+          };
+        }));
+      };
+      context = lib.mkOption {
+        type = lib.types.attrsOf (lib.types.submodule ({name, ...}: {
+          options = {
+            provider = lib.mkOption {
+              type = lib.types.str;
+              default = name;
+            };
+          };
+        }));
+        default = {
+          "code" = {};
+          "docs" = {};
+          "diff" = {};
+          "terminal" = {};
+          "problems" = {};
+          "folder" = {};
+          "codebase" = {};
+        };
+      };
+    };
+  };
+  config =
+    lib.mkIf ai-tooling-enabled
+    (lib.mkMerge [
+      {
+        home = {
+          file = {
+            ".continue/config.yaml".source = (pkgs.formats.yaml {}).generate "continue-config" {
+              name = "Assistant";
+              version = "1.0.0";
+              schema = "v1";
+              models = lib.attrsets.attrValues osConfig.host.ai.models;
+              context = lib.attrsets.attrValues config.user.continue.context;
+              docs = lib.attrsets.attrValues config.user.continue.docs;
+            };
+          };
+        };
+      }
+      (lib.mkIf osConfig.host.impermanence.enable {
+        home.persistence."/persist${config.home.homeDirectory}" = {
+          directories = [
+            ".continue/index"
+            ".continue/sessions"
+          ];
+          allowOther = true;
+        };
+      })
+    ]);
 }
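
This is the heart of the commit: the Home Manager module declares the `user.continue.*` options, and when both the per-user enable flag and `osConfig.host.ai.enable` are set it renders `~/.continue/config.yaml` from them with `pkgs.formats.yaml`, pulling the model list straight from the host-level `host.ai.models` options via `lib.attrsets.attrValues`. A standalone sketch of what that generator produces, with sample model/doc/context values standing in for the flattened option values:

# continue-config-sketch.nix (hypothetical); `nix-build continue-config-sketch.nix`
# builds a store path containing the same kind of YAML the module links to ~/.continue/config.yaml.
let
  pkgs = import <nixpkgs> {};
  yamlFormat = pkgs.formats.yaml {};
in
  yamlFormat.generate "continue-config" {
    name = "Assistant";
    version = "1.0.0";
    schema = "v1";
    # Stand-ins for lib.attrsets.attrValues of osConfig.host.ai.models,
    # config.user.continue.context, and config.user.continue.docs:
    models = [
      {
        name = "Llama 3.1 8B";
        model = "llama3.1:8b";
        provider = "ollama";
        roles = ["chat" "edit" "apply"];
      }
    ];
    context = [{provider = "code";} {provider = "docs";}];
    docs = [
      {
        name = "Continue Docs";
        startUrl = "https://docs.continue.dev";
      }
    ];
  }

The second mkMerge branch only matters on impermanence hosts: it persists `.continue/index` and `.continue/sessions` so Continue's on-disk index and session history survive reboots, while the generated config itself is always rebuilt from the Nix options.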

View file

@@ -4,5 +4,6 @@
     ./flipperzero.nix
     ./i18n.nix
     ./openssh.nix
+    ./continue.nix
   ];
 }

View file

@@ -4,24 +4,24 @@
     enable = lib.mkEnableOption "should we use AI on this machine";
     models = lib.mkOption {
       type = lib.types.attrsOf (lib.types.submodule ({name, ...}: {
-        option = {
+        options = {
           name = lib.mkOption {
             type = lib.types.str;
             default = name;
           };
-          model = {
+          model = lib.mkOption {
             type = lib.types.str;
           };
-          provider = {
+          provider = lib.mkOption {
             type = lib.types.str;
             default = "ollama";
           };
-          apiBase = {
-            type = lib.types.str;
-            default = null;
-          };
-          roles = {
-            type = lib.types.listOf lib.types.enumOf [
+          # apiBase = lib.mkOption {
+          # type = lib.types.nullOr lib.types.str;
+          # default = null;
+          # };
+          roles = lib.mkOption {
+            type = lib.types.listOf (lib.types.enum [
              "chat"
              "autocomplete"
              "embed"
@@ -29,11 +29,13 @@
              "edit"
              "apply"
              "summarize"
-            ];
+            ]);
+            default = [];
           };
         };
       }));
     };
     default = {};
   };
 };
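
The fixes in this last file are what make the generated config possible: `option = {` becomes `options = {`, each field becomes a real `lib.mkOption`, the roles type becomes a proper `listOf (enum [...])`, and `apiBase` is parked as a comment with `nullOr str` as its eventual type. A reduced, self-contained version of the declaration (file name hypothetical, role list trimmed to the values visible in the diff) that can be evaluated to see the submodule defaults and enum checking at work:

# ai-models-sketch.nix (hypothetical); `nix eval --json -f ./ai-models-sketch.nix config.host.ai.models`
let
  lib = (import <nixpkgs> {}).lib;
in
  (lib.evalModules {
    modules = [
      {
        options.host.ai.models = lib.mkOption {
          type = lib.types.attrsOf (lib.types.submodule ({name, ...}: {
            options = {
              name = lib.mkOption {
                type = lib.types.str;
                default = name; # attribute key doubles as the display name
              };
              model = lib.mkOption {type = lib.types.str;};
              provider = lib.mkOption {
                type = lib.types.str;
                default = "ollama";
              };
              roles = lib.mkOption {
                # anything outside the listed strings is rejected at evaluation time
                type = lib.types.listOf (lib.types.enum ["chat" "autocomplete" "embed" "edit" "apply" "summarize"]);
                default = [];
              };
            };
          }));
          default = {};
        };
      }
      {
        # One definition, as a host configuration would provide it.
        host.ai.models."Llama 3.1 8B" = {
          model = "llama3.1:8b";
          roles = ["chat" "edit" "apply"];
        };
      }
    ];
  }).config.host.ai.models

Evaluating this returns the entry with name and provider filled in from their defaults, which is exactly the attrset shape that lib.attrsets.attrValues hands to the YAML generator in continue.nix.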