generate .continue/config.yaml via configuration
parent 2c77cf2ed4
commit 393f468be2
6 changed files with 117 additions and 20 deletions
@@ -83,6 +83,32 @@
     };
   };
+
+  user = {
+    continue = {
+      enable = true;
+      docs = {
+        "Continue Docs" = {
+          startUrl = "https://docs.continue.dev";
+        };
+        "Nixpkgs" = {
+          startUrl = "https://ryantm.github.io/nixpkgs/#preface";
+        };
+        "Nix Manual" = {
+          startUrl = "https://nixos.org/manual/nixos/stable/";
+        };
+        "Home manager Manual" = {
+          startUrl = "https://nix-community.github.io/home-manager/";
+        };
+        "Nix Docs" = {
+          startUrl = "https://nix.dev/index.html";
+        };
+        "Linux Man Page" = {
+          startUrl = "https://linux.die.net/man/";
+        };
+      };
+    };
+  };
 
   programs = {
     # Let Home Manager install and manage itself.
     home-manager.enable = true;
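The option declarations behind this user.continue attrset are not part of the excerpt, so the following is only a minimal sketch of how such a module could turn the values above into a generated ~/.continue/config.yaml, per the commit title. The pkgs.formats.yaml generator, the home.file target, and the exact Continue YAML schema are assumptions here, not taken from this commit.

{ config, lib, pkgs, ... }: let
  cfg = config.user.continue;
  yamlFormat = pkgs.formats.yaml { };
in {
  options.user.continue = {
    enable = lib.mkEnableOption "generation of the Continue configuration file";
    docs = lib.mkOption {
      # one attribute per documentation site, keyed by its display name
      type = lib.types.attrsOf (lib.types.submodule {
        options.startUrl = lib.mkOption { type = lib.types.str; };
      });
      default = { };
    };
  };

  config = lib.mkIf cfg.enable {
    # render the attrset into YAML and link it into the home directory
    home.file.".continue/config.yaml".source = yamlFormat.generate "continue-config.yaml" {
      docs = lib.mapAttrsToList (name: doc: {
        inherit name;
        inherit (doc) startUrl;
      }) cfg.docs;
    };
  };
}

Splitting options and config this way keeps the host-specific values (the hunk above) separate from the rendering logic.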
@@ -2,11 +2,12 @@
   lib,
   pkgs,
   inputs,
+  config,
   osConfig,
   ...
 }: let
   nix-development-enabled = osConfig.host.nix-development.enable;
-  ai-tooling-enabled = osConfig.services.ollama.enable;
+  ai-tooling-enabled = config.user.continue.enable && osConfig.host.ai.enable;
 in {
   nixpkgs = {
     overlays = [
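For context, osConfig is the NixOS system configuration as seen from inside a Home Manager module; it is available because Home Manager is evaluated as a NixOS module, wired roughly like the sketch below (the user name and file path are placeholders, not taken from this repository).

# NixOS side (hypothetical): importing the user's Home Manager configuration
# this way makes the system config available to it as the `osConfig` argument,
# which is how flags such as osConfig.host.ai.enable are read above.
{
  home-manager.users.alice = import ./home/editor.nix;
}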
@@ -14,9 +15,6 @@ in {
     ];
   };
 
-  # TODO: when ai-tooling is enabled configure ~/.continue/config.yaml to use remote hosted on defiant
-  # TODO: when ai-tooling is enabled configure ~/.continue/config.yaml to use better models hosted on defiant
-
   programs = {
     bash.shellAliases = {
       code = "codium";
@@ -59,10 +57,8 @@
         "expr" = "import <nixpkgs> {}";
       };
     })
-    (lib.mkIf osConfig.services.ollama.enable {
+    (lib.mkIf ai-tooling-enabled {
       "continue.telemetryEnabled" = false;
-
-      # builtins.elemAt osConfig.services.ollama.loadModels 0;
     })
   ];
 
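The list these lib.mkIf fragments live in is not fully shown, but it appears to be merged into a single editor settings attrset. Below is a standalone illustration of that merging behaviour using lib.evalModules with a made-up userSettings option; every name in it is hypothetical and only the mkMerge/mkIf mechanics are the point.

let
  lib = (import <nixpkgs> { }).lib;
  evaluated = lib.evalModules {
    modules = [
      {
        options.userSettings = lib.mkOption {
          type = lib.types.attrsOf lib.types.anything;
          default = { };
        };
      }
      {
        # conditional fragments collapse into one attrset; false branches are dropped
        userSettings = lib.mkMerge [
          { "editor.formatOnSave" = true; }
          (lib.mkIf false { "continue.telemetryEnabled" = false; })
        ];
      }
    ];
  };
in
  # => { "editor.formatOnSave" = true; }
  evaluated.config.userSettings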
@@ -35,7 +35,7 @@
       # TODO: benchmark twilight against defiant and prune this list of models that are faster on defiant
       models = {
         # conversation models
-        "lamma3.1:8b" = {
+        "Llama 3.1 8B" = {
           model = "lamma3.1:8b";
           roles = ["chat" "edit" "apply"];
         };
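As with the docs entries earlier, this models attrset presumably gets flattened into the list shape that Continue's config.yaml uses. A standalone sketch of that transformation follows; the target schema and the provider field are assumptions, and the model tag is copied verbatim from the hunk above.

let
  lib = (import <nixpkgs> { }).lib;
  models = {
    "Llama 3.1 8B" = {
      model = "lamma3.1:8b";
      roles = [ "chat" "edit" "apply" ];
    };
  };
in
  # evaluates to a list with one entry of the form { name, provider, model, roles }
  lib.mapAttrsToList (name: m: {
    inherit name;
    provider = "ollama";  # assumption: models are served by the Ollama instance referenced elsewhere
    inherit (m) model roles;
  }) models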