Compare commits

...

3 commits

8 changed files with 187 additions and 55 deletions

View file

@@ -123,7 +123,7 @@
"browser.bookmarks.addedImportButton" = true;
"browser.newtabpage.activity-stream.feeds.section.topstories" = false;
# Usage Experiance
# Usage Experience
"browser.startup.homepage" = "about:home";
"browser.download.useDownloadDir" = false;
"browser.uiCustomization.state" = builtins.toJSON {

View file

@@ -65,58 +65,58 @@ in {
# builtins.elemAt osConfig.services.ollama.loadModels 0;
})
];
extensions = (
with open-vsx;
[
# vs code feel extensions
ms-vscode.atom-keybindings
akamud.vscode-theme-onedark
streetsidesoftware.code-spell-checker
streetsidesoftware.code-spell-checker-german
streetsidesoftware.code-spell-checker-italian
jeanp413.open-remote-ssh
# html extensions
formulahendry.auto-rename-tag
ms-vscode.live-server
# js extensions
dsznajder.es7-react-js-snippets
dbaeumer.vscode-eslint
standard.vscode-standard
firsttris.vscode-jest-runner
stylelint.vscode-stylelint
tauri-apps.tauri-vscode
# go extensions
golang.go
# astro blog extensions
astro-build.astro-vscode
unifiedjs.vscode-mdx
# misc extensions
tamasfe.even-better-toml
]
++ (lib.lists.optionals nix-development-enabled [
# nix extensions
pinage404.nix-extension-pack
jnoortheen.nix-ide
kamadorueda.alejandra
])
++ (
with vscode-marketplace;
[
# js extensions
karyfoundation.nearley
]
++ (lib.lists.optionals ai-tooling-enabled [
continue.continue
])
)
);
};
};
}

View file

@@ -1,4 +1,6 @@
[
"leyla"
"webdav"
"ollama"
"optimise"
]

View file

@@ -201,13 +201,28 @@
};
ollama = {
enable = false;
enable = true;
exposePort = true;
loadModels = [
"deepseek-coder:6.7b"
# conversation models
"llama3.1:8b"
"deepseek-r1:8b"
"deepseek-r1:32b"
"deepseek-r1:70b"
# auto complete models
"qwen2.5-coder:1.5b-base"
"qwen2.5-coder:7b"
"deepseek-coder:6.7b"
"deepseek-coder:33b"
# agent models
"qwen3:8b"
"qwen3:32b"
# embedding models
"nomic-embed-text:latest"
];
};
tailscale = {

View file

@@ -30,8 +30,65 @@
graphicsAcceleration.enable = true;
directAccess.enable = true;
};
};
ai = {
enable = true;
# TODO: benchmark twilight against defiant and prune this list of models that are faster on defiant
models = {
# conversation models
"lamma3.1:8b" = {
model = "lamma3.1:8b";
# TODO: figure out what should be in this array
# roles = [""];
};
"deepseek-r1:8b" = {
model = "deepseek-r1:8b";
# TODO: figure out what should be in this array
# roles = [""];
};
"deepseek-r1:32b" = {
model = "deepseek-r1:32b";
# TODO: figure out what should be in this array
# roles = [""];
};
# auto complete models
"qwen2.5-coder:1.5b-base" = {
model = "qwen2.5-coder:1.5b-base";
# TODO: figure out what should be in this array
# roles = [""];
};
"qwen2.5-coder:7b" = {
model = "qwen2.5-coder:7b";
# TODO: figure out what should be in this array
# roles = [""];
};
"deepseek-coder:6.7b" = {
model = "deepseek-coder:6.7b";
# TODO: figure out what should be in this array
# roles = [""];
};
"deepseek-coder:33b" = {
model = "deepseek-coder:33b";
# TODO: figure out what should be in this array
# roles = [""];
};
# agent models
"qwen3:32b" = {
model = "qwen3:32b";
# TODO: figure out what should be in this array
# roles = [""];
};
# embedding models
"nomic-embed-text:latest" = {
model = "nomic-embed-text:latest";
# TODO: figure out what should be in this array
# roles = [""];
};
};
};
};
services = {
ollama = {
enable = true;

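The roles arrays above are still open TODOs. Assuming they are meant to match the role names accepted by the continue.nix module added below, one hypothetical way to fill them in could look like this (an illustration, not part of the commit):

host.ai.models = {
  # conversation model, used for chat-style interactions
  "llama3.1:8b" = {
    model = "llama3.1:8b";
    roles = ["chat" "edit" "apply"];
  };
  # auto complete model
  "qwen2.5-coder:1.5b-base" = {
    model = "qwen2.5-coder:1.5b-base";
    roles = ["autocomplete"];
  };
  # embedding model
  "nomic-embed-text:latest" = {
    model = "nomic-embed-text:latest";
    roles = ["embed"];
  };
};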
View file

@@ -0,0 +1,44 @@
{lib, ...}: {
options.host = {
ai = {
enable = lib.mkEnableOption "should we use AI on this machine";
models = lib.mkOption {
type = lib.types.attrsOf (lib.types.submodule ({name, ...}: {
options = {
name = lib.mkOption {
type = lib.types.str;
default = name;
};
model = lib.mkOption {
type = lib.types.str;
};
provider = lib.mkOption {
type = lib.types.str;
default = "ollama";
};
apiBase = lib.mkOption {
type = lib.types.nullOr lib.types.str;
default = null;
};
roles = lib.mkOption {
type = lib.types.listOf (lib.types.enum [
"chat"
"autocomplete"
"embed"
"rerank"
"edit"
"apply"
"summarize"
]);
default = [];
};
};
}));
};
};
};
config = {
# TODO: configure ollama to download any models listed in options.host.ai.models.{name}.model if options.host.ai.models.{name}.apiBase is null
# TODO: if we have any models that have a non-null options.host.ai.models.{name}.apiBase then set services.ollama.enable to lib.mkAfter true
};
}
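A minimal sketch of how the two TODOs in the config block might be resolved, assuming config is added to the module arguments ({config, lib, ...}:) and that models without an apiBase are the ones served by the local ollama instance; this is an illustration, not part of the commit:

config = lib.mkIf config.host.ai.enable (
  let
    # models with no apiBase are assumed to be served by the local ollama instance
    localModels = lib.filterAttrs (_: m: m.apiBase == null) config.host.ai.models;
  in {
    services.ollama = {
      # the TODO suggests lib.mkAfter true; mkDefault is shown here so a host can still override it
      enable = lib.mkDefault true;
      # download every locally served model
      loadModels = map (m: m.model) (lib.attrValues localModels);
    };
  }
);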

View file

@@ -12,6 +12,7 @@
./impermanence.nix
./disko.nix
./ollama.nix
./continue.nix
./tailscale.nix
./server
];

View file

@@ -3,6 +3,10 @@
lib,
...
}: {
options = {
services.ollama.exposePort = lib.mkEnableOption "should we expose ollama on tailscale";
};
config = lib.mkMerge [
{
services.ollama = {
@@ -22,6 +26,15 @@
}
];
};
networking.firewall.interfaces.${config.services.tailscale.interfaceName} = let
ports = [
config.services.ollama.port
];
in
lib.mkIf config.services.ollama.exposePort {
allowedTCPPorts = ports;
allowedUDPPorts = ports;
};
}))
];
}
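This is presumably where a non-null apiBase in host.ai.models comes in: once a machine exposes ollama over the tailnet like this, another host could point a model entry at it instead of running the model locally (hypothetical hostname, default ollama port):

host.ai.models."qwen2.5-coder:7b" = {
  model = "qwen2.5-coder:7b";
  provider = "ollama";
  # tailnet name of the machine with services.ollama.exposePort = true (hypothetical)
  apiBase = "http://defiant:11434";
};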