forked from jan-leila/nix-config
		
	started to draft out configuration of continue models
This commit is contained in:
		
							parent
							
								
									7b6344b419
								
							
						
					
					
						commit
						77ab4781ac
					
				
					 4 changed files with 154 additions and 52 deletions
				
			
		|  | @ -65,7 +65,6 @@ in { | |||
|             # builtins.elemAt osConfig.services.ollama.loadModels 0; | ||||
|           }) | ||||
|         ]; | ||||
|       }; | ||||
| 
 | ||||
|         extensions = ( | ||||
|           with open-vsx; | ||||
|  | @ -119,4 +118,5 @@ in { | |||
|         ); | ||||
|       }; | ||||
|     }; | ||||
|   }; | ||||
| } | ||||
|  |  | |||
|  | @ -30,8 +30,65 @@ | |||
|       graphicsAcceleration.enable = true; | ||||
|       directAccess.enable = true; | ||||
|     }; | ||||
|     ai = { | ||||
|       enable = true; | ||||
|       # TODO: benchmark twilight against defiant and prune this list of models that are faster on defiant | ||||
|       models = { | ||||
|         # conversation models | ||||
|         "lamma3.1:8b" = { | ||||
|           model = "lamma3.1:8b"; | ||||
|           # TODO: figure out what should be in this array | ||||
|           # roles = [""]; | ||||
|         }; | ||||
|         "deepseek-r1:8b" = { | ||||
|           model = "deepseek-r1:8b"; | ||||
|           # TODO: figure out what should be in this array | ||||
|           # roles = [""]; | ||||
|         }; | ||||
|         "deepseek-r1:32b" = { | ||||
|           model = "deepseek-r1:32b"; | ||||
|           # TODO: figure out what should be in this array | ||||
|           # roles = [""]; | ||||
|         }; | ||||
| 
 | ||||
|         # auto complete models | ||||
|         "qwen2.5-coder:1.5b-base" = { | ||||
|           model = "qwen2.5-coder:1.5b-base"; | ||||
|           # TODO: figure out what should be in this array | ||||
|           # roles = [""]; | ||||
|         }; | ||||
|         "qwen2.5-coder:7b" = { | ||||
|           model = "qwen2.5-coder:7b"; | ||||
|           # TODO: figure out what should be in this array | ||||
|           # roles = [""]; | ||||
|         }; | ||||
|         "deepseek-coder:6.7b" = { | ||||
|           model = "deepseek-coder:6.7b"; | ||||
|           # TODO: figure out what should be in this array | ||||
|           # roles = [""]; | ||||
|         }; | ||||
|         "deepseek-coder:33b" = { | ||||
|           model = "deepseek-coder:33b"; | ||||
|           # TODO: figure out what should be in this array | ||||
|           # roles = [""]; | ||||
|         }; | ||||
| 
 | ||||
|         # agent models | ||||
|         "qwen3:32b" = { | ||||
|           model = "qwen3:32b"; | ||||
|           # TODO: figure out what should be in this array | ||||
|           # roles = [""]; | ||||
|         }; | ||||
| 
 | ||||
|         # embedding models | ||||
|         "nomic-embed-text:latest" = { | ||||
|           model = "nomic-embed-text:latest"; | ||||
|           # TODO: figure out what should be in this array | ||||
|           # roles = [""]; | ||||
|         }; | ||||
|       }; | ||||
|     }; | ||||
|   }; | ||||
|   services = { | ||||
|     ollama = { | ||||
|       enable = true; | ||||
|  |  | |||
							
								
								
									
										44
									
								
								modules/nixos-modules/continue.nix
									
										
									
									
									
										Normal file
									
								
							
							
						
						
									
										44
									
								
								modules/nixos-modules/continue.nix
									
										
									
									
									
										Normal file
									
								
							|  | @ -0,0 +1,44 @@ | |||
# Declares the host.ai option tree: an enable flag plus an attrset of AI
# models (served by ollama by default) with Continue-style role assignments.
{lib, ...}: {
  options.host = {
    ai = {
      enable = lib.mkEnableOption "should we use AI on this machine";
      models = lib.mkOption {
        type = lib.types.attrsOf (lib.types.submodule ({name, ...}: {
          # FIX: the submodule key must be `options`, not `option`, otherwise
          # the module system rejects every attribute set under it.
          options = {
            name = lib.mkOption {
              type = lib.types.str;
              default = name;
              description = "Display name of the model; defaults to the attribute name.";
            };
            # FIX: the fields below must be declared with lib.mkOption —
            # bare `{ type = ...; }` attrsets are not option declarations.
            model = lib.mkOption {
              type = lib.types.str;
              description = "Model identifier as understood by the provider (e.g. an ollama tag).";
            };
            provider = lib.mkOption {
              type = lib.types.str;
              default = "ollama";
              description = "Backend that serves this model.";
            };
            apiBase = lib.mkOption {
              # FIX: `type = str` with `default = null` fails type-checking;
              # null must be admitted explicitly via nullOr.
              type = lib.types.nullOr lib.types.str;
              default = null;
              description = "Base URL of a remote API; null means the model is served locally.";
            };
            roles = lib.mkOption {
              # FIX: `lib.types.enumOf` does not exist; the correct combinator
              # is `lib.types.enum` applied to the list of allowed values.
              type = lib.types.listOf (lib.types.enum [
                "chat"
                "autocomplete"
                "embed"
                "rerank"
                "edit"
                "apply"
                "summarize"
              ]);
              default = [];
              description = "Roles this model should be used for.";
            };
          };
        }));
        # Default to no models so the module evaluates when host.ai is unset.
        default = {};
        description = "AI models to make available on this machine.";
      };
    };
  };

  config = {
    # TODO: configure ollama to download any models listed in
    # options.host.ai.models.{name}.model when apiBase is null
    # TODO: if any model has a null apiBase then set services.ollama.enable
    # to a lib.mkAfter true
  };
}
|  | @ -12,6 +12,7 @@ | |||
|     ./impermanence.nix | ||||
|     ./disko.nix | ||||
|     ./ollama.nix | ||||
|     ./continue.nix | ||||
|     ./tailscale.nix | ||||
|     ./server | ||||
|   ]; | ||||
|  |  | |||
		Loading…
	
	Add table
		Add a link
		
	
		Reference in a new issue