forked from jan-leila/nix-config

set horizon up to use twilight ollama models

parent 8c36fe5a72
commit f96f9f7675

3 changed files with 26 additions and 2 deletions
@@ -23,6 +23,29 @@
     hardware = {
       directAccess.enable = true;
     };
+
+    ai = {
+      enable = true;
+      models = {
+        "Llama 3.1 8B" = {
+          model = "llama3.1:8b";
+          roles = ["chat" "edit" "apply"];
+          apiBase = "http://twilight:11434";
+        };
+
+        "qwen2.5-coder:1.5b-base" = {
+          model = "qwen2.5-coder:1.5b-base";
+          roles = ["autocomplete"];
+          apiBase = "http://twilight:11434";
+        };
+
+        "nomic-embed-text:latest" = {
+          model = "nomic-embed-text:latest";
+          roles = ["embed"];
+          apiBase = "http://twilight:11434";
+        };
+      };
+    };
   };
 
   environment.systemPackages = [
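The ai.models block names Continue-style roles (chat, edit, apply, autocomplete, embed) and points every entry at the ollama API on twilight. For context, a module schema along these lines would accept that block; this is a sketch under the option names implied by the TODOs in the last file below, not the repo's actual definition:

{ lib, ... }: {
  options.host.ai = {
    enable = lib.mkEnableOption "AI tooling on this host";
    models = lib.mkOption {
      default = { };
      # one attrset per model; the attribute name is a display label
      type = lib.types.attrsOf (lib.types.submodule {
        options = {
          model = lib.mkOption {
            type = lib.types.str; # ollama model tag, e.g. "llama3.1:8b"
          };
          roles = lib.mkOption {
            type = lib.types.listOf lib.types.str; # chat/edit/apply/autocomplete/embed
          };
          apiBase = lib.mkOption {
            type = lib.types.nullOr lib.types.str;
            default = null; # null means "use the local ollama instance"
          };
        };
      });
    };
  };
}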
@@ -83,6 +83,7 @@
   services = {
     ollama = {
       enable = true;
+      exposePort = true;
 
       loadModels = [
         # conversation models
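exposePort is an option defined in this config rather than in upstream nixpkgs, whose ollama module instead offers services.ollama.host, services.ollama.port, and services.ollama.openFirewall. A minimal sketch of what exposePort plausibly wraps, assuming it just publishes the API beyond loopback:

{ config, lib, ... }: {
  options.services.ollama.exposePort = lib.mkOption {
    type = lib.types.bool;
    default = false;
    description = "Serve the ollama API to other hosts instead of loopback only.";
  };

  config = lib.mkIf config.services.ollama.exposePort {
    # bind on all interfaces and open the API port (11434 by default)
    services.ollama.host = "0.0.0.0";
    networking.firewall.allowedTCPPorts = [ config.services.ollama.port ];
  };
}

With twilight set up this way, horizon reaches the shared models at the apiBase used above, http://twilight:11434.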
@@ -40,7 +40,7 @@
   };
 
   config = {
-    # TODO: configure ollama to download any models listed in options.host.ai.models.{name}.model if options.host.ai.models.{name}.apiBase is the default value
-    # TODO: if we have any models that have a non-null options.host.ai.models.{name}.apiBase then set services.ollama.enable to a lib.mkAfter true
+    # TODO: configure ollama to download any models listed in options.host.ai.models.{name}.model if options.host.ai.models.{name}.apiBase is localhost
+    # TODO: if we have any models that have a non-localhost options.host.ai.models.{name}.apiBase then set services.ollama.enable to a lib.mkAfter true
   };
 }
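A sketch of how the first TODO could be implemented under the option layout the comments assume (host.ai.models.{name}); the localhost predicate is illustrative:

{ config, lib, ... }:
let
  # models whose apiBase is unset or loopback are served by the local
  # ollama instance and should be pulled by it at activation
  localModels = lib.filterAttrs
    (_name: m: m.apiBase == null || lib.hasPrefix "http://localhost" m.apiBase)
    config.host.ai.models;
in {
  config = lib.mkIf config.host.ai.enable {
    services.ollama.loadModels = lib.mapAttrsToList (_name: m: m.model) localModels;
  };
}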