forked from jan-leila/nix-config

set horizon up to use twilight ollama models

parent 8c36fe5a72
commit f96f9f7675

3 changed files with 26 additions and 2 deletions
@@ -23,6 +23,29 @@
     hardware = {
       directAccess.enable = true;
     };
+
+    ai = {
+      enable = true;
+      models = {
+        "Llama 3.1 8B" = {
+          model = "lamma3.1:8b";
+          roles = ["chat" "edit" "apply"];
+          apiBase = "http://twilight:11434";
+        };
+
+        "qwen2.5-coder:1.5b-base" = {
+          model = "qwen2.5-coder:1.5b-base";
+          roles = ["autocomplete"];
+          apiBase = "http://twilight:11434";
+        };
+
+        "nomic-embed-text:latest" = {
+          model = "nomic-embed-text:latest";
+          roles = ["embed"];
+          apiBase = "http://twilight:11434";
+        };
+      };
+    };
   };
 
   environment.systemPackages = [
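All three entries point at the same remote Ollama endpoint on twilight. As an illustration only (not part of the commit), the shared apiBase could be factored out with a small helper; note that the committed model name "lamma3.1:8b" looks like a typo for the Ollama tag "llama3.1:8b", which this sketch uses. The onTwilight helper is hypothetical and assumes the ai module accepts the same attribute shape shown in the diff.

let
  twilightOllama = "http://twilight:11434";
  # hypothetical helper: pin a model entry to the twilight endpoint
  onTwilight = model: roles: { inherit model roles; apiBase = twilightOllama; };
in
{
  ai = {
    enable = true;
    models = {
      "Llama 3.1 8B" = onTwilight "llama3.1:8b" ["chat" "edit" "apply"];
      "qwen2.5-coder:1.5b-base" = onTwilight "qwen2.5-coder:1.5b-base" ["autocomplete"];
      "nomic-embed-text:latest" = onTwilight "nomic-embed-text:latest" ["embed"];
    };
  };
}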
@@ -83,6 +83,7 @@
   services = {
     ollama = {
       enable = true;
+      exposePort = true;
 
       loadModels = [
         # conversation models
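exposePort is a custom option of this config rather than a standard nixpkgs one, and its implementation is not shown in this diff. A minimal sketch of what it presumably arranges on twilight so that horizon can reach http://twilight:11434, assuming it wraps the standard services.ollama options; the real module may differ.

# Hypothetical expansion of the custom exposePort flag
services.ollama = {
  enable = true;
  host = "0.0.0.0";      # listen on all interfaces instead of the 127.0.0.1 default
  port = 11434;          # matches the apiBase used on horizon
  openFirewall = true;   # open the port in the NixOS firewall
};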