fix: ollama

This commit is contained in:
s-prechtl 2025-11-09 16:29:52 +01:00
parent 6b72e90c41
commit 886a34f7a5

View file

@@ -2,19 +2,18 @@
services.open-webui = { services.open-webui = {
enable = true; enable = true;
openFirewall = true; openFirewall = true;
host = "0.0.0.0"; host = "chattn.sprechtl.me";
}; };
services.ollama = { services.ollama = {
enable = true; enable = true;
host = "chattn.sprechtl.me";
acceleration = "cuda"; acceleration = "cuda";
loadModels = ["llama3.2:3b" "deepseek-r1:1.5b" "gpt-oss:20b"]; loadModels = ["llama3.2:3b" "deepseek-r1:1.5b" "gpt-oss:20b"];
}; };
services.nginx = { services.nginx = {
enable = true; enable = true;
virtualHosts.${config.services.ollama.host} = { virtualHosts.${config.services.open-webui.host} = {
forceSSL = true; forceSSL = true;
enableACME = true; enableACME = true;
locations."/" = { locations."/" = {