move ollama to profiles

parent 317d838075
commit 5c8c48d5ea

@@ -15,6 +15,8 @@
     customProfiles.nicotine
     customProfiles.sunshine
     customProfiles.wine-games
+
+    customProfiles.ollama
   ];

   security.pki.certificateFiles = [ ../../misc/mitmproxy-ca-cert.pem ];
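The customProfiles.* entries in this list are presumably references to profile modules that end up in the host's imports; how customProfiles itself is defined is not part of this commit. A minimal hypothetical sketch of that wiring, with assumed names and paths, could look like:

{ ... }:
let
  # Assumption for illustration only: an attribute set mapping profile names
  # to module paths under ./profiles.
  customProfiles = {
    ollama = ./profiles/servers/ollama.nix;
  };
in {
  imports = [ customProfiles.ollama ];
}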
@@ -118,46 +120,14 @@
     home.stateVersion = "24.05";
   };

-  services.ollama = {
-    enable = true;
-    host = "127.0.0.1";
-    port = 11434;
-    acceleration = "rocm";
-    openFirewall = false;
-    environmentVariables = {
-      HSA_OVERRIDE_GFX_VERSION = "10.3.0";
-      OLLAMA_KEEP_ALIVE = "-1";
-      # OLLAMA_LLM_LIBRARY = "";
-    };
-  };
-  services.open-webui = {
-    enable = true;
-    host = "127.0.0.1";
-    port = 8081;
-    openFirewall = false;
-    environment = {
-      ANONYMIZED_TELEMETRY = "False";
-      DO_NOT_TRACK = "True";
-      SCARF_NO_ANALYTICS = "True";
-      OLLAMA_API_BASE_URL = "http://127.0.0.1:11434";
-      # Disable authentication
-      WEBUI_AUTH = "False";
-    };
-  };
-
   persist.state = {
-    directories = [
-      "/var/lib/ollama"
-      "/var/lib/open-webui"
-    ];
     homeDirectories = [
       ".local/share/winbox"
       ".local/share/PrismLauncher"
       ".local/share/distrobox"
       ".mitmproxy"
       ".config/exercism"
-      ".llama"
     ];
   };

   system.stateVersion = "23.05";
profiles/servers/ollama.nix (new file, 61 lines)

@@ -0,0 +1,61 @@
+{ config, lib, ... }:
+let
+  gpu = config.deviceSpecific.devInfo.gpu.vendor;
+in {
+  services.ollama = {
+    enable = true;
+    host = "127.0.0.1";
+    port = 11434;
+    sandbox = false;
+    acceleration =
+      if gpu == "amd" then
+        "rocm"
+      else if gpu == "nvidia" then
+        "cuda"
+      else false;
+    openFirewall = false;
+    environmentVariables = {
+      HSA_OVERRIDE_GFX_VERSION = "10.3.0";
+      OLLAMA_KEEP_ALIVE = "-1";
+      # OLLAMA_LLM_LIBRARY = "";
+    };
+  };
+  services.open-webui = {
+    enable = true;
+    host = "127.0.0.1";
+    port = 8081;
+    openFirewall = false;
+    environment = {
+      ANONYMIZED_TELEMETRY = "False";
+      DO_NOT_TRACK = "True";
+      SCARF_NO_ANALYTICS = "True";
+      OLLAMA_API_BASE_URL = "http://127.0.0.1:11434";
+      # Disable authentication
+      WEBUI_AUTH = "False";
+    };
+  };
+
+  users.groups.ollama = { };
+  users.users.ollama = {
+    description = "ollama user";
+    isSystemUser = true;
+    group = "ollama";
+    extraGroups = [ "video" "render" ];
+  };
+
+  systemd.services.ollama.serviceConfig = {
+    DynamicUser = lib.mkForce false;
+    User = "ollama";
+    Group = "ollama";
+  };
+  systemd.services.open-webui.serviceConfig = {
+    DynamicUser = lib.mkForce false;
+    User = "ollama";
+    Group = "ollama";
+  };
+
+  persist.state.directories = [
+    "/var/lib/ollama"
+    "/var/lib/open-webui"
+  ];
+}
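Since the profile pins open-webui to 127.0.0.1:8081 and ollama to 11434, a host that imports customProfiles.ollama but needs a different port can still override it through the NixOS module system. A hypothetical host-side fragment, not part of this commit (the port 3000 is an arbitrary example):

{ lib, ... }: {
  # lib.mkForce (priority 50) wins over the plain assignment (priority 100)
  # made in profiles/servers/ollama.nix, avoiding a conflicting-definition error.
  services.open-webui.port = lib.mkForce 3000;
}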