update many homelab services

parent 6cb0af468f
commit cc75b6db4f
@@ -3,15 +3,20 @@ let
   persistRoot = config.autoinstall.persist.persistRoot or "/persist";
 in {
   imports = with inputs.self; [
-    "${toString modulesPath}/profiles/hardened.nix"
-    ./hardware-configuration.nix
     ./boot.nix
+    ./hardening.nix
+    ./hardware-configuration.nix
     ./virtualisation.nix

     nixosRoles.hypervisor
     nixosProfiles.acme
+    nixosProfiles.battery-historian
+    nixosProfiles.blocky
+    nixosProfiles.duplicacy
+    nixosProfiles.fail2ban
+    # nixosProfiles.firefox-syncserver
     nixosProfiles.gitea
-    # nixosProfiles.joplin-server
+    nixosProfiles.joplin-server
     nixosProfiles.mailserver
     nixosProfiles.nginx
     nixosProfiles.roundcube
@@ -87,8 +92,8 @@ in {

   # hardened
   networking.firewall.enable = true;
-  networking.firewall.allowedTCPPorts = [];
-  networking.firewall.allowedUDPPorts = [];
+  networking.firewall.allowedTCPPorts = lib.mkDefault [];
+  networking.firewall.allowedUDPPorts = lib.mkDefault [];
   systemd.coredump.enable = false;
   programs.firejail.enable = true;
   # scudo memalloc is unstable
@@ -105,8 +110,8 @@ in {
   networking.interfaces.br0 = {
     useDHCP = false;
     ipv4.addresses = [{
-      "address" = "192.168.0.10";
-      "prefixLength" = 24;
+      address = "192.168.0.10";
+      prefixLength = 24;
     }];
   };
   networking.extraHosts = ''
@@ -1,8 +1,12 @@
 { config, pkgs, lib, ... }: {
   boot.kernelModules = [
-    "xt_nat"
+    # "xt_nat"
     # "iptable_nat"
     # "iptable_filter"
+    # "ip_tables"
+    # "nft_chain_nat"
+    # "nft_masq"
+    "x_tables"
   ];

   virtualisation = {
profiles/servers/blocky.nix (new file, 139 lines)
@@ -0,0 +1,139 @@
+{ config, pkgs, lib, ... }: {
+
+  containers.blocky = {
+    # extraFlags = [ "-U" ];
+    autoStart = true;
+    ephemeral = true;
+    privateNetwork = true;
+    hostBridge = "br0";
+    localAddress = "192.168.0.5/24";
+    tmpfs = [ "/" ];
+    config = { config, pkgs, ... }: {
+      networking = {
+        defaultGateway = "192.168.0.1";
+        hostName = "blocky-node";
+        nameservers = [ "127.0.0.1" ];
+        enableIPv6 = false;
+        useHostResolvConf = false;
+        firewall = {
+          enable = true;
+          allowedTCPPorts = [
+            953
+            # config.services.prometheus.port
+            config.services.blocky.settings.port
+            # config.services.blocky.settings.httpPort
+            # config.services.grafana.settings.server.http_port
+          ];
+          allowedUDPPorts = [ 53 ];
+          rejectPackets = false;
+        };
+      };
+      services.blocky = {
+        enable = true;
+        settings = {
+          upstream.default = [ "127.0.0.1:953" ];
+          upstreamTimeout = "10s";
+          blocking = {
+            blackLists.ads = [
+              "https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts"
+            ];
+            clientGroupsBlock.default = [ "ads" ];
+          };
+          port = 53;
+          httpPort = 4000;
+          # httpPort = 8080;
+          # httpsPort = 8443;
+          # customDNS = {
+          # # customTTL = "1h";
+          # # filterUnmappedTypes = "true";
+          # mapping = {
+          # "code.ataraxiadev.com" = "192.168.0.10";
+          # };
+          # };
+          queryLog = {
+            type = "console";
+          };
+          prometheus.enable = true;
+        };
+      };
+      services.prometheus = {
+        # enable = true;
+        port = 9090;
+        listenAddress = "0.0.0.0";
+        globalConfig = {
+          scrape_interval = "15s";
+          evaluation_interval = "15s";
+        };
+        scrapeConfigs = [{
+          job_name = "blocky";
+          static_configs = [{
+            targets = [ "127.0.0.1:${toString config.services.blocky.settings.httpPort}" ];
+          }];
+        }];
+      };
+      services.grafana = {
+        # enable = true;
+        settings = {
+          analytics.reporting_enabled = false;
+          server = {
+            http_port = 3000;
+            http_addr = "0.0.0.0";
+            enable_gzip = true;
+          };
+          security = {
+            admin_user = "admin";
+            admin_password = "admin";
+            # admin_password = "$__file(/var/secrets/grafana)";
+          };
+        };
+        provision.enable = true;
+        provision.datasources.settings = {
+          apiVersion = 1;
+          datasources = [{
+            name = "Prometheus";
+            type = "prometheus";
+            access = "proxy";
+            orgId = 1;
+            url = "127.0.0.1:${toString config.services.prometheus.port}";
+            isDefault = true;
+            jsonData = {
+              graphiteVersion = "1.1";
+              tlsAuth = false;
+              tlsAuthWithCACert = false;
+            };
+            version = 1;
+            editable = true;
+          }];
+          deleteDatasources = [{
+            name = "Prometheus";
+            orgId = 1;
+          }];
+        };
+      };
+      services.dnscrypt-proxy2 = {
+        enable = true;
+        settings = {
+          listen_addresses = [ "0.0.0.0:953" ];
+          ipv6_servers = false;
+          doh_servers = false;
+          require_dnssec = true;
+          require_nolog = true;
+          require_nofilter = true;
+          block_ipv6 = true;
+          bootstrap_resolvers = [ "9.9.9.9:53" "9.9.9.11:53" ];
+          sources = {
+            public-resolvers = {
+              urls = [
+                "https://raw.githubusercontent.com/DNSCrypt/dnscrypt-resolvers/master/v3/public-resolvers.md"
+                "https://download.dnscrypt.info/resolvers-list/v3/public-resolvers.md"
+              ];
+              cache_file = "/var/lib/dnscrypt-proxy2/public-resolvers.md";
+              minisign_key = "RWQf6LRCGA9i53mlYecO4IzT51TGPpvWucNSCh1CBM0QTaLn73Y7GFO3";
+            };
+          };
+        };
+      };
+      system.stateVersion = "23.05";
+    };
+  };
+}
@@ -1,87 +1,124 @@
+# { config, lib, pkgs, ... }:
+# let
+# start-backup = ''
+# #!${pkgs.runtimeShell}
+# export DUPLICACY_GCD_TOKEN=/var/secrets/gcd-token
+# export DUPLICACY_PASSWORD=$(cat /var/secrets/duplicacy-pass)
+
+# if [ ! -d "/backups/.duplicacy" ]; then
+# echo "First init duplicacy repo with \"duplicacy init -e gcd://<folder-in-gdisk>\""
+# exit 1
+# fi
+
+# if [ ! -d "/backups/var" ]; then
+# mkdir -p /backups/var
+# fi
+
+# if [ ! -L "/backups/var/dkim" ]; then
+# ln -s /var/dkim /backups/var/dkim
+# fi
+
+# if [ ! -L "/backups/var/vmail" ]; then
+# ln -s /var/vmail /backups/var/vmail
+# fi
+
+# if [ ! -L "/backups/var/microbin" ]; then
+# ln -s /var/microbin /backups/var/microbin
+# fi
+
+# if [ ! -L "/backups/gitea" ]; then
+# ln -s /gitea /backups/gitea
+# fi
+
+# if [ ! -d "/backups/srv" ]; then
+# mkdir -p /backups/var
+# fi
+
+# if [ ! -L "/backups/srv/joplin" ]; then
+# ln -s /srv/joplin /backups/srv/joplin
+# fi
+
+# cd /backups
+# duplicacy backup
+# '';
+# start-prune = ''
+# #!${pkgs.runtimeShell}
+# export DUPLICACY_GCD_TOKEN=/var/secrets/gcd-token;
+# export DUPLICACY_PASSWORD=$(cat /var/secrets/duplicacy-pass);
+
+# if [ ! -d "/backups/.duplicacy" ]; then
+# echo "First init duplicacy repo with \"duplicacy init -e gcd://<folder-in-gdisk>\""
+# exit 1
+# fi
+# cd /backups
+# duplicacy prune -keep 0:30 -keep 7:14 -keep 1:7
+# '';
+# in {
+# secrets.gcd-token.services = [ ];
+# secrets.duplicacy-pass.services = [ ];
+
+# systemd.services.duplicacy-backup = {
+# serviceConfig.Type = "oneshot";
+# path = [ pkgs.duplicacy ];
+# script = start-backup;
+# };
+
+# systemd.timers.duplicacy-backup = {
+# wantedBy = [ "timers.target" ];
+# partOf = [ "duplicacy-backup.service" ];
+# timerConfig.OnCalendar = [ "*-*-* 05:00:00" ];
+# };
+
+# systemd.services.duplicacy-prune = {
+# serviceConfig.Type = "oneshot";
+# path = [ pkgs.duplicacy ];
+# script = start-prune;
+# };
+
+# systemd.timers.duplicacy-prune = {
+# wantedBy = [ "timers.target" ];
+# partOf = [ "duplicacy-prune.service" ];
+# timerConfig.OnCalendar = [ "*-*-* 01:00:00" ];
+# };
+
+# # FIXME!
+# persist.state.directories = lib.mkIf config.deviceSpecific.devInfo.fileSystem != "zfs"
+# [ "/backup" ];
+# }
 { config, lib, pkgs, ... }:
 let
-  start-backup = ''
-    #!${pkgs.runtimeShell}
-    export DUPLICACY_GCD_TOKEN=/var/secrets/gcd-token
-    export DUPLICACY_PASSWORD=$(cat /var/secrets/duplicacy-pass)
-
-    if [ ! -d "/backups/.duplicacy" ]; then
-      echo "First init duplicacy repo with \"duplicacy init -e gcd://<folder-in-gdisk>\""
-      exit 1
-    fi
-
-    if [ ! -d "/backups/var" ]; then
-      mkdir -p /backups/var
-    fi
-
-    if [ ! -L "/backups/var/dkim" ]; then
-      ln -s /var/dkim /backups/var/dkim
-    fi
-
-    if [ ! -L "/backups/var/vmail" ]; then
-      ln -s /var/vmail /backups/var/vmail
-    fi
-
-    if [ ! -L "/backups/var/microbin" ]; then
-      ln -s /var/microbin /backups/var/microbin
-    fi
-
-    if [ ! -L "/backups/gitea" ]; then
-      ln -s /gitea /backups/gitea
-    fi
-
-    if [ ! -d "/backups/srv" ]; then
-      mkdir -p /backups/var
-    fi
-
-    if [ ! -L "/backups/srv/joplin" ]; then
-      ln -s /srv/joplin /backups/srv/joplin
-    fi
-
-    cd /backups
-    duplicacy backup
-  '';
-  start-prune = ''
-    #!${pkgs.runtimeShell}
-    export DUPLICACY_GCD_TOKEN=/var/secrets/gcd-token;
-    export DUPLICACY_PASSWORD=$(cat /var/secrets/duplicacy-pass);
-
-    if [ ! -d "/backups/.duplicacy" ]; then
-      echo "First init duplicacy repo with \"duplicacy init -e gcd://<folder-in-gdisk>\""
-      exit 1
-    fi
-    cd /backups
-    duplicacy prune -keep 0:30 -keep 7:14 -keep 1:7
-  '';
+  backend = config.virtualisation.oci-containers.backend;
+  pass-path = "/tmp/pass";
+  gcd-path = "/tmp/gcd-token";
+  config-path = "/repo";
+  config-host-path = "/var/lib/duplicacy";
 in {
-  secrets.gcd-token.services = [ ];
-  secrets.duplicacy-pass.services = [ ];
+  secrets.duplicacy-pass.services = [ "${backend}-duplicacy.service" ];
+  secrets.gcd-token.services = [ "${backend}-duplicacy.service" ];

-  systemd.services.duplicacy-backup = {
-    serviceConfig.Type = "oneshot";
-    path = [ pkgs.duplicacy ];
-    script = start-backup;
+  virtualisation.oci-containers.containers.duplicacy = {
+    autoStart = true;
+    environment = rec {
+      BACKUP_NAME = "homelab-duplicacy-backup";
+      BACKUP_ENCRYPTION_KEY_FILE = pass-path;
+      BACKUP_SCHEDULE = "0 8 * * *";
+      BACKUP_LOCATION = "gcd://backups/${BACKUP_NAME}";
+      GCD_TOKEN = gcd-path;
+      # DUPLICACY_INIT_OPTIONS = "-storage-name ${BACKUP_NAME}";
+      # If backing up from hdd, change threads to 1
+      DUPLICACY_BACKUP_OPTIONS = "-threads 8 -stats";
+      DUPLICACY_PRUNE_OPTIONS = "-keep 0:360 -keep 30:180 -keep 7:30";
+      PRUNE_SCHEDULE = "0 9 * * *";
+      DUPLICACY_CONFIG_PATH = config-path;
+    };
+    image = "docker.io/ataraxiadev/duplicacy-autobackup";
+    volumes = [
+      "/srv:/data:ro" # backup folder
+      "${config-host-path}:${config-path}" # path to .duplicacy config
+      "${config.secrets.duplicacy-pass.decrypted}:${pass-path}:ro"
+      "${config.secrets.gcd-token.decrypted}:${gcd-path}:ro"
+    ];
   };

-  systemd.timers.duplicacy-backup = {
-    wantedBy = [ "timers.target" ];
-    partOf = [ "duplicacy-backup.service" ];
-    timerConfig.OnCalendar = [ "*-*-* 05:00:00" ];
-  };
-
-  systemd.services.duplicacy-prune = {
-    serviceConfig.Type = "oneshot";
-    path = [ pkgs.duplicacy ];
-    script = start-prune;
-  };
-
-  systemd.timers.duplicacy-prune = {
-    wantedBy = [ "timers.target" ];
-    partOf = [ "duplicacy-prune.service" ];
-    timerConfig.OnCalendar = [ "*-*-* 01:00:00" ];
-  };
-
-  # FIXME!
-  persist.state.directories = lib.mkIf config.deviceSpecific.devInfo.fileSystem != "zfs"
-    [ "/backup" ];
+  persist.state.directories = [ config-host-path ];
 }
@@ -12,16 +12,23 @@
     settings = {
       port = 5000;
       tokenserver.enabled = true;
+      # syncserver = {
+      # public_url = "https://fsync.ataraxiadev.com";
+      # };
+      # endpoints = {
+      # "sync-1.5" = "http://localhost:8000/1.5/1";
+      # };
     };
     singleNode = {
       enable = true;
+      capacity = 10;
       # enableTLS = false;
       # enableNginx = false;
-      enableTLS = false;
-      enableNginx = true;
-      hostname = "localhost";
+      # enableTLS = false;
+      # enableNginx = true;
+      # hostname = "localhost";
       # hostname = "fsync.ataraxiadev.com";
-      # url = "https://fsync.ataraxiadev.com";
+      url = "https://fsync.ataraxiadev.com";
     };
   };
 }
@@ -22,11 +22,11 @@ in {
       type = "postgres";
       passwordFile = config.secrets.gitea.decrypted;
     };
-    # TODO: cleanup cache older than...
+    # TODO: cleanup dumps older than...
     dump = {
      enable = true;
      backupDir = "/srv/gitea/dump";
-     interval = "daily";
+     interval = "06:00";
      type = "tar.zst";
    };
    domain = "code.ataraxiadev.com";
@@ -97,4 +97,15 @@ in {
       };
     };
   };
+
+  systemd.services.gitea-dump-clean = let
+    older-than = "3"; # in days
+  in rec {
+    before = [ "gitea-dump.service" ];
+    wantedBy = before;
+    script = ''
+      ${pkgs.findutils}/bin/find ${config.services.gitea.dump.backupDir} \
+        -mindepth 1 -type f -mtime +${older-than} -delete
+    '';
+  };
 }
@@ -4,86 +4,48 @@ let
   joplin-db-data = "/srv/joplin/postgres";
   joplin-uid = "1001";
   backend = config.virtualisation.oci-containers.backend;
+  pod-name = "joplin-pod";
+  open-ports = [ "127.0.0.1:22300:22300/tcp" ];
 in {
   secrets.joplin-env = { };
   secrets.joplin-db-env = { };

+  # FIXMEL mailer
   virtualisation.oci-containers.containers = {
     joplin = {
       autoStart = true;
       dependsOn = [ "joplin-db" ];
+      environment = { MAX_TIME_DRIFT = "4000"; };
       environmentFiles = [ config.secrets.joplin-env.decrypted ];
-      extraOptions = [
-        "--pod=joplin"
-        # "--network=joplin"
-      ];
-      # ports = [ "127.0.0.1:22300:22300" ];
-      image = "docker.io/library/ataraxiadev/joplin-server:2.9.17";
+      extraOptions = [ "--pod=${pod-name}" ];
+      image = "docker.io/ataraxiadev/joplin-server:2.9.17";
       volumes = [ "${joplin-data}:/home/joplin/data" ];
     };
     joplin-db = {
       autoStart = true;
       environmentFiles = [ config.secrets.joplin-db-env.decrypted ];
-      extraOptions = [
-        "--pod=joplin"
-        # "--network=joplin"
-      ];
-      image = "docker.io/library/postgres:13";
+      extraOptions = [ "--pod=${pod-name}" ];
+      image = "docker.io/postgres:13";
       volumes = [ "${joplin-db-data}:/var/lib/postgresql/data" ];
     };
   };
-  systemd.services.podman-create-pod-joplin = let
-    podman = config.virtualisation.podman.package;
-    # start-script = pkgs.writeShellScript "start" ''
-    # '';
-  in {
+  systemd.services."podman-create-${pod-name}" = let
+    portsMapping = lib.concatMapStrings (port: " -p " + port) open-ports;
+    start = pkgs.writeShellScript "create-pod" ''
+      mkdir -p ${joplin-data} && chown ${joplin-uid} ${joplin-data}
+      mkdir -p ${joplin-db-data}
+      podman pod exists ${pod-name} || podman pod create -n ${pod-name} ${portsMapping}
+    '';
+    stop = "podman pod rm -i -f ${pod-name}";
+  in rec {
+    path = [ pkgs.coreutils config.virtualisation.podman.package ];
+    before = [ "${backend}-joplin.service" "${backend}-joplin-db.service" ];
+    wantedBy = before;
     serviceConfig = {
       Type = "oneshot";
       RemainAfterExit = "yes";
-      ExecStart = ''
-        mkdir -p ${joplin-data} && chown ${joplin-uid} ${joplin-data}
-        mkdir -p ${joplin-db-data}
-        ${podman}/bin/podman pod exists joplin ||
-        ${podman}/bin/podman pod create -n joplin -p "127.0.0.1:22300:22300"
-      '';
-      ExecStop = "${podman}/bin/podman pod rm -i -f joplin";
+      ExecStart = start;
+      ExecStop = stop;
     };
-    wantedBy = [ "${backend}-joplin.service" "${backend}-joplin-db.service" ];
-    # script = ''
-    # mkdir -p ${joplin-data} && chown ${joplin-uid} ${joplin-data} || true
-    # mkdir -p ${joplin-db-data} || true
-    # ${config.virtualisation.podman.package}/bin/podman pod exists joplin ||
-    # ${config.virtualisation.podman.package}/bin/podman pod create -n joplin -p "127.0.0.1:22300:22300"
-    # '';
   };
-  # systemd.services.create-joplin-network = with config.virtualisation.oci-containers; {
-  # serviceConfig.Type = "oneshot";
-  # wantedBy = [
-  # "${backend}-joplin.service"
-  # "${backend}-joplin-db.service"
-  # ];
-  # script = ''
-  # ${pkgs.podman}/bin/podman network inspect joplin || \
-  # ${pkgs.podman}/bin/podman network create -d bridge joplin || true
-  # '';
-  # };
-  # systemd.services.podman-joplin = {
-  # # path = [ "/run/wrappers" ];
-  # # serviceConfig.User = config.mainuser;
-  # preStart = "podman network create -d bridge joplin || true";
-  # postStop = "podman network rm joplin || true";
-  # };
-  # systemd.services.podman-joplin-db = {
-  # # path = [ "/run/wrappers" ];
-  # # serviceConfig.User = config.mainuser;
-  # preStart = "podman network create -d bridge joplin || true";
-  # postStop = "podman network rm joplin || true";
-  # };
-  # systemd.services.create-joplin-folder = {
-  # serviceConfig.Type = "oneshot";
-  # wantedBy = [ "${backend}-joplin.service" ];
-  # script = ''
-  # mkdir -p ${joplin-data} && chown ${joplin-uid} ${joplin-data}
-  # '';
-  # };
 }
@@ -28,55 +28,6 @@ in {
     "b.barracudacentral.org"
     "bl.spamcop.net"
     "blacklist.woody.ch"
-    # "bogons.cymru.com"
-    # "cbl.abuseat.org"
-    # "combined.abuse.ch"
-    # "db.wpbl.info"
-    # "dnsbl-1.uceprotect.net"
-    # "dnsbl-2.uceprotect.net"
-    # "dnsbl-3.uceprotect.net"
-    # "dnsbl.anticaptcha.net"
-    # "dnsbl.dronebl.org"
-    # "dnsbl.inps.de"
-    # "dnsbl.sorbs.net"
-    # "dnsbl.spfbl.net"
-    # "drone.abuse.ch"
-    # "duinv.aupads.org"
-    # "dul.dnsbl.sorbs.net"
-    # "dyna.spamrats.com"
-    # "dynip.rothen.com"
-    # "http.dnsbl.sorbs.net"
-    # "ips.backscatterer.org"
-    # "ix.dnsbl.manitu.net"
-    # "korea.services.net"
-    # "misc.dnsbl.sorbs.net"
-    # "noptr.spamrats.com"
-    # "orvedb.aupads.org"
-    # "pbl.spamhaus.org"
-    # "proxy.bl.gweep.ca"
-    # "psbl.surriel.com"
-    # "relays.bl.gweep.ca"
-    # "relays.nether.net"
-    # "sbl.spamhaus.org"
-    # "singular.ttk.pte.hu"
-    # "smtp.dnsbl.sorbs.net"
-    # "socks.dnsbl.sorbs.net"
-    # "spam.abuse.ch"
-    # "spam.dnsbl.anonmails.de"
-    # "spam.dnsbl.sorbs.net"
-    # "spam.spamrats.com"
-    # "spambot.bls.digibase.ca"
-    # "spamrbl.imp.ch"
-    # "spamsources.fabel.dk"
-    # "ubl.lashback.com"
-    # "ubl.unsubscore.com"
-    # "virus.rbl.jp"
-    # "web.dnsbl.sorbs.net"
-    # "wormrbl.imp.ch"
-    # "xbl.spamhaus.org"
-    # "z.mailspike.net"
-    # "zen.spamhaus.org"
-    # "zombie.dnsbl.sorbs.net"
   ];
   dnsBlacklistOverrides = ''
     ataraxiadev.com OK
@@ -136,7 +87,6 @@ in {
   };
   hierarchySeparator = "/";
   localDnsResolver = true;
-  # certificateScheme = 3;
   certificateScheme = 1;
   certificateFile = "${config.security.acme.certs.${fqdn}.directory}/fullchain.pem";
   keyFile = "${config.security.acme.certs.${fqdn}.directory}/key.pem";
@@ -27,10 +27,11 @@
       # "nzbhydra.ataraxiadev.com"
       # "kavita.ataraxiadev.com"
       # "shoko.ataraxiadev.com"
-      # "bathist.ataraxiadev.com"
+      "bathist.ataraxiadev.com"
       # "microbin.ataraxiadev.com"
-      # "joplin.ataraxiadev.com"
+      "joplin.ataraxiadev.com"
       "api.ataraxiadev.com"
+      "fsync.ataraxiadev.com"
     ];
   };
 };
@@ -155,12 +156,12 @@
       extraConfig = proxySettings;
     };
   } // default;
-  # "bathist.ataraxiadev.com" = {
-  # locations."/" = {
-  # proxyPass = "http://localhost:9999";
-  # extraConfig = proxySettings;
-  # };
-  # } // default;
+  "bathist.ataraxiadev.com" = {
+    locations."/" = {
+      proxyPass = "http://localhost:9999";
+      extraConfig = proxySettings;
+    };
+  } // default;
   # "file.ataraxiadev.com" = {
   # locations."/" = {
   # proxyPass = "http://localhost:8088";
@@ -211,12 +212,18 @@
   # '' + proxySettings;
   # };
   # } // default;
-  # "joplin.ataraxiadev.com" = {
-  # locations."/" = {
-  # proxyPass = "http://localhost:22300";
-  # extraConfig = proxySettings;
-  # };
-  # } // default;
+  "joplin.ataraxiadev.com" = {
+    locations."/" = {
+      proxyPass = "http://localhost:22300";
+      extraConfig = proxySettings;
+    };
+  } // default;
+  "fsync.ataraxiadev.com" = {
+    locations."/" = {
+      proxyPass = "http://localhost:5000";
+      extraConfig = proxySettings;
+    };
+  } // default;
   "api.ataraxiadev.com" = {
     locations."~ (\\.py|\\.sh)$" = with config.services; {
       alias = "/srv/http/api.ataraxiadev.com";