many changes and more modularizing
1
hosts/fw-new/channel
Normal file
@@ -0,0 +1 @@
https://channels.nixos.org/nixos-unstable
141
hosts/fw-new/configuration.nix
Normal file
@@ -0,0 +1,141 @@
|
||||
{ lib, pkgs, ... }: {
|
||||
imports = [
|
||||
./fleet.nix
|
||||
./utils/bento.nix
|
||||
./utils/modules/sops.nix
|
||||
./utils/modules/lego/lego.nix
|
||||
./utils/modules/nginx.nix
|
||||
|
||||
./utils/modules/autoupgrade.nix
|
||||
./utils/modules/promtail
|
||||
./utils/modules/borgbackup.nix
|
||||
# ./utils/modules/netdata.nix
|
||||
|
||||
# fw
|
||||
./modules/networking.nix
|
||||
./modules/firewall.nix
|
||||
./modules/dhcp4.nix
|
||||
./modules/unbound.nix
|
||||
./modules/avahi.nix
|
||||
./modules/openconnect.nix
|
||||
./modules/wireguard.nix
|
||||
./modules/podman.nix
|
||||
./modules/omada.nix
|
||||
# ./modules/ddclient.nix
|
||||
# ./modules/wol.nix
|
||||
|
||||
# microvm
|
||||
./modules/microvm.nix
|
||||
./modules/gitea-vm.nix
|
||||
|
||||
# web
|
||||
./modules/web
|
||||
|
||||
# git
|
||||
./modules/gitea.nix
|
||||
./modules/fwmetrics.nix
|
||||
|
||||
# ./modules/firefox-sync.nix
|
||||
|
||||
# home assistant
|
||||
./modules/home-assistant
|
||||
./modules/deconz.nix
|
||||
# ./modules/mopidy.nix
|
||||
# ./modules/mosquitto.nix
|
||||
./modules/snapserver.nix
|
||||
|
||||
# gaming
|
||||
# ./modules/palworld.nix
|
||||
# ./modules/ark-survival-evolved.nix
|
||||
./modules/foundry-vtt.nix
|
||||
|
||||
# setup network
|
||||
# ./modules/setupnetwork.nix
|
||||
|
||||
|
||||
./hardware-configuration.nix
|
||||
];
|
||||
|
||||
nixpkgs.overlays = [
|
||||
(import ./utils/overlays/packages.nix)
|
||||
];
|
||||
|
||||
nixpkgs.config.permittedInsecurePackages = [
|
||||
"openssl-1.1.1w"
|
||||
];
|
||||
|
||||
nixpkgs.config.allowUnfreePredicate = pkg: builtins.elem (lib.getName pkg) [
|
||||
"mongodb"
|
||||
];
|
||||
|
||||
time.timeZone = "Europe/Vienna";
|
||||
|
||||
services.logind.extraConfig = "RuntimeDirectorySize=2G";
|
||||
|
||||
sops.age.sshKeyPaths = [ "/etc/ssh/ssh_host_ed25519_key" ];
|
||||
sops.defaultSopsFile = ./secrets.yaml;
|
||||
|
||||
environment.systemPackages = with pkgs; [
|
||||
bento
|
||||
conntrack-tools # view network connection states
|
||||
ethtool # manage NIC settings (offload, NIC features, ...)
|
||||
git
|
||||
htop # to see the system load
|
||||
tcpdump # view network traffic
|
||||
vim # my preferred editor
|
||||
wol
|
||||
inotify-tools
|
||||
];
|
||||
|
||||
nix = {
|
||||
settings.auto-optimise-store = true;
|
||||
gc = {
|
||||
automatic = true;
|
||||
dates = "weekly";
|
||||
options = "--delete-older-than 60d";
|
||||
};
|
||||
# Free up to 1GiB whenever there is less than 100MiB left.
|
||||
extraOptions = ''
|
||||
min-free = ${toString (100 * 1024 * 1024)}
|
||||
max-free = ${toString (1024 * 1024 * 1024)}
|
||||
'';
|
||||
};
|
||||
|
||||
# services.tlp = {
|
||||
# enable = true;
|
||||
# settings = {
|
||||
# CPU_SCALING_GOVERNOR_ON_AC = "powersave"; # powersave or performance
|
||||
# CPU_ENERGY_PERF_POLICY_ON_AC = "power"; # power or performance
|
||||
# # CPU_MIN_PERF_ON_AC = 0;
|
||||
# # CPU_MAX_PERF_ON_AC = 100; # max 100
|
||||
# };
|
||||
# };
|
||||
|
||||
# systemd.services = {
|
||||
# powertop = {
|
||||
# wantedBy = [ "multi-user.target" ];
|
||||
# after = [ "multi-user.target" ];
|
||||
# description = "Powertop tunings";
|
||||
# path = [ pkgs.kmod ];
|
||||
# serviceConfig = {
|
||||
# Type = "oneshot";
|
||||
# RemainAfterExit = "yes";
|
||||
# ExecStart = "${pkgs.powertop}/bin/powertop --auto-tune && for dev in /sys/class/net/*; do echo on > \"$dev/device/power/control\"; done'";
|
||||
# };
|
||||
# };
|
||||
# };
|
||||
|
||||
boot.tmp.cleanOnBoot = true;
|
||||
zramSwap.enable = true;
|
||||
networking.hostName = "fw-new";
|
||||
services.openssh.enable = true;
|
||||
users.users.root.openssh.authorizedKeys.keys = [
|
||||
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN/2SAFm50kraB1fepAizox/QRXxB7WbqVbH+5OPalDT47VIJGNKOKhixQoqhABHxEoLxdf/C83wxlCVlPV9poLfDgVkA3Lyt5r3tSFQ6QjjOJAgchWamMsxxyGBedhKvhiEzcr/Lxytnoz3kjDG8fqQJwEpdqMmJoMUfyL2Rqp16u+FQ7d5aJtwO8EUqovhMaNO7rggjPpV/uMOg+tBxxmscliN7DLuP4EMTA/FwXVzcFNbOx3K9BdpMRAaSJt4SWcJO2cS2KHA5n/H+PQI7nz5KN3Yr/upJN5fROhi/SHvK39QOx12Pv7FCuWlc+oR68vLaoCKYhnkl3DnCfc7A7"
|
||||
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIRQuPqH5fdX3KEw7DXzWEdO3AlUn1oSmtJtHB71ICoH Generated By Termius"
|
||||
];
|
||||
|
||||
# backups
|
||||
borgbackup.repo = "u149513-sub2@u149513-sub2.your-backup.de:borg";
|
||||
|
||||
system.stateVersion = "22.05";
|
||||
}
1
hosts/fw-new/fleet.nix
Symbolic link
@@ -0,0 +1 @@
../../fleet.nix
86
hosts/fw-new/hardware-configuration.nix
Normal file
@@ -0,0 +1,86 @@
|
||||
# Do not modify this file! It was generated by ‘nixos-generate-config’
|
||||
# and may be overwritten by future invocations. Please make changes
|
||||
# to /etc/nixos/configuration.nix instead.
|
||||
{ config, lib, pkgs, modulesPath, ... }:
|
||||
let
|
||||
# needs to be pinned, otherwise the kernel will fail to build because of gcc
|
||||
kernelpkgs = import (builtins.fetchGit {
|
||||
name = "kernelpkgs";
|
||||
url = "https://github.com/nixos/nixpkgs/";
|
||||
rev = "4c2fcb090b1f3e5b47eaa7bd33913b574a11e0a0";
|
||||
}) {};
|
||||
in
|
||||
{
|
||||
powerManagement.cpuFreqGovernor = lib.mkDefault "ondemand";
|
||||
|
||||
|
||||
boot = {
|
||||
loader.systemd-boot.enable = true;
|
||||
loader.efi.canTouchEfiVariables = true;
|
||||
|
||||
kernelPackages = pkgs.linuxPackagesFor (kernelpkgs.callPackage ./pkgs/kernel/vendor.nix {});
|
||||
kernel.sysctl = {
|
||||
"kernel.printk" = "1 4 1 7";
|
||||
};
|
||||
supportedFilesystems = lib.mkForce [ "vfat" "fat32" "exfat" "ext4" "btrfs" ];
|
||||
initrd.includeDefaultModules = lib.mkForce false;
|
||||
initrd.availableKernelModules = lib.mkForce [ "nvme" "mmc_block" "hid" "dm_mod" "dm_crypt" "input_leds" ];
|
||||
|
||||
# kernelParams copy from Armbian's /boot/armbianEnv.txt & /boot/boot.cmd
|
||||
kernelParams = [
|
||||
"rootwait"
|
||||
|
||||
"earlycon" # enable early console, so we can see the boot messages via serial port / HDMI
|
||||
"consoleblank=0" # disable console blanking(screen saver)
|
||||
"console=ttyS2,1500000" # serial port
|
||||
"console=tty1" # HDMI
|
||||
|
||||
# docker optimizations
|
||||
"cgroup_enable=cpuset"
|
||||
"cgroup_memory=1"
|
||||
"cgroup_enable=memory"
|
||||
"swapaccount=1"
|
||||
];
|
||||
};
|
||||
|
||||
hardware = {
|
||||
deviceTree = {
|
||||
# https://github.com/armbian/build/blob/f9d7117/config/boards/orangepi5-plus.wip#L10C51-L10C51
|
||||
name = "rockchip/rk3588-orangepi-5-plus.dtb";
|
||||
overlays = [
|
||||
];
|
||||
};
|
||||
|
||||
enableRedistributableFirmware = lib.mkForce true;
|
||||
firmware = [
|
||||
(pkgs.callPackage ./pkgs/orangepi-firmware {})
|
||||
];
|
||||
};
|
||||
|
||||
|
||||
fileSystems."/" =
|
||||
{ device = "/dev/disk/by-uuid/c49c99cd-9a91-485d-8572-8f83df262907";
|
||||
fsType = "ext4";
|
||||
};
|
||||
|
||||
fileSystems."/boot" =
|
||||
{ device = "/dev/disk/by-uuid/12CE-A600";
|
||||
fsType = "vfat";
|
||||
};
|
||||
|
||||
swapDevices =
|
||||
[ { device = "/dev/disk/by-uuid/1f3fe124-c1f6-4e3f-9e89-4aa6d87d9853"; }
|
||||
];
|
||||
|
||||
# Enables DHCP on each ethernet and wireless interface. In case of scripted networking
|
||||
# (the default) this is the recommended approach. When using systemd-networkd it's
|
||||
# still possible to use this option, but it's recommended to use it in conjunction
|
||||
# with explicit per-interface declarations with `networking.interfaces.<interface>.useDHCP`.
|
||||
networking.useDHCP = lib.mkDefault true;
|
||||
# networking.interfaces.enP3p49s0.useDHCP = lib.mkDefault true;
|
||||
# networking.interfaces.enP4p65s0.useDHCP = lib.mkDefault true;
|
||||
# networking.interfaces.enu1u3c2.useDHCP = lib.mkDefault true;
|
||||
# networking.interfaces.wlP2p33s0.useDHCP = lib.mkDefault true;
|
||||
|
||||
nixpkgs.hostPlatform = lib.mkDefault "aarch64-linux";
|
||||
}
24
hosts/fw-new/modules/ark-survival-evolved.nix
Normal file
@@ -0,0 +1,24 @@
|
||||
{ config, pkgs, ... }:
|
||||
|
||||
{
|
||||
virtualisation.oci-containers.backend = "podman";
|
||||
virtualisation.oci-containers.containers = {
|
||||
ark = {
|
||||
image = "hermsi/ark-server:latest";
|
||||
autoStart = true;
|
||||
environmentFiles = [
|
||||
config.sops.secrets.ark.path
|
||||
];
|
||||
volumes = [
|
||||
"/var/lib/ark/app:/app/"
|
||||
"/var/lib/ark/backup:/home/steam/ARK-Backups"
|
||||
];
|
||||
extraOptions = [
|
||||
"--network=server"
|
||||
"--ip=10.42.97.201"
|
||||
];
|
||||
};
|
||||
};
|
||||
|
||||
sops.secrets.ark = {};
|
||||
}
16
hosts/fw-new/modules/avahi.nix
Normal file
@@ -0,0 +1,16 @@
|
||||
{ pkgs, ... }: {
|
||||
services.avahi = {
|
||||
enable = true;
|
||||
reflector = true;
|
||||
allowInterfaces = [
|
||||
"multimedia"
|
||||
"server"
|
||||
"lan"
|
||||
"smart"
|
||||
];
|
||||
};
|
||||
|
||||
environment.systemPackages = with pkgs; [
|
||||
nssmdns
|
||||
];
|
||||
}
24
hosts/fw-new/modules/ddclient.nix
Normal file
@@ -0,0 +1,24 @@
|
||||
{ config, ... }:
|
||||
{
|
||||
services.ddclient = {
|
||||
enable = true;
|
||||
use = "if, if=wan";
|
||||
protocol = "hetzner";
|
||||
# server = "https://dns.hetzner.com/api/v1/";
|
||||
username = "dominik.polakovics@cloonar.com";
|
||||
passwordFile = config.sops.secrets.ddclient.path;
|
||||
zone = "cloonar.com";
|
||||
domains = [
|
||||
"fw.cloonar.com"
|
||||
"vpn.cloonar.com"
|
||||
"git.cloonar.com"
|
||||
"palworld.cloonar.com"
|
||||
"matrix.cloonar.com"
|
||||
"element.cloonar.com"
|
||||
];
|
||||
};
|
||||
|
||||
sops.secrets.ddclient = {
|
||||
# owner = config.systemd.services.ddclient.serviceConfig.User;
|
||||
};
|
||||
}
24
hosts/fw-new/modules/deconz.nix
Normal file
@@ -0,0 +1,24 @@
|
||||
{ config, pkgs, ... }: {
|
||||
virtualisation = {
|
||||
oci-containers.containers = {
|
||||
deconz = {
|
||||
autoStart = false;
|
||||
image = "marthoc/deconz";
|
||||
volumes = [
|
||||
"/etc/localtime:/etc/localtime:ro"
|
||||
"/var/lib/deconz:/root/.local/share/dresden-elektronik/deCONZ"
|
||||
];
|
||||
environment = {
|
||||
DECONZ_DEVICE = "/dev/ttyACM0";
|
||||
TZ = "Europe/Vienna";
|
||||
};
|
||||
extraOptions = [
|
||||
"--network=server"
|
||||
"--ip=10.42.97.22"
|
||||
"--device=/dev/ttyACM0"
|
||||
"--hostname=deconz"
|
||||
];
|
||||
};
|
||||
};
|
||||
};
|
||||
}
282
hosts/fw-new/modules/dhcp4.nix
Normal file
@@ -0,0 +1,282 @@
|
||||
{ ... }: {
|
||||
services.kea.dhcp4 = {
|
||||
enable = true;
|
||||
settings = {
|
||||
interfaces-config = {
|
||||
interfaces = [
|
||||
"lan"
|
||||
"server"
|
||||
"infrastructure"
|
||||
"multimedia"
|
||||
"smart"
|
||||
"guest"
|
||||
];
|
||||
};
|
||||
lease-database = {
|
||||
name = "/var/lib/kea/dhcp4.leases";
|
||||
persist = true;
|
||||
type = "memfile";
|
||||
};
|
||||
rebind-timer = 2000;
|
||||
renew-timer = 1000;
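# T1/T2 timers in seconds: clients try to renew after 1000 s and rebind after 2000 s;
# the lease lifetime itself is set by valid-lifetime at the end of this block.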
|
||||
subnet4 = [
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.96.100 - 10.42.96.240";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.96.0/24";
|
||||
interface = "lan";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.96.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name";
|
||||
data = "cloonar.com";
|
||||
}
|
||||
{
|
||||
name = "domain-search";
|
||||
data = "cloonar.com";
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "10.42.96.1";
|
||||
}
|
||||
];
|
||||
reservations = [
|
||||
{
|
||||
hw-address = "04:7c:16:d5:63:5e";
|
||||
ip-address = "10.42.96.5";
|
||||
server-hostname = "omada.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "30:05:5c:56:62:37";
|
||||
ip-address = "10.42.96.100";
|
||||
server-hostname = "brn30055c566237.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "24:df:a7:b1:1b:74";
|
||||
ip-address = "10.42.96.101";
|
||||
server-hostname = "rmproplus-b1-1b-74.cloonar.com";
|
||||
}
|
||||
];
|
||||
|
||||
}
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.97.100 - 10.42.97.240";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.97.0/24";
|
||||
interface = "server";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.97.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name";
|
||||
data = "cloonar.com";
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "10.42.97.1";
|
||||
}
|
||||
];
|
||||
reservations = [
|
||||
{
|
||||
hw-address = "1a:c4:04:6e:29:bd";
|
||||
ip-address = "10.42.97.2";
|
||||
server-hostname = "omada.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "02:00:00:00:00:03";
|
||||
ip-address = "10.42.97.5";
|
||||
server-hostname = "web-02.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "02:00:00:00:00:04";
|
||||
ip-address = "10.42.97.6";
|
||||
server-hostname = "matrix.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "ea:db:d4:c1:18:ba";
|
||||
ip-address = "10.42.97.50";
|
||||
server-hostname = "git.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "c2:4f:64:dd:13:0c";
|
||||
ip-address = "10.42.97.20";
|
||||
server-hostname = "home-assistant.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "1a:c4:04:6e:29:02";
|
||||
ip-address = "10.42.97.25";
|
||||
server-hostname = "deconz.cloonar.com";
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.101.100 - 10.42.101.240";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.101.0/24";
|
||||
interface = "infrastructure";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.101.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name";
|
||||
data = "cloonar.com";
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "10.42.101.1";
|
||||
}
|
||||
{
|
||||
name = "capwap-ac-v4";
|
||||
code = 138;
|
||||
data = "10.42.97.2";
|
||||
}
|
||||
];
|
||||
reservations = [
|
||||
];
|
||||
}
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.99.100 - 10.42.99.240";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.99.0/24";
|
||||
interface = "multimedia";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.99.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name";
|
||||
data = "cloonar.multimedia";
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "10.42.99.1";
|
||||
}
|
||||
];
|
||||
reservations = [
|
||||
{
|
||||
hw-address = "c4:a7:2b:c7:ea:30";
|
||||
ip-address = "10.42.99.10";
|
||||
hostname = "metz.cloonar.multimedia";
|
||||
}
|
||||
{
|
||||
hw-address = "f0:2f:9e:d4:3b:21";
|
||||
ip-address = "10.42.99.11";
|
||||
hostname = "firetv-living";
|
||||
}
|
||||
{
|
||||
hw-address = "bc:33:29:ed:24:f0";
|
||||
ip-address = "10.42.99.12";
|
||||
hostname = "ps5";
|
||||
}
|
||||
{
|
||||
hw-address = "e4:2a:ac:32:3f:79";
|
||||
ip-address = "10.42.99.13";
|
||||
hostname = "xbox";
|
||||
}
|
||||
{
|
||||
hw-address = "98:b6:e9:b6:ef:f4";
|
||||
ip-address = "10.42.99.14";
|
||||
hostname = "switch";
|
||||
}
|
||||
{
|
||||
hw-address = "f0:2f:9e:c1:74:72";
|
||||
ip-address = "10.42.99.21";
|
||||
hostname = "firetv-bedroom";
|
||||
}
|
||||
{
|
||||
hw-address = "30:05:5c:56:62:37";
|
||||
ip-address = "10.42.99.100";
|
||||
server-hostname = "brn30055c566237";
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.254.10 - 10.42.254.254";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.254.0/24";
|
||||
interface = "guest";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.254.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "9.9.9.9";
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.100.100 - 10.42.100.240";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.100.0/24";
|
||||
interface = "smart";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.100.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name";
|
||||
data = "cloonar.smart";
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "10.42.100.1";
|
||||
}
|
||||
];
|
||||
reservations = [
|
||||
{
|
||||
hw-address = "fc:ee:28:03:63:e9";
|
||||
ip-address = "10.42.100.148";
|
||||
server-hostname = "k1c";
|
||||
}
|
||||
{
|
||||
hw-address = "cc:50:e3:bc:27:64";
|
||||
ip-address = "10.42.100.112";
|
||||
server-hostname = "Nuki_Bridge_1A753F72";
|
||||
}
|
||||
|
||||
{
|
||||
hw-address = "34:6f:24:f3:af:ad";
|
||||
ip-address = "10.42.100.137";
|
||||
server-hostname = "daikin86604";
|
||||
}
|
||||
{
|
||||
hw-address = "34:6f:24:c1:f8:54";
|
||||
ip-address = "10.42.100.139";
|
||||
server-hostname = "daikin53800";
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
valid-lifetime = 4000;
|
||||
};
|
||||
};
|
||||
}
83
hosts/fw-new/modules/firefox-sync.nix
Normal file
@@ -0,0 +1,83 @@
|
||||
{ config, pkgs, ... }:
|
||||
let
|
||||
domain = "sync.cloonar.com";
|
||||
in {
|
||||
sops.secrets.firefox-sync = { };
|
||||
|
||||
security.acme.certs."${domain}" = {
|
||||
group = "nginx";
|
||||
};
|
||||
|
||||
containers."firefox-sync" = {
|
||||
autoStart = true;
|
||||
ephemeral = false; # because of ssh key
|
||||
privateNetwork = true;
|
||||
hostBridge = "server";
|
||||
hostAddress = "10.42.97.1";
|
||||
localAddress = "10.42.97.51/24";
|
||||
bindMounts = {
|
||||
"/run/secrets/firefox-sync" = {
|
||||
hostPath = "/run/secrets/firefox-sync";
|
||||
isReadOnly = true;
|
||||
};
|
||||
"/var/lib/acme/${domain}/" = {
|
||||
hostPath = "${config.security.acme.certs.${domain}.directory}";
|
||||
isReadOnly = true;
|
||||
};
|
||||
};
|
||||
config = { lib, config, pkgs, ... }: {
|
||||
networking = {
|
||||
hostName = "firefox-sync";
|
||||
useHostResolvConf = false;
|
||||
defaultGateway = {
|
||||
address = "10.42.97.1";
|
||||
interface = "eth0";
|
||||
};
|
||||
firewall.enable = false;
|
||||
nameservers = [ "10.42.97.1" ];
|
||||
};
|
||||
|
||||
services.nginx.enable = true;
|
||||
services.nginx.virtualHosts."${domain}" = {
|
||||
sslCertificate = "/var/lib/acme/${domain}/fullchain.pem";
|
||||
sslCertificateKey = "/var/lib/acme/${domain}/key.pem";
|
||||
sslTrustedCertificate = "/var/lib/acme/${domain}/chain.pem";
|
||||
listen = [
|
||||
{
|
||||
addr = "0.0.0.0";
|
||||
ssl = true;
|
||||
port = 5000;
|
||||
}
|
||||
];
|
||||
locations."/" = {
|
||||
proxyPass = "http://localhost:5001/";
|
||||
recommendedProxySettings = true;
|
||||
};
|
||||
};
|
||||
|
||||
services.mysql.package = pkgs.mariadb;
|
||||
services.firefox-syncserver = {
|
||||
enable = true;
|
||||
singleNode = {
|
||||
enable = true;
|
||||
enableNginx = false;
|
||||
hostname = domain;
|
||||
};
|
||||
settings = {
|
||||
port = 5001;
|
||||
tokenserver.enable = true;
|
||||
};
|
||||
secrets = "/run/secrets/firefox-sync";
|
||||
logLevel = "trace";
|
||||
};
|
||||
|
||||
services.openssh.enable = true;
|
||||
users.users.root.openssh.authorizedKeys.keys = [
|
||||
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN/2SAFm50kraB1fepAizox/QRXxB7WbqVbH+5OPalDT47VIJGNKOKhixQoqhABHxEoLxdf/C83wxlCVlPV9poLfDgVkA3Lyt5r3tSFQ6QjjOJAgchWamMsxxyGBedhKvhiEzcr/Lxytnoz3kjDG8fqQJwEpdqMmJoMUfyL2Rqp16u+FQ7d5aJtwO8EUqovhMaNO7rggjPpV/uMOg+tBxxmscliN7DLuP4EMTA/FwXVzcFNbOx3K9BdpMRAaSJt4SWcJO2cS2KHA5n/H+PQI7nz5KN3Yr/upJN5fROhi/SHvK39QOx12Pv7FCuWlc+oR68vLaoCKYhnkl3DnCfc7A7"
|
||||
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIRQuPqH5fdX3KEw7DXzWEdO3AlUn1oSmtJtHB71ICoH Generated By Termius"
|
||||
];
|
||||
|
||||
system.stateVersion = "23.05";
|
||||
};
|
||||
};
|
||||
}
160
hosts/fw-new/modules/firewall.nix
Normal file
@@ -0,0 +1,160 @@
|
||||
{ pkgs, ... }: {
|
||||
networking = {
|
||||
firewall.checkReversePath = false;
|
||||
nat.enable = false;
|
||||
nftables = {
|
||||
enable = true;
|
||||
tables = {
|
||||
"cloonar-fw" = {
|
||||
family = "inet";
|
||||
content = ''
|
||||
chain output {
|
||||
type filter hook output priority 100; policy accept;
|
||||
}
|
||||
|
||||
chain rpfilter {
|
||||
type filter hook prerouting priority mangle + 10; policy drop;
|
||||
meta nfproto ipv4 udp sport . udp dport { 68 . 67, 67 . 68 } accept comment "DHCPv4 client/server"
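# reverse-path check: only accept packets for which a return route to the source
# exists via the interface they arrived on; everything else hits the drop policy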
|
||||
fib saddr . mark . iif oif exists accept
|
||||
}
|
||||
|
||||
chain input {
|
||||
type filter hook input priority filter; policy drop;
|
||||
iifname "lo" accept comment "trusted interfaces"
|
||||
iifname "lan" counter accept comment "Spice"
|
||||
ct state vmap { invalid : drop, established : accept, related : accept, new : jump input-allow, untracked : jump input-allow }
|
||||
tcp flags syn / fin,syn,rst,ack log prefix "refused connection: " level info
|
||||
}
|
||||
|
||||
chain input-allow {
|
||||
udp dport != { 53, 5353 } ct state new limit rate over 1/second burst 10 packets drop comment "rate limit for new connections"
|
||||
iifname lo accept
|
||||
iifname "wan" udp dport 51820 counter accept comment "Wireguard traffic"
|
||||
iifname "wan" tcp dport 9273 counter accept comment "Prometheus traffic"
|
||||
iifname "lan" tcp dport 5931 counter accept comment "Spice"
|
||||
iifname { "server", "vserver", "vm-*", "lan", "wg_cloonar" } counter accept comment "allow trusted to router"
|
||||
iifname { "multimedia", "smart", "infrastructure", "podman0", "setup" } udp dport { 53, 5353 } counter accept comment "DNS"
|
||||
iifname { "wan", "multimedia" } icmp type { echo-request, destination-unreachable, time-exceeded } counter accept comment "Allow select ICMP"
|
||||
|
||||
# Accept mDNS for avahi reflection
|
||||
iifname "server" ip saddr 10.42.97.20/32 tcp dport { llmnr } counter accept
|
||||
iifname "server" ip saddr 10.42.97.20/32 udp dport { mdns, llmnr } counter accept
|
||||
|
||||
# Allow all returning traffic
|
||||
ct state { established, related } counter accept
|
||||
|
||||
# Allow returning traffic from wrwks and drop everything else
|
||||
iifname "wrwks" ct state { established, related } counter accept
|
||||
iifname "wrwks" drop
|
||||
|
||||
# Allow returning traffic from wg_epicenter and drop everything else
|
||||
iifname "wg_epicenter" ct state { established, related } counter accept
|
||||
iifname "wg_epicenter" drop
|
||||
|
||||
# Allow returning traffic from wg_ghetto_at and drop everything else
|
||||
iifname "wg_ghetto_at" ct state { established, related } counter accept
|
||||
iifname "wg_ghetto_at" drop
|
||||
|
||||
# Allow returning traffic from wan and drop everything else
|
||||
iifname "wan" ct state { established, related } accept comment "Allow established traffic"
|
||||
iifname "wan" icmp type { echo-request, destination-unreachable, time-exceeded } counter accept comment "Allow select ICMP"
|
||||
iifname "wan" counter drop comment "Drop all other unsolicited traffic from wan"
|
||||
|
||||
limit rate 60/minute burst 100 packets log prefix "Input - Drop: " comment "Log any unmatched traffic"
|
||||
}
|
||||
|
||||
chain forward {
|
||||
type filter hook forward priority filter; policy drop;
|
||||
|
||||
iifname "wg_cloonar" counter accept comment "test wireguard"
|
||||
|
||||
iifname "wg_cloonar" oifname lo counter accept comment "wireguard to server"
|
||||
|
||||
# enable flow offloading for better throughput
|
||||
# ip protocol { tcp, udp } flow offload @f
|
||||
|
||||
# broadcast
|
||||
iifname "server" oifname { "lan", "multimedia" } udp dport { 9 } counter accept comment "wakeonlan"
|
||||
|
||||
# multimedia airplay
|
||||
iifname "multimedia" oifname { "lan" } counter accept
|
||||
iifname "multimedia" oifname "server" tcp dport { 1704, 1705 } counter accept
|
||||
iifname "lan" oifname "server" udp dport { 5000, 5353, 6001 - 6011 } counter accept
|
||||
# avahi
|
||||
iifname "server" ip saddr 10.42.97.20/32 oifname { "lan" } counter accept
|
||||
|
||||
# smart home coap
|
||||
iifname "smart" oifname "server" ip daddr 10.42.97.20/32 udp dport { 5683 } counter accept
|
||||
iifname "smart" oifname "server" ip daddr 10.42.97.20/32 tcp dport { 1883 } counter accept
|
||||
|
||||
# Forward to git server
|
||||
oifname "server" ip daddr 10.42.97.50 tcp dport { 22 } counter accept
|
||||
oifname "server" ip daddr 10.42.97.5 tcp dport { 80, 443 } counter accept
|
||||
|
||||
# lan and vpn to any
|
||||
iifname { "lan", "server", "vserver", "wg_cloonar" } oifname { "lan", "vb-*", "vm-*", "server", "vserver", "infrastructure", "multimedia", "smart", "wg_cloonar", "guest", "setup" } counter accept
|
||||
iifname { "lan", "server", "wg_cloonar" } oifname { "wrwks", "wg_epicenter", "wg_ghetto_at" } counter accept
|
||||
iifname { "infrastructure", "setup" } oifname { "server", "vserver" } counter accept
|
||||
iifname { "lan", "wan" } udp dport { 8211, 27015 } counter accept comment "palworld"
|
||||
|
||||
# accept palworld server
|
||||
iifname { "wan", "lan" } oifname "podman0" udp dport { 8211, 27015 } counter accept comment "palworld"
|
||||
# forward to ark server
|
||||
oifname "server" ip daddr 10.42.97.201 tcp dport { 27020 } counter accept comment "ark survival evolved"
|
||||
oifname "server" ip daddr 10.42.97.201 udp dport { 7777, 7778, 27015 } counter accept comment "ark survival evolved"
|
||||
|
||||
# firefox-sync
|
||||
oifname "server" ip daddr 10.42.97.51 tcp dport { 5000 } counter accept comment "firefox-sync"
|
||||
|
||||
# allow all established, related
|
||||
ct state { established, related } accept comment "Allow established traffic"
|
||||
|
||||
# Allow trusted network WAN access
|
||||
iifname {
|
||||
"lan",
|
||||
"infrastructure",
|
||||
"server",
|
||||
"vserver",
|
||||
"multimedia",
|
||||
"smart",
|
||||
"wg_cloonar",
|
||||
"podman*",
|
||||
"guest",
|
||||
"setup",
|
||||
"vb-*",
|
||||
"vm-*",
|
||||
} oifname {
|
||||
"wan",
|
||||
} counter accept comment "Allow trusted LAN to WAN"
|
||||
|
||||
limit rate 60/minute burst 100 packets log prefix "Forward - Drop: " comment "Log any unmatched traffic"
|
||||
}
|
||||
'';
|
||||
};
|
||||
"cloonar-nat" = {
|
||||
family = "ip";
|
||||
content = ''
|
||||
chain prerouting {
|
||||
type nat hook prerouting priority filter; policy accept;
|
||||
iifname "server" ip daddr 10.42.96.255 udp dport { 9 } dnat to 10.42.96.255
|
||||
iifname "wan" tcp dport { 22 } dnat to 10.42.97.50
|
||||
iifname "wan" tcp dport { 80, 443 } dnat to 10.42.97.5
|
||||
iifname "wan" tcp dport { 5000 } dnat to 10.42.97.51
|
||||
iifname { "wan", "lan" } udp dport { 7777, 7778, 27015 } dnat to 10.42.97.201
|
||||
iifname { "wan", "lan" } tcp dport { 27020 } dnat to 10.42.97.201
|
||||
}
|
||||
|
||||
# Setup NAT masquerading on external interfaces
|
||||
chain postrouting {
|
||||
type nat hook postrouting priority filter; policy accept;
|
||||
oifname { "wan", "wg_cloonar", "wrwks", "wg_epicenter", "wg_ghetto_at" } masquerade
|
||||
iifname { "wan", "wg_cloonar" } ip daddr 10.42.97.50 masquerade
|
||||
iifname { "wan", "wg_cloonar" } ip daddr 10.42.97.51 masquerade
|
||||
iifname { "wan", "wg_cloonar" } ip daddr 10.42.97.201 masquerade
|
||||
}
|
||||
'';
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
}
77
hosts/fw-new/modules/foundry-vtt.nix
Normal file
@@ -0,0 +1,77 @@
|
||||
{ config, pkgs, ... }:
|
||||
let
|
||||
foundry-vtt = pkgs.callPackage ../pkgs/foundry-vtt {};
|
||||
cids = import ../modules/staticids.nix;
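# static uid/gid map shared with the container below, so the bind-mounted
# /var/lib/foundry-vtt has the same ownership on the host and inside the container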
|
||||
in {
|
||||
users.users.foundry-vtt = {
|
||||
isSystemUser = true;
|
||||
uid = cids.uids.foundry-vtt;
|
||||
home = "/var/lib/foundry-vtt";
|
||||
group = "foundry-vtt";
|
||||
createHome = true;
|
||||
};
|
||||
|
||||
users.groups.foundry-vtt = {
|
||||
gid = cids.gids.foundry-vtt;
|
||||
};
|
||||
|
||||
|
||||
containers.foundry-vtt = {
|
||||
autoStart = true;
|
||||
ephemeral = true;
|
||||
privateNetwork = true;
|
||||
hostBridge = "server";
|
||||
hostAddress = "10.42.97.1";
|
||||
localAddress = "10.42.97.21/24";
|
||||
bindMounts = {
|
||||
"/var/lib/foundry-vtt" = {
|
||||
hostPath = "/var/lib/foundry-vtt";
|
||||
isReadOnly = false;
|
||||
};
|
||||
};
|
||||
config = { lib, config, pkgs, ... }: {
|
||||
networking = {
|
||||
hostName = "foundry-vtt";
|
||||
useHostResolvConf = false;
|
||||
defaultGateway = {
|
||||
address = "10.42.97.1";
|
||||
interface = "eth0";
|
||||
};
|
||||
nameservers = [ "10.42.97.1" ];
|
||||
};
|
||||
systemd.services.foundry-vtt = {
|
||||
description = "Foundry VTT Server";
|
||||
after = [ "network.target" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
environment = {
|
||||
NODE_ENV = "production";
|
||||
};
|
||||
serviceConfig = {
|
||||
ExecStart = "${pkgs.nodejs}/bin/node ${foundry-vtt}/share/foundry-vtt/resources/app/main.js --dataPath=${config.users.users.foundry-vtt.home}";
|
||||
Restart = "always";
|
||||
User = "foundry-vtt";
|
||||
WorkingDirectory = "${config.users.users.foundry-vtt.home}";
|
||||
};
|
||||
};
|
||||
|
||||
users.users.foundry-vtt = {
|
||||
isSystemUser = true;
|
||||
uid = cids.uids.foundry-vtt;
|
||||
home = "/var/lib/foundry-vtt";
|
||||
group = "foundry-vtt";
|
||||
};
|
||||
|
||||
users.groups.foundry-vtt = {
|
||||
gid = cids.gids.foundry-vtt;
|
||||
};
|
||||
|
||||
networking.firewall = {
|
||||
enable = true;
|
||||
allowedTCPPorts = [ 30000 ];
|
||||
};
|
||||
|
||||
|
||||
system.stateVersion = "24.05";
|
||||
};
|
||||
};
|
||||
}
30
hosts/fw-new/modules/fwmetrics.nix
Normal file
@@ -0,0 +1,30 @@
|
||||
{ config, pkgs, ... }:
|
||||
let
|
||||
configure_prom = builtins.toFile "prometheus.yml" ''
|
||||
scrape_configs:
|
||||
- job_name: 'server'
|
||||
stream_parse: true
|
||||
static_configs:
|
||||
- targets:
|
||||
- ${config.networking.hostName}:9100
|
||||
'';
|
||||
in {
|
||||
sops.secrets.victoria-agent-env = {
|
||||
sopsFile = ../utils/modules/victoriametrics/secrets.yaml;
|
||||
};
|
||||
|
||||
services.prometheus.exporters.node.enable = true;
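# vmagent scrapes the local node exporter (:9100) using the config above and
# remote-writes the samples to the central VictoriaMetrics instance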
|
||||
|
||||
systemd.services.export-fw-to-prometheus = {
|
||||
path = with pkgs; [victoriametrics];
|
||||
enable = true;
|
||||
after = ["network-online.target"];
|
||||
wants = ["network-online.target"];
|
||||
wantedBy = ["multi-user.target"];
|
||||
script = "vmagent -promscrape.config=${configure_prom} -envflag.enable -remoteWrite.url=https://victoria-server.cloonar.com/api/v1/write";
|
||||
|
||||
serviceConfig = {
|
||||
EnvironmentFile=config.sops.secrets.victoria-agent-env.path;
|
||||
};
|
||||
};
|
||||
}
233
hosts/fw-new/modules/gitea-vm.nix
Normal file
@@ -0,0 +1,233 @@
|
||||
{ lib, nixpkgs, pkgs, ... }: let
|
||||
# hostname = "git-02";
|
||||
# json = pkgs.formats.json { };
|
||||
runners = ["git-runner-1" "git-runner-2"];
|
||||
indexedRunners = lib.lists.imap1 (i: v: { name=v; value=i; }) runners;
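# pair every runner name with a 1-based index; the index is reused below to give
# each microvm's tap interface a unique MAC address suffix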
|
||||
in {
|
||||
microvm.vms = lib.mapAttrs (runner: idx: {
|
||||
config = {
|
||||
microvm = {
|
||||
mem = 4048;
|
||||
shares = [
|
||||
{
|
||||
source = "/nix/store";
|
||||
mountPoint = "/nix/.ro-store";
|
||||
tag = "ro-store";
|
||||
proto = "virtiofs";
|
||||
}
|
||||
{
|
||||
source = "/run/secrets";
|
||||
mountPoint = "/run/secrets";
|
||||
tag = "ro-token";
|
||||
proto = "virtiofs";
|
||||
}
|
||||
];
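# the host /nix/store is shared read-only via virtiofs so the VMs reuse it;
# /run/secrets exposes the runner registration token to the guests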
|
||||
volumes = [
|
||||
{
|
||||
image = "rootfs.img";
|
||||
mountPoint = "/";
|
||||
size = 51200;
|
||||
}
|
||||
];
|
||||
interfaces = [
|
||||
{
|
||||
type = "tap";
|
||||
id = "vm-${runner}";
|
||||
mac = "02:00:00:00:00:0${toString idx}";
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
networking.hostName = runner;
|
||||
|
||||
virtualisation.podman.enable = true;
|
||||
|
||||
services.gitea-actions-runner.instances.${runner} = {
|
||||
enable = true;
|
||||
url = "https://git.cloonar.com";
|
||||
name = runner;
|
||||
tokenFile = "/run/secrets/gitea-runner-token";
|
||||
labels = [
|
||||
"ubuntu-latest:docker://shivammathur/node:latest"
|
||||
];
|
||||
settings = {
|
||||
container = {
|
||||
network = "podman";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
services.openssh.enable = true;
|
||||
users.users.root.openssh.authorizedKeys.keys = [
|
||||
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN/2SAFm50kraB1fepAizox/QRXxB7WbqVbH+5OPalDT47VIJGNKOKhixQoqhABHxEoLxdf/C83wxlCVlPV9poLfDgVkA3Lyt5r3tSFQ6QjjOJAgchWamMsxxyGBedhKvhiEzcr/Lxytnoz3kjDG8fqQJwEpdqMmJoMUfyL2Rqp16u+FQ7d5aJtwO8EUqovhMaNO7rggjPpV/uMOg+tBxxmscliN7DLuP4EMTA/FwXVzcFNbOx3K9BdpMRAaSJt4SWcJO2cS2KHA5n/H+PQI7nz5KN3Yr/upJN5fROhi/SHvK39QOx12Pv7FCuWlc+oR68vLaoCKYhnkl3DnCfc7A7"
|
||||
];
|
||||
|
||||
system.stateVersion = "22.05";
|
||||
};
|
||||
}) (lib.listToAttrs indexedRunners);
|
||||
|
||||
# microvm.vms = {
|
||||
# gitea = {
|
||||
# config = {
|
||||
# microvm = {
|
||||
# hypervisor = "cloud-hypervisor";
|
||||
# shares = [
|
||||
# {
|
||||
# source = "/nix/store";
|
||||
# mountPoint = "/nix/.ro-store";
|
||||
# tag = "ro-store";
|
||||
# proto = "virtiofs";
|
||||
# }
|
||||
# {
|
||||
# source = "/var/lib/acme/git.cloonar.com";
|
||||
# mountPoint = "/var/lib/acme/${hostname}.cloonar.com";
|
||||
# tag = "ro-cert";
|
||||
# proto = "virtiofs";
|
||||
# }
|
||||
# ];
|
||||
# interfaces = [
|
||||
# {
|
||||
# type = "tap";
|
||||
# id = "vm-${hostname}";
|
||||
# mac = "02:00:00:00:00:01";
|
||||
# }
|
||||
# ];
|
||||
# };
|
||||
#
|
||||
# imports = [
|
||||
# ../fleet.nix
|
||||
# ];
|
||||
#
|
||||
# environment.systemPackages = with pkgs; [
|
||||
# vim # my preferred editor
|
||||
# ];
|
||||
#
|
||||
# networking = {
|
||||
# hostName = hostname;
|
||||
# firewall = {
|
||||
# enable = true;
|
||||
# allowedTCPPorts = [ 22 80 443 ];
|
||||
# };
|
||||
# };
|
||||
#
|
||||
# services.nginx.enable = true;
|
||||
# services.nginx.virtualHosts."${hostname}.cloonar.com" = {
|
||||
# sslCertificate = "/var/lib/acme/${hostname}.cloonar.com/fullchain.pem";
|
||||
# sslCertificateKey = "/var/lib/acme/${hostname}.cloonar.com/key.pem";
|
||||
# sslTrustedCertificate = "/var/lib/acme/${hostname}.cloonar.com/chain.pem";
|
||||
# forceSSL = true;
|
||||
# locations."/" = {
|
||||
# proxyPass = "http://localhost:3001/";
|
||||
# };
|
||||
# };
|
||||
#
|
||||
# services.gitea = {
|
||||
# enable = true;
|
||||
# appName = "Cloonar Gitea server"; # Give the site a name
|
||||
# settings = {
|
||||
# server = {
|
||||
# ROOT_URL = "https://${hostname}.cloonar.com/";
|
||||
# HTTP_PORT = 3001;
|
||||
# DOMAIN = "${hostname}.cloonar.com";
|
||||
# };
|
||||
# openid = {
|
||||
# ENABLE_OPENID_SIGNIN = true;
|
||||
# ENABLE_OPENID_SIGNUP = true;
|
||||
# WHITELISTED_URIS = "auth.cloonar.com";
|
||||
# };
|
||||
# service = {
|
||||
# DISABLE_REGISTRATION = true;
|
||||
# ALLOW_ONLY_EXTERNAL_REGISTRATION = true;
|
||||
# SHOW_REGISTRATION_BUTTON = false;
|
||||
# };
|
||||
# actions.ENABLED=true;
|
||||
# };
|
||||
# };
|
||||
#
|
||||
# services.openssh.enable = true;
|
||||
# users.users.root.openssh.authorizedKeys.keys = [
|
||||
# "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN/2SAFm50kraB1fepAizox/QRXxB7WbqVbH+5OPalDT47VIJGNKOKhixQoqhABHxEoLxdf/C83wxlCVlPV9poLfDgVkA3Lyt5r3tSFQ6QjjOJAgchWamMsxxyGBedhKvhiEzcr/Lxytnoz3kjDG8fqQJwEpdqMmJoMUfyL2Rqp16u+FQ7d5aJtwO8EUqovhMaNO7rggjPpV/uMOg+tBxxmscliN7DLuP4EMTA/FwXVzcFNbOx3K9BdpMRAaSJt4SWcJO2cS2KHA5n/H+PQI7nz5KN3Yr/upJN5fROhi/SHvK39QOx12Pv7FCuWlc+oR68vLaoCKYhnkl3DnCfc7A7"
|
||||
# "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIRQuPqH5fdX3KEw7DXzWEdO3AlUn1oSmtJtHB71ICoH Generated By Termius"
|
||||
# ];
|
||||
#
|
||||
# system.stateVersion = "22.05";
|
||||
# };
|
||||
# };
|
||||
#
|
||||
# gitea-runner-1 = {
|
||||
# config = {
|
||||
# microvm = {
|
||||
# mem = 4048;
|
||||
# shares = [
|
||||
# {
|
||||
# source = "/nix/store";
|
||||
# mountPoint = "/nix/.ro-store";
|
||||
# tag = "ro-store";
|
||||
# proto = "virtiofs";
|
||||
# }
|
||||
# {
|
||||
# source = "/run/secrets";
|
||||
# mountPoint = "/run/secrets";
|
||||
# tag = "ro-token";
|
||||
# proto = "virtiofs";
|
||||
# }
|
||||
# ];
|
||||
# volumes = [
|
||||
# {
|
||||
# image = "rootfs.img";
|
||||
# mountPoint = "/";
|
||||
# size = 102400;
|
||||
# }
|
||||
# ];
|
||||
# interfaces = [
|
||||
# {
|
||||
# type = "tap";
|
||||
# id = "vm-gitea-runner-1";
|
||||
# mac = "02:00:00:00:00:02";
|
||||
# }
|
||||
# ];
|
||||
# };
|
||||
#
|
||||
# environment.systemPackages = with pkgs; [
|
||||
# vim # my preferred editor
|
||||
# ];
|
||||
#
|
||||
# networking.hostName = "gitea-runner";
|
||||
#
|
||||
# virtualisation.podman.enable = true;
|
||||
#
|
||||
# services.gitea-actions-runner.instances.vm = {
|
||||
# enable = true;
|
||||
# url = "https://git.cloonar.com";
|
||||
# name = "vm";
|
||||
# tokenFile = "/run/secrets/gitea-runner-token";
|
||||
# labels = [
|
||||
# "ubuntu-latest:docker://shivammathur/node:latest"
|
||||
# ];
|
||||
# settings = {
|
||||
# container = {
|
||||
# network = "podman";
|
||||
# };
|
||||
# };
|
||||
# };
|
||||
#
|
||||
# services.openssh.enable = true;
|
||||
# users.users.root.openssh.authorizedKeys.keys = [
|
||||
# "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN/2SAFm50kraB1fepAizox/QRXxB7WbqVbH+5OPalDT47VIJGNKOKhixQoqhABHxEoLxdf/C83wxlCVlPV9poLfDgVkA3Lyt5r3tSFQ6QjjOJAgchWamMsxxyGBedhKvhiEzcr/Lxytnoz3kjDG8fqQJwEpdqMmJoMUfyL2Rqp16u+FQ7d5aJtwO8EUqovhMaNO7rggjPpV/uMOg+tBxxmscliN7DLuP4EMTA/FwXVzcFNbOx3K9BdpMRAaSJt4SWcJO2cS2KHA5n/H+PQI7nz5KN3Yr/upJN5fROhi/SHvK39QOx12Pv7FCuWlc+oR68vLaoCKYhnkl3DnCfc7A7"
|
||||
# "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIRQuPqH5fdX3KEw7DXzWEdO3AlUn1oSmtJtHB71ICoH Generated By Termius"
|
||||
# ];
|
||||
#
|
||||
# system.stateVersion = "22.05";
|
||||
# };
|
||||
# };
|
||||
# };
|
||||
|
||||
sops.secrets.gitea-runner-token = {};
|
||||
|
||||
environment = {
|
||||
systemPackages = [
|
||||
pkgs.qemu
|
||||
pkgs.quickemu
|
||||
];
|
||||
};
|
||||
}
127
hosts/fw-new/modules/gitea.nix
Normal file
@@ -0,0 +1,127 @@
|
||||
{ config, pkgs, ... }:
|
||||
let
|
||||
cids = import ../modules/staticids.nix;
|
||||
domain = "git.cloonar.com";
|
||||
|
||||
user = {
|
||||
isSystemUser = true;
|
||||
uid = cids.uids.gitea;
|
||||
group = "gitea";
|
||||
home = "/var/lib/gitea";
|
||||
createHome = true;
|
||||
};
|
||||
group = {
|
||||
gid = cids.gids.gitea;
|
||||
};
|
||||
in
|
||||
{
|
||||
users.users.gitea = user;
|
||||
users.groups.gitea = group;
|
||||
|
||||
security.acme.certs."${domain}" = {
|
||||
group = "nginx";
|
||||
};
|
||||
|
||||
containers.git = {
|
||||
autoStart = true;
|
||||
ephemeral = false; # because of ssh key
|
||||
privateNetwork = true;
|
||||
hostBridge = "server";
|
||||
hostAddress = "10.42.97.1";
|
||||
localAddress = "10.42.97.50/24";
|
||||
bindMounts = {
|
||||
"/var/lib/gitea" = {
|
||||
hostPath = "/var/lib/gitea/";
|
||||
isReadOnly = false;
|
||||
};
|
||||
"/var/lib/acme/gitea/" = {
|
||||
hostPath = config.security.acme.certs.${domain}.directory;
|
||||
isReadOnly = true;
|
||||
};
|
||||
"/run/secrets/gitea-mailer-password" = {
|
||||
hostPath = config.sops.secrets.gitea-mailer-password.path;
|
||||
};
|
||||
};
|
||||
config = { lib, config, pkgs, ... }: {
|
||||
imports = [
|
||||
../fleet.nix
|
||||
];
|
||||
|
||||
environment.systemPackages = with pkgs; [
|
||||
vim # my preferred editor
|
||||
];
|
||||
|
||||
networking = {
|
||||
hostName = "git";
|
||||
useHostResolvConf = false;
|
||||
defaultGateway = {
|
||||
address = "10.42.97.1";
|
||||
interface = "eth0";
|
||||
};
|
||||
firewall.enable = false;
|
||||
nameservers = [ "10.42.97.1" ];
|
||||
};
|
||||
|
||||
services.nginx.enable = true;
|
||||
services.nginx.virtualHosts."${domain}" = {
|
||||
sslCertificate = "/var/lib/acme/gitea/fullchain.pem";
|
||||
sslCertificateKey = "/var/lib/acme/gitea/key.pem";
|
||||
sslTrustedCertificate = "/var/lib/acme/gitea/chain.pem";
|
||||
forceSSL = true;
|
||||
locations."/" = {
|
||||
proxyPass = "http://localhost:3001/";
|
||||
};
|
||||
};
|
||||
|
||||
services.gitea = {
|
||||
enable = true;
|
||||
appName = "Cloonar Gitea server"; # Give the site a name
|
||||
mailerPasswordFile = "/run/secrets/gitea-mailer-password";
|
||||
settings = {
|
||||
server = {
|
||||
ROOT_URL = "https://${domain}/";
|
||||
HTTP_PORT = 3001;
|
||||
DOMAIN = domain;
|
||||
};
|
||||
openid = {
|
||||
ENABLE_OPENID_SIGNIN = false;
|
||||
ENABLE_OPENID_SIGNUP = true;
|
||||
WHITELISTED_URIS = "auth.cloonar.com";
|
||||
};
|
||||
service = {
|
||||
DISABLE_REGISTRATION = false;
|
||||
ALLOW_ONLY_EXTERNAL_REGISTRATION = true;
|
||||
SHOW_REGISTRATION_BUTTON = false;
|
||||
ENABLE_NOTIFY_MAIL = true;
|
||||
};
|
||||
mailer = {
|
||||
ENABLED = true;
|
||||
FROM = "Gitea Cloonar <gitea@cloonar.com>";
|
||||
PROTOCOL = "smtp+starttls";
|
||||
SMTP_ADDR = "mail.cloonar.com";
|
||||
SMTP_PORT = 587;
|
||||
USER = "gitea@cloonar.com";
|
||||
};
|
||||
actions.ENABLED=true;
|
||||
};
|
||||
};
|
||||
|
||||
services.openssh.enable = true;
|
||||
users.users.root.openssh.authorizedKeys.keys = [
|
||||
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN/2SAFm50kraB1fepAizox/QRXxB7WbqVbH+5OPalDT47VIJGNKOKhixQoqhABHxEoLxdf/C83wxlCVlPV9poLfDgVkA3Lyt5r3tSFQ6QjjOJAgchWamMsxxyGBedhKvhiEzcr/Lxytnoz3kjDG8fqQJwEpdqMmJoMUfyL2Rqp16u+FQ7d5aJtwO8EUqovhMaNO7rggjPpV/uMOg+tBxxmscliN7DLuP4EMTA/FwXVzcFNbOx3K9BdpMRAaSJt4SWcJO2cS2KHA5n/H+PQI7nz5KN3Yr/upJN5fROhi/SHvK39QOx12Pv7FCuWlc+oR68vLaoCKYhnkl3DnCfc7A7"
|
||||
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIRQuPqH5fdX3KEw7DXzWEdO3AlUn1oSmtJtHB71ICoH Generated By Termius"
|
||||
];
|
||||
|
||||
users.users.gitea = user;
|
||||
users.groups.gitea = group;
|
||||
|
||||
system.stateVersion = "23.05";
|
||||
};
|
||||
};
|
||||
|
||||
sops.secrets.gitea-runner = {};
|
||||
sops.secrets.gitea-mailer-password = {
|
||||
owner = "gitea";
|
||||
restartUnits = [ "container@git.service" ];
|
||||
};
|
||||
}
60
hosts/fw-new/modules/home-assistant/3dprinter.nix
Normal file
@@ -0,0 +1,60 @@
|
||||
{ config, ... }: {
|
||||
services.home-assistant.config = {
|
||||
sensor = [
|
||||
{
|
||||
platform = "rest";
|
||||
name = "creality extruder";
|
||||
resource = "http://k1c-63e9.cloonar.smart:7125/printer/objects/query?extruder";
|
||||
value_template = "OK";
|
||||
json_attributes_path = "$.result.status.extruder";
|
||||
json_attributes = [
|
||||
"pressure_advance"
|
||||
"power"
|
||||
"target"
|
||||
"temperature"
|
||||
];
|
||||
}
|
||||
{
|
||||
platform = "rest";
|
||||
name = "creality print stats";
|
||||
resource = "http://k1c-63e9.cloonar.smart:7125/printer/objects/query?print_stats";
|
||||
value_template = "OK";
|
||||
json_attributes_path = "$.result.status.print_stats";
|
||||
json_attributes = [
|
||||
"filename"
|
||||
"total_duration"
|
||||
"print_duration"
|
||||
"filament_used"
|
||||
"state"
|
||||
"message"
|
||||
];
|
||||
}
|
||||
{
|
||||
platform = "template";
|
||||
sensors = {
|
||||
crality_hotend_actual = {
|
||||
friendly_name = "Hot End Actual";
|
||||
value_template = "{{ state_attr('sensor.creality_extruder', 'temperature') | float | round(1) }}";
|
||||
device_class = "temperature";
|
||||
unit_of_measurement = "°C";
|
||||
};
|
||||
};
|
||||
}
|
||||
];
|
||||
"automation 3d printer state" = {
|
||||
alias = "3d printer state change";
|
||||
trigger = [
|
||||
{
|
||||
platform = "template";
|
||||
value_template = "{{ state_attr('sensor.creality_print_stats','state') == 'standby' }}";
|
||||
}
|
||||
];
|
||||
action = {
|
||||
service = "notify.mobile_app_dominiks_iphone";
|
||||
data = {
|
||||
message = "Printer status changed to {{ state_attr('sensor.creality_print_stats','state') }}";
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
}
120
hosts/fw-new/modules/home-assistant/ac.nix
Normal file
@@ -0,0 +1,120 @@
|
||||
{ pkgs, ... }:
|
||||
{
|
||||
services.home-assistant.extraComponents = [
|
||||
"daikin"
|
||||
"enocean"
|
||||
];
|
||||
# services.home-assistant.customComponents = [
|
||||
# (pkgs.callPackage ./custom-components/scheduler.nix { })
|
||||
# ];
|
||||
services.home-assistant.customLovelaceModules = [
|
||||
(pkgs.callPackage ./custom-components/lovelace-scheduler.nix { })
|
||||
];
|
||||
|
||||
services.home-assistant.config = {
|
||||
sensor = [
|
||||
{
|
||||
name = "Living Room Window Handle 2";
|
||||
platform = "enocean";
|
||||
id = [ 129 0 227 53 ];
|
||||
device_class = "windowhandle";
|
||||
}
|
||||
{
|
||||
name = "Living Room Window Handle 1";
|
||||
platform = "enocean";
|
||||
id = [ 129 0 229 8 ];
|
||||
device_class = "windowhandle";
|
||||
}
|
||||
];
|
||||
"automation ac_livingroom" = {
|
||||
alias = "ac_livingroom";
|
||||
trigger = [
|
||||
{
|
||||
platform = "state";
|
||||
entity_id = "sensor.windowhandle_living_room_window_handle_1";
|
||||
to = [ "open" "tilt" ];
|
||||
}
|
||||
{
|
||||
platform = "state";
|
||||
entity_id = "sensor.windowhandle_living_room_window_handle_2";
|
||||
to = [ "open" "tilt" ];
|
||||
}
|
||||
];
|
||||
action = {
|
||||
service = "climate.set_hvac_mode";
|
||||
target = {
|
||||
entity_id = "climate.living_room";
|
||||
};
|
||||
data = {
|
||||
hvac_mode = "off";
|
||||
};
|
||||
};
|
||||
};
|
||||
"automation ac_eco" = {
|
||||
alias = "ac_eco";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = [
|
||||
"climate.living_room"
|
||||
"climate.bedroom"
|
||||
];
|
||||
to = [
"heat"
"cool"
];
|
||||
};
|
||||
action = {
|
||||
service = "climate.set_preset_mode";
|
||||
target = {
|
||||
entity_id = "{{ trigger.entity_id }}";
|
||||
};
|
||||
data = {
|
||||
preset_mode = "eco";
|
||||
};
|
||||
};
|
||||
};
|
||||
"automation bedroom_ac_on" = {
|
||||
alias = "bedroom ac on";
|
||||
trigger = {
|
||||
platform = "time";
|
||||
at = "00:30:00";
|
||||
};
|
||||
action = {
|
||||
choose = [
|
||||
{
|
||||
conditions = [ "{{ states('sensor.bedroom_ac_inside_temperature') | float > 25 and states('sensor.bedroom_ac_outside_temperature') | float > 22 }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "climate.set_hvac_mode";
|
||||
target = {
|
||||
entity_id = "climate.bedroom";
|
||||
};
|
||||
data = {
|
||||
hvac_mode = "cool";
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
"automation bedroom_ac_off" = {
|
||||
alias = "bedroom ac off";
|
||||
trigger = {
|
||||
platform = "template";
|
||||
value_template = ''
|
||||
{{ now().timestamp() | timestamp_custom('%H:%M') == (as_timestamp(strptime(states('sensor.bedtime_alarm'), "%H:%M")) - 1800) | timestamp_custom('%H:%M', false) }}
|
||||
'';
|
||||
};
|
||||
action = {
|
||||
service = "climate.set_hvac_mode";
|
||||
target = {
|
||||
entity_id = "climate.bedroom";
|
||||
};
|
||||
data = {
|
||||
hvac_mode = "off";
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
}
91
hosts/fw-new/modules/home-assistant/battery.nix
Normal file
@@ -0,0 +1,91 @@
|
||||
{
|
||||
services.home-assistant.config = {
|
||||
sensor = [
|
||||
{
|
||||
platform = "template";
|
||||
sensors = {
|
||||
sensors_lowest_battery_level = {
|
||||
friendly_name = "Lowest battery level (Sensors)";
|
||||
entity_id = "sun.sun";
|
||||
device_class = "battery";
|
||||
unit_of_measurement = "%";
|
||||
value_template = ''
|
||||
{% set domains = ['sensor', 'battery'] %}
|
||||
{% set ns = namespace(min_batt=100, entities=[]) %}
|
||||
{%- set exclude_sensors = ['sensor.sensors_lowest_battery_level','sensor.dominiks_iphone_battery_level'] -%}
|
||||
{% for domain in domains %}
|
||||
{% set ns.entities = states[domain] %}
|
||||
{% for sensor in exclude_sensors %}
|
||||
{% set ns.entities = ns.entities | rejectattr('entity_id', 'equalto', sensor) %}
|
||||
{% endfor %}
|
||||
{% set batt_sensors = ns.entities | selectattr('attributes.device_class','equalto','battery') | map(attribute='state') | reject('equalto', 'unknown') | reject('equalto', 'None') | map('int') | reject('equalto', 0) | list %}
|
||||
{% set batt_attrs = ns.entities | selectattr('attributes.battery_level','defined') | map(attribute='attributes.battery_level') | reject('equalto', 'unknown') | reject('equalto', 'None') | map('int') | reject('equalto', 0) | list %}
|
||||
{% set batt_lvls = batt_sensors + batt_attrs %}
|
||||
{% if batt_lvls|length > 0 %}
|
||||
{% set _min = batt_lvls|min %}
|
||||
{% if _min < ns.min_batt %}
|
||||
{% set ns.min_batt = _min %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{{ ns.min_batt }}
|
||||
'';
|
||||
};
|
||||
};
|
||||
}
|
||||
];
|
||||
binary_sensor = [
|
||||
{
|
||||
platform = "template";
|
||||
sensors = {
|
||||
sensor_low_battery = {
|
||||
value_template = "{{ states('sensor.sensors_lowest_battery_level')|int <= 30 }}";
|
||||
friendly_name = "A sensor has low battery";
|
||||
device_class = "problem";
|
||||
};
|
||||
};
|
||||
}
|
||||
];
|
||||
alert = {
|
||||
sensor_low_battery = {
|
||||
name = "Sensor has low battery!";
|
||||
message = ''
|
||||
{% set domains = ['sensor', 'battery'] %}
|
||||
{% set threshold = 30 %}
|
||||
{%- set exclude_entities = ['sensor.sensors_lowest_battery_level','sensor.dominiks_iphone_battery_level','sensor.roborock_s8_pro_ultra_battery'] -%}
|
||||
Sensors are below {{ threshold }}% battery:
|
||||
{% for domain in domains %}
|
||||
{% for item in states[domain] %}
|
||||
{% if item.entity_id not in exclude_entities %}
|
||||
{% if item.attributes.battery_level is defined %}
|
||||
{% set level = item.attributes.battery_level|int %}
|
||||
{% if level > 0 and level < threshold %}
|
||||
- {{ item.attributes.friendly_name }} ({{ item.attributes['battery_level']|int}}%)
|
||||
{%- endif -%}
|
||||
{% endif %}
|
||||
{% if item.attributes.device_class is defined and item.attributes.device_class == 'battery' %}
|
||||
{% set level = item.state|int %}
|
||||
{% if level > 0 and level <= threshold %}
|
||||
- {{ item.attributes.friendly_name }} ({{ item.state|int }}%)
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
'';
|
||||
entity_id = "binary_sensor.sensor_low_battery";
|
||||
state = "on";
|
||||
repeat = [
|
||||
5
|
||||
60
|
||||
360
|
||||
];
|
||||
skip_first = true;
|
||||
can_acknowledge = true;
|
||||
notifiers = [
|
||||
"NotificationGroup"
|
||||
];
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
{ stdenv, fetchFromGitHub }:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "ha-bermuda";
|
||||
version = "0.7.2"; # Replace with the latest version
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "agittins";
|
||||
repo = "bermuda";
|
||||
rev = "v${version}";
|
||||
sha256 = "sha256-FBmZc2I9JoLAQ55yasa0i+SM0dMg2IbR3AaKgEybRu8="; # Replace with the correct SHA256 hash
|
||||
};
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out
|
||||
cp -r custom_components/bermuda $out/
|
||||
'';
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
{ lib
|
||||
, buildHomeAssistantComponent
|
||||
, fetchFromGitHub
|
||||
}:
|
||||
|
||||
buildHomeAssistantComponent rec {
|
||||
owner = "hacs";
|
||||
domain = "hacs";
|
||||
version = "2.0.1"; # Replace with the latest version
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "hacs";
|
||||
repo = "integration";
|
||||
rev = version;
|
||||
sha256 = ""; # You'll need to fill this in
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
# Add any required dependencies here
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
homepage = "https://github.com/hacs/integration";
|
||||
license = licenses.mit;
|
||||
description = "HACS (Home Assistant Community Store)";
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,36 @@
|
||||
{ lib
|
||||
, buildNpmPackage
|
||||
, fetchFromGitHub
|
||||
}:
|
||||
|
||||
buildNpmPackage rec {
|
||||
pname = "lovelace-scheduler";
|
||||
version = "3.2.13";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "nielsfaber";
|
||||
repo = "scheduler-card";
|
||||
rev = "v${version}";
|
||||
hash = "sha256-LFKOTu0SBeHpf8Hjvsgc/xOUux9d4lBCshdD9u7eO5o=";
|
||||
};
|
||||
|
||||
npmDepsHash = "sha256-JJexFmVbDHi2JCiCpcDupzVf0xfwy+vqWILq/dLVcBo=";
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
mkdir $out
|
||||
cp card-mod.js $out
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
passthru.entrypoint = "card-mod.js";
|
||||
|
||||
meta = with lib; {
|
||||
description = "This is a Lovelace card for Home Assistant that can be used to create a time schedule for your smart devices. You can create new rules, modify existing rules and temporarily disable rules.";
|
||||
homepage = "https://github.com/nielsfaber/scheduler-card";
|
||||
license = licenses.mit;
|
||||
platforms = platforms.all;
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
buildHomeAssistantComponent,
|
||||
fetchFromGitHub,
|
||||
lib,
|
||||
gitUpdater,
|
||||
}:
|
||||
|
||||
buildHomeAssistantComponent rec {
|
||||
owner = "nielsfaber";
|
||||
domain = "scheduler";
|
||||
version = "3.3.7";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "nielsfaber";
|
||||
repo = "scheduler-component";
|
||||
rev = "refs/tags/${version}";
|
||||
hash = "sha256-zXO2UDLhSTOemzsO9G5ZUzr50Zg8kDW/aObn6Y3j70k=";
|
||||
};
|
||||
|
||||
passthru.updateScript = gitUpdater {
|
||||
ignoredVersions = "(Alpha|Beta|alpha|beta).*";
|
||||
};
|
||||
|
||||
meta = {
|
||||
changelog = "https://github.com/nielsfaber/scheduler-component/releases/tag/${version}";
|
||||
description = "This is a custom component for Home Assistant, that is used for controlling your existing devices based on time. It works nicely together with the Lovelace scheduler card.";
|
||||
homepage = "https://github.com/nielsfaber/scheduler-component";
|
||||
license = lib.licenses.agpl3Only;
|
||||
};
|
||||
}
288
hosts/fw-new/modules/home-assistant/default.nix
Normal file
@@ -0,0 +1,288 @@
|
||||
{ config, pkgs, ... }:
|
||||
let
|
||||
domain = "home-assistant.cloonar.com";
|
||||
pkgs-with-home-assistant = import (builtins.fetchGit {
|
||||
name = "new-home-assistant";
|
||||
url = "https://github.com/nixos/nixpkgs/";
|
||||
rev = "41dea55321e5a999b17033296ac05fe8a8b5a257";
|
||||
}) {};
|
||||
in
|
||||
{
|
||||
users.users.hass = {
|
||||
home = "/var/lib/hass";
|
||||
createHome = true;
|
||||
group = "hass";
|
||||
uid = config.ids.uids.hass;
|
||||
extraGroups = [ "dialout" ];
|
||||
};
|
||||
users.groups.hass.gid = config.ids.gids.hass;
|
||||
|
||||
security.acme.certs."${domain}" = {
|
||||
group = "nginx";
|
||||
};
|
||||
|
||||
sops.secrets."home-assistant-secrets.yaml" = {
|
||||
owner = "hass";
|
||||
restartUnits = [ "container@hass.service" ];
|
||||
};
|
||||
|
||||
sops.secrets."home-assistant-ldap" = {
|
||||
owner = "hass";
|
||||
};
|
||||
|
||||
containers.hass = {
|
||||
autoStart = true;
|
||||
ephemeral = false;
|
||||
privateNetwork = true;
|
||||
hostBridge = "server";
|
||||
hostAddress = "10.42.97.1";
|
||||
localAddress = "10.42.97.20/24";
|
||||
extraFlags = [
|
||||
"--capability=CAP_NET_ADMIN"
|
||||
"--capability=CAP_MKNOD"
|
||||
];
|
||||
allowedDevices = [
|
||||
{
|
||||
modifier = "rwm";
|
||||
node = "char-usb_device";
|
||||
}
|
||||
{
|
||||
modifier = "rwm";
|
||||
node = "char-ttyUSB";
|
||||
}
|
||||
];
|
||||
bindMounts = {
|
||||
"/dev/ttyUSB0" = {
|
||||
hostPath = "/dev/ttyUSB0";
|
||||
isReadOnly = false;
|
||||
};
|
||||
"/etc/localtime" = {
|
||||
hostPath = "/etc/localtime";
|
||||
};
|
||||
"/var/lib/hass" = {
|
||||
hostPath = "/var/lib/hass/";
|
||||
isReadOnly = false;
|
||||
};
|
||||
"/var/lib/acme/hass/" = {
|
||||
hostPath = "${config.security.acme.certs.${domain}.directory}";
|
||||
};
|
||||
"/run/secrets/home-assistant-ldap" = {
|
||||
hostPath = config.sops.secrets."home-assistant-ldap".path;
|
||||
};
|
||||
"/var/lib/hass/secrets.yaml" = {
|
||||
hostPath = config.sops.secrets."home-assistant-secrets.yaml".path;
|
||||
};
|
||||
};
|
||||
config = { lib, config, pkgs, ... }: {
|
||||
imports = [
|
||||
./3dprinter.nix
|
||||
./ac.nix
|
||||
# ./aeg.nix
|
||||
./battery.nix
|
||||
./electricity.nix
|
||||
./enocean.nix
|
||||
./ldap.nix
|
||||
./light.nix
|
||||
./locks.nix
|
||||
./multimedia.nix
|
||||
./music.nix
|
||||
./notify.nix
|
||||
./pc.nix
|
||||
./power-saving.nix
|
||||
./pushover.nix
|
||||
./presense.nix
|
||||
./remote.nix
|
||||
./roborock.nix
|
||||
./scene-switch.nix
|
||||
./shelly.nix
|
||||
./sleep.nix
|
||||
./snapcast.nix
|
||||
];
|
||||
|
||||
networking = {
|
||||
hostName = "home-assistant";
|
||||
useHostResolvConf = false;
|
||||
defaultGateway = {
|
||||
address = "10.42.96.1";
|
||||
interface = "eth0";
|
||||
};
|
||||
firewall.enable = false;
|
||||
nameservers = [ "10.42.97.1" ];
|
||||
};
|
||||
|
||||
environment.systemPackages = [
|
||||
pkgs.wol
|
||||
pkgs.mariadb
|
||||
];
|
||||
|
||||
services.nginx.enable = true;
|
||||
services.nginx.virtualHosts."${domain}" = {
|
||||
sslCertificate = "/var/lib/acme/hass/fullchain.pem";
|
||||
sslCertificateKey = "/var/lib/acme/hass/key.pem";
|
||||
sslTrustedCertificate = "/var/lib/acme/hass/chain.pem";
|
||||
forceSSL = true;
|
||||
extraConfig = ''
|
||||
proxy_buffering off;
|
||||
'';
|
||||
locations."/".extraConfig = ''
|
||||
proxy_pass http://127.0.0.1:8123;
|
||||
proxy_set_header Host $host;
|
||||
proxy_redirect http:// https://;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
'';
|
||||
};
|
||||
|
||||
services.home-assistant = {
|
||||
package = pkgs-with-home-assistant.home-assistant;
|
||||
enable = true;
|
||||
};
|
||||
|
||||
services.home-assistant.extraComponents = [
|
||||
"mobile_app"
|
||||
"backup"
|
||||
"denonavr"
|
||||
"androidtv"
|
||||
"rainbird"
|
||||
"zha"
|
||||
"tplink_omada"
|
||||
];
|
||||
|
||||
systemd.services.install-hacs = {
|
||||
description = "Install HACS";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
serviceConfig = {
|
||||
Type = "oneshot";
|
||||
};
|
||||
script = ''
|
||||
set -e
|
||||
HACS_VERSION="2.0.1" # Replace with the latest version
|
||||
HACS_DIR="/var/lib/hass/custom_components/hacs"
|
||||
|
||||
mkdir -p "$HACS_DIR"
|
||||
${pkgs.curl}/bin/curl -L "https://github.com/hacs/integration/releases/download/$HACS_VERSION/hacs.zip" -o /tmp/hacs.zip
|
||||
${pkgs.unzip}/bin/unzip -o /tmp/hacs.zip -d "$HACS_DIR"
|
||||
rm /tmp/hacs.zip
|
||||
chown -R hass:hass "$HACS_DIR"
|
||||
'';
|
||||
};
|
||||
|
||||
services.home-assistant.extraPackages = ps: with ps; [
|
||||
mysqlclient
|
||||
];
|
||||
|
||||
services.mysql = {
|
||||
enable = true;
|
||||
package = pkgs.mariadb;
|
||||
ensureDatabases = [ "hass" ];
|
||||
ensureUsers = [
|
||||
{
|
||||
name = "hass";
|
||||
ensurePermissions = {
|
||||
"hass.*" = "ALL PRIVILEGES";
|
||||
};
|
||||
}
|
||||
];
|
||||
|
||||
};
|
||||
|
||||
services.mysqlBackup = {
|
||||
enable = true;
|
||||
databases = [ "hass" ];
|
||||
};
|
||||
|
||||
services.home-assistant.config =
|
||||
let
|
||||
hiddenEntities = [
|
||||
"sensor.last_boot"
|
||||
"sensor.date"
|
||||
];
|
||||
in
|
||||
{
|
||||
recorder = {
|
||||
db_url = "mysql://hass@localhost/hass?unix_socket=/var/run/mysqld/mysqld.sock";
|
||||
};
|
||||
homeassistant = {
|
||||
name = "Home";
|
||||
latitude = "!secret home_latitude";
|
||||
longitude = "!secret home_longitude";
|
||||
elevation = "!secret home_elevation";
|
||||
unit_system = "metric";
|
||||
currency = "EUR";
|
||||
country = "AT";
|
||||
time_zone = "Europe/Vienna";
|
||||
external_url = "https://${domain}";
|
||||
};
|
||||
zone = {
|
||||
name = "Home";
|
||||
latitude = "!secret home_latitude";
|
||||
longitude = "!secret home_longitude";
|
||||
radius = 35;
|
||||
icon = "mdi:account-multiple";
|
||||
|
||||
};
|
||||
automation = "!include automations.yaml";
|
||||
frontend = { };
|
||||
http = {
|
||||
use_x_forwarded_for = true;
|
||||
trusted_proxies = [
|
||||
"127.0.0.1"
|
||||
"::1"
|
||||
];
|
||||
};
|
||||
api = { };
|
||||
history.exclude = {
|
||||
entities = hiddenEntities;
|
||||
domains = [
|
||||
"automation"
|
||||
"updater"
|
||||
];
|
||||
};
|
||||
"map" = { };
|
||||
enocean = {
|
||||
device = "/dev/ttyUSB0";
|
||||
};
|
||||
# logbook.exclude.entities = "hiddenEntities";
|
||||
logger = {
|
||||
default = "info";
|
||||
};
|
||||
|
||||
#icloud = {
|
||||
# username = "!secret icloud_email";
|
||||
# password = "!secret icloud_password";
|
||||
# with_family = true;
|
||||
#};
|
||||
network = { };
|
||||
zeroconf = { };
|
||||
system_health = { };
|
||||
default_config = { };
|
||||
system_log = { };
|
||||
sensor = [
|
||||
{
|
||||
platform = "template";
|
||||
sensors.bedtime_alarm = {
|
||||
friendly_name = "Bedtime Alarm";
|
||||
value_template = "09:00";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
services.mosquitto = {
|
||||
enable = true;
|
||||
listeners = [
|
||||
{
|
||||
acl = [ "pattern readwrite #" ];
|
||||
omitPasswordAuth = true;
|
||||
settings.allow_anonymous = true;
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
users.users.hass.extraGroups = [ "dialout" ];
|
||||
system.stateVersion = "23.05";
|
||||
};
|
||||
};
|
||||
}
|
||||
28
hosts/fw-new/modules/home-assistant/electricity.nix
Normal file
@@ -0,0 +1,28 @@
{ config, ... }:
let
  unstable = import
    (builtins.fetchTarball https://github.com/nixos/nixpkgs/tarball/nixpkgs-unstable)
    # reuse the current configuration
    { config = config.nixpkgs.config; };
in {
  services.home-assistant.customComponents = with unstable.home-assistant-custom-components; [
    epex_spot
  ];

  services.home-assistant.config = {
    sensor = [
      {
        platform = "template";
        sensors = {
          electricity_price = {
            friendly_name = "Current Price of electricity";
            unit_of_measurement = "EUR/kWh";
            value_template = ''
              {{ (((states('sensor.epex_spot_data_price') | int ) / 1000) + (0.0149 + 0.053 + 0.00866)) | float }}
            '';
          };
        };
      }
    ];
  };
}
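To make the surcharge arithmetic in the template above concrete (the 80 EUR/MWh spot value is only an illustrative assumption, not data from this configuration):

  fixed components: 0.0149 + 0.053 + 0.00866 = 0.07656 EUR/kWh
  example:          80 EUR/MWh / 1000 + 0.07656 ≈ 0.157 EUR/kWh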
19
hosts/fw-new/modules/home-assistant/enocean.nix
Normal file
@@ -0,0 +1,19 @@
{
  services.home-assistant.config = {
    "binary_sensor pc_0" = [
      {
        platform = "enocean";
        id = [ 254 235 105 198 ];
        name = "enocean_switch_pc";
      }
    ];
    sensor = [
      {
        name = "Bathroom HT";
        platform = "enocean";
        id = [ 5 41 146 251 ];
        device_class = "temperature";
      }
    ];
  };
}
56
hosts/fw-new/modules/home-assistant/ldap.nix
Normal file
@@ -0,0 +1,56 @@
|
||||
{ pkgs
|
||||
, config
|
||||
, lib
|
||||
, ... }:
|
||||
let
|
||||
ldap-auth-sh = pkgs.stdenv.mkDerivation {
|
||||
name = "ldap-auth-sh";
|
||||
|
||||
src = pkgs.fetchFromGitHub {
|
||||
owner = "efficiosoft";
|
||||
repo = "ldap-auth-sh";
|
||||
rev = "93b2c00413942908139e37c7432a12bcb705ac87";
|
||||
sha256 = "1pymp6ki353aqkigr89g7hg5x1mny68m31c3inxf1zr26n5s2kz8";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [ pkgs.makeWrapper ];
|
||||
installPhase = ''
|
||||
mkdir -p $out/etc
|
||||
cat > $out/etc/home-assistant.cfg << 'EOF'
|
||||
CLIENT="ldapsearch"
|
||||
SERVER="ldaps://ldap.cloonar.com:636"
|
||||
USERDN="cn=home-assistant,ou=system,ou=users,dc=cloonar,dc=com"
|
||||
PW="$(</run/secrets/home-assistant-ldap)"
|
||||
BASEDN="ou=users,dc=cloonar,dc=com"
|
||||
SCOPE="one"
|
||||
FILTER="(&(objectClass=cloonarUser)(memberOf=cn=HomeAssistant,ou=groups,dc=cloonar,dc=com)(mail=$(ldap_dn_escape "$username")))"
|
||||
USERNAME_PATTERN='^[a-z|A-Z|0-9|_|-|.|@]+$'
|
||||
on_auth_success() {
|
||||
# print the meta entries for use in HA
|
||||
if echo "$output" | grep -qE '^(dn|DN):: '; then
|
||||
# ldapsearch base64 encodes non-ascii
|
||||
output=$(echo "$output" | sed -n -e "s/^\(dn\|DN\)\s*::\s*\(.*\)$/\2/p" | base64 -d)
|
||||
else
|
||||
output=$(echo "$output" | sed -n -e "s/^\(dn\|DN\)\s*:\s*\(.*\)$/\2/p")
|
||||
fi
|
||||
name=$(echo "$output" | sed -nr 's/^cn=([^,]+).*/\1/Ip')
|
||||
[ -z "$name" ] || echo "name=$name"
|
||||
}
|
||||
EOF
|
||||
install -D -m755 ldap-auth.sh $out/bin/ldap-auth.sh
|
||||
wrapProgram $out/bin/ldap-auth.sh \
|
||||
--prefix PATH : ${lib.makeBinPath [pkgs.openldap pkgs.coreutils pkgs.gnused pkgs.gnugrep]} \
|
||||
--add-flags "$out/etc/home-assistant.cfg"
|
||||
'';
|
||||
};
|
||||
in
|
||||
{
|
||||
services.home-assistant.config.homeassistant.auth_providers = [
|
||||
{
|
||||
type = "command_line";
|
||||
command = "${ldap-auth-sh}/bin/ldap-auth.sh";
|
||||
meta = true;
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
440
hosts/fw-new/modules/home-assistant/light.nix
Normal file
@@ -0,0 +1,440 @@
|
||||
{
|
||||
services.home-assistant.extraComponents = [
|
||||
"deconz"
|
||||
"shelly"
|
||||
"sun"
|
||||
"nanoleaf"
|
||||
];
|
||||
|
||||
services.home-assistant.config = {
|
||||
homeassistant = {
|
||||
customize_domain = {
|
||||
light = {
|
||||
assumed_state = false;
|
||||
};
|
||||
};
|
||||
};
"automation light_sunrise" = {
|
||||
alias = "light_sunrise";
|
||||
trigger = {
|
||||
platform = "sun";
|
||||
event = "sunrise";
|
||||
};
|
||||
action = {
|
||||
service = "light.turn_on";
|
||||
target = {
|
||||
entity_id = "{{ states.light | selectattr(\"state\",\"eq\",\"on\") | map(attribute=\"entity_id\") | list }}";
|
||||
};
|
||||
data = {
|
||||
brightness_pct = 254;
|
||||
color_temp = 250;
|
||||
};
|
||||
};
|
||||
};
|
||||
"automation light_sunset" = {
|
||||
alias = "light_sunset";
|
||||
trigger = {
|
||||
platform = "sun";
|
||||
event = "sunset";
|
||||
};
|
||||
action = {
|
||||
service = "light.turn_on";
|
||||
target = {
|
||||
entity_id = "{{ states.light | selectattr(\"state\",\"eq\",\"on\") | map(attribute=\"entity_id\") | list }}";
|
||||
};
|
||||
data = {
|
||||
brightness_pct = 30;
|
||||
color_temp = 450;
|
||||
};
|
||||
};
|
||||
};
|
||||
"automation light_on" = {
|
||||
alias = "light_on";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = [
|
||||
"light.bedroom_lights"
|
||||
"light.kitchen_lights"
|
||||
"light.livingroom_lights"
|
||||
"light.hallway_lights"
|
||||
"light.bathroom_lights"
|
||||
"light.toilet_lights"
|
||||
"light.storage_lights"
|
||||
];
|
||||
to = "on";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
choose = [
|
||||
{
|
||||
conditions = [ "{{ state_attr('sun.sun', 'elevation') < 5 and trigger.entity_id == 'light.toilet_lights' }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.turn_on";
|
||||
target = {
|
||||
entity_id = "{{ trigger.entity_id }}";
|
||||
};
|
||||
data = {
|
||||
brightness_pct = 20;
|
||||
color_temp = 450;
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ state_attr('sun.sun', 'elevation') < 5 and trigger.entity_id == 'light.hallway_lights' }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.turn_on";
|
||||
target = {
|
||||
entity_id = "{{ trigger.entity_id }}";
|
||||
};
|
||||
data = {
|
||||
brightness_pct = 30;
|
||||
rgbw_color = [ 255 126 0 255 ];
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ state_attr('sun.sun', 'elevation') < 5 and trigger.entity_id == 'light.bathroom_lights' }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.turn_on";
|
||||
target = {
|
||||
entity_id = "{{ trigger.entity_id }}";
|
||||
};
|
||||
data = {
|
||||
brightness_pct = 30;
|
||||
rgbw_color = [ 255 126 0 255 ];
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ state_attr('sun.sun', 'elevation') < 5 and trigger.entity_id == 'light.livingroom_lights' }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.turn_on";
|
||||
target = {
|
||||
entity_id = "{{ trigger.entity_id }}";
|
||||
};
|
||||
data = {
|
||||
brightness_pct = 20;
|
||||
rgbw_color = [ 255 126 0 255 ];
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ state_attr('sun.sun', 'elevation') < 5 and trigger.entity_id == 'light.bedroom_lights' }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.turn_on";
|
||||
target = {
|
||||
entity_id = "light.bedroom_lights";
|
||||
};
|
||||
data = {
|
||||
brightness_pct = 20;
|
||||
rgbw_color = [ 255 126 0 255 ];
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ state_attr('sun.sun', 'elevation') < 5 and trigger.entity_id == 'light.kitchen_lights' }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.turn_on";
|
||||
target = {
|
||||
entity_id = "light.kitchen_lights";
|
||||
};
|
||||
data = {
|
||||
brightness_pct = 30;
|
||||
color_temp = 450;
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ state_attr('sun.sun', 'elevation') > 4 }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.turn_on";
|
||||
target = {
|
||||
entity_id = "{{ trigger.entity_id }}";
|
||||
};
|
||||
data = {
|
||||
brightness_pct = 100;
|
||||
color_temp = 250;
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation bathroom light small" = {
|
||||
alias = "bathroom light small";
|
||||
mode = "restart";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = [
|
||||
"light.bathroom_switch_channel_1"
|
||||
];
|
||||
from = "on";
|
||||
to = "off";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "switch.turn_off";
|
||||
target = {
|
||||
entity_id = "switch.bathroom_small";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation bathroom light" = {
|
||||
alias = "bathroom light";
|
||||
mode = "restart";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = [
|
||||
"light.bathroom_switch_channel_1"
|
||||
];
|
||||
from = "off";
|
||||
to = "on";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
delay = 3600;
|
||||
}
|
||||
{
|
||||
service = "light.turn_off";
|
||||
target = {
|
||||
entity_id = "light.bathroom_switch_channel_1";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation bed_led" = {
|
||||
alias = "bed_led";
|
||||
mode = "restart";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = [
|
||||
"light.bedroom_led"
|
||||
];
|
||||
from = "off";
|
||||
to = "on";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
delay = 10800;
|
||||
}
|
||||
{
|
||||
service = "light.turn_off";
|
||||
target = {
|
||||
entity_id = "{{ trigger.entity_id }}";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation hallway_motion" = {
|
||||
alias = "Hallway Motion";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = "binary_sensor.hallway_motion_motion";
|
||||
};
|
||||
action = {
|
||||
service_template = "light.turn_{{ trigger.to_state.state }}";
|
||||
target = {
|
||||
entity_id = "light.hallway_lights";
|
||||
};
|
||||
};
|
||||
};
|
||||
"automation bed_button_1" = {
|
||||
alias = "bed_button_1";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "shelly.click";
|
||||
event_data = {
|
||||
device = "shellybutton1-E8DB84AA196D";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
choose = [
|
||||
{
|
||||
conditions = [ "{{ trigger.event.data.click_type == \"single\" }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.toggle";
|
||||
entity_id = "light.bed_reading_1";
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ trigger.event.data.click_type == \"double\" }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.toggle";
|
||||
entity_id = "light.bedroom_lights";
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ trigger.event.data.click_type == \"triple\" }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.toggle";
|
||||
entity_id = "light.bedroom_bed";
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation bed_button_2" = {
|
||||
alias = "bed_button_2";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "shelly.click";
|
||||
event_data = {
|
||||
device = "shellybutton1-E8DB84AA136D";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
choose = [
|
||||
{
|
||||
conditions = [ "{{ trigger.event.data.click_type == \"single\" }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.toggle";
|
||||
entity_id = "light.bed_reading_2";
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ trigger.event.data.click_type == \"double\" }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.toggle";
|
||||
entity_id = "light.bedroom_lights";
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ trigger.event.data.click_type == \"triple\" }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.toggle";
|
||||
entity_id = "light.bedroom_bed";
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
};
|
||||
light = [
|
||||
{
|
||||
platform = "switch";
|
||||
name = "Livingroom Switch";
|
||||
entity_id = "switch.livingroom_switch";
|
||||
}
|
||||
{
|
||||
platform = "group";
|
||||
name = "Livingroom Lights";
|
||||
all = true;
|
||||
entities = [
|
||||
"light.livingroom_switch"
|
||||
"light.living_room_bulb_1"
|
||||
"light.living_room_bulb_2"
|
||||
"light.living_room_bulb_3"
|
||||
"light.living_room_bulb_4"
|
||||
"light.living_room_bulb_5"
|
||||
"light.living_room_bulb_6"
|
||||
];
|
||||
}
|
||||
{
|
||||
platform = "switch";
|
||||
name = "Kitchen Switch";
|
||||
entity_id = "switch.kitchen_switch";
|
||||
}
|
||||
{
|
||||
platform = "group";
|
||||
name = "Kitchen Lights";
|
||||
all = true;
|
||||
entities = [
|
||||
"light.kitchen_switch"
|
||||
"light.kitchen"
|
||||
];
|
||||
}
|
||||
{
|
||||
platform = "switch";
|
||||
name = "Bedroom Switch";
|
||||
entity_id = "switch.bedroom_switch";
|
||||
}
|
||||
{
|
||||
platform = "group";
|
||||
name = "Bedroom Lights";
|
||||
all = true;
|
||||
entities = [
|
||||
"light.bedroom_switch"
|
||||
"light.bedroom_bulb_1"
|
||||
"light.bedroom_bulb_2"
|
||||
"light.bedroom_bulb_3"
|
||||
"light.bedroom_bulb_4"
|
||||
];
|
||||
}
|
||||
{
|
||||
platform = "group";
|
||||
name = "Bathroom Lights";
|
||||
all = true;
|
||||
entities = [
|
||||
"light.bathroom_switch"
|
||||
"light.bathroom_bulb_1"
|
||||
"light.bathroom_bulb_2"
|
||||
];
|
||||
}
|
||||
{
|
||||
platform = "switch";
|
||||
name = "Hallway Switch";
|
||||
entity_id = "switch.hallway";
|
||||
}
|
||||
{
|
||||
platform = "group";
|
||||
name = "Hallway Lights";
|
||||
all = true;
|
||||
entities = [
|
||||
"light.hallway_switch"
|
||||
"light.hallway_bulb_1"
|
||||
"light.hallway_bulb_2"
|
||||
];
|
||||
}
|
||||
{
|
||||
platform = "switch";
|
||||
name = "Toilet Switch";
|
||||
entity_id = "switch.toilet";
|
||||
}
|
||||
{
|
||||
platform = "group";
|
||||
name = "Toilet Lights";
|
||||
all = true;
|
||||
entities = [
|
||||
"light.toilet_switch"
|
||||
"light.toilet_bulb"
|
||||
];
|
||||
}
|
||||
];
|
||||
};
|
||||
}
|
||||
176
hosts/fw-new/modules/home-assistant/locks.nix
Normal file
@@ -0,0 +1,176 @@
|
||||
{
|
||||
services.home-assistant.extraComponents = [
|
||||
"nuki"
|
||||
];
|
||||
|
||||
services.home-assistant.config = {
|
||||
"automation house_door" = {
|
||||
alias = "house_door";
|
||||
mode = "restart";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = [
|
||||
"device_tracker.dominiks_iphone"
|
||||
];
|
||||
from = "not_home";
|
||||
to = "home";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "script.turn_on";
|
||||
target = {
|
||||
entity_id = "script.turn_on_circuits";
|
||||
};
|
||||
}
|
||||
{
|
||||
service = "lock.unlock";
|
||||
target = {
|
||||
entity_id = "lock.house_door";
|
||||
};
|
||||
}
|
||||
{
|
||||
delay = "00:05:00";
|
||||
}
|
||||
{
|
||||
service = "lock.lock";
|
||||
target = {
|
||||
entity_id = "lock.house_door";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation house_door_ring" = {
|
||||
alias = "house_door_ring";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "nuki_event";
|
||||
event_data = {
|
||||
type = "ring";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
choose = [
|
||||
{
|
||||
conditions = [ "{{ state.house_door == \"unlocked\" }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "lock.lock";
|
||||
target = {
|
||||
entity_id = "lock.house_door";
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ state.house_door != \"unlocked\" }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "notify.mobile_app_dominiks_iphone";
|
||||
data = {
|
||||
message = "Someone is at the door!";
|
||||
actions = [
|
||||
{
|
||||
action = "action_open";
|
||||
title = "Open house door";
|
||||
}
|
||||
{
|
||||
action = "action_ignore";
|
||||
title = "Ignore";
|
||||
}
|
||||
];
|
||||
};
|
||||
}
|
||||
{
|
||||
wait_for_trigger = [
|
||||
{
|
||||
platform = "event";
|
||||
event_type = "mobile_app_notification_action";
|
||||
event_data = {
|
||||
action = "{{ action_open }}";
|
||||
|
||||
};
|
||||
}
|
||||
{
|
||||
platform = "event";
|
||||
event_type = "mobile_app_notification_action";
|
||||
event_data = {
|
||||
action = "{{ action_ignore }}";
|
||||
};
|
||||
}
|
||||
|
||||
];
|
||||
}
|
||||
{
|
||||
choose = [
|
||||
{
|
||||
conditions = "{{ wait.trigger.event.data.action == action_open }}";
|
||||
sequence = [{
|
||||
service = "lock.open";
|
||||
target = {
|
||||
entity_id = "lock.house_door";
|
||||
};
|
||||
}];
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
};
|
||||
binary_sensor = [
|
||||
{
|
||||
platform = "template";
|
||||
sensors = {
|
||||
lock_critical_battery = {
|
||||
value_template = ''
|
||||
{% set domains = ['lock'] %}
|
||||
{% set ns = namespace(battery_critical=false) %}
|
||||
{% for domain in domains %}
|
||||
{% set batt_critical = states[domain] | selectattr('attributes.battery_critical','defined') | map(attribute='attributes.battery_critical') | reject('equalto', 'unknown') | reject('equalto', 'None') | map('int') | reject('equalto', 0) | list %}
|
||||
{% if batt_critical|length > 0 %}
|
||||
{% set ns.battery_critical = true %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{{ ns.battery_critical }}
|
||||
'';
|
||||
friendly_name = "A lock has critical battery";
|
||||
device_class = "problem";
|
||||
};
|
||||
};
|
||||
}
|
||||
];
|
||||
alert = {
|
||||
battery_critical = {
|
||||
name = "Lock has low battery!";
|
||||
message = ''
|
||||
{%- set domains = ['lock'] -%}
|
||||
Lock battery is critical:
|
||||
{%- for domain in domains -%}
|
||||
{%- for item in states[domain] -%}
|
||||
{%- if item.attributes.battery_critical is defined -%}
|
||||
{% if item.attributes.battery_critical %}
|
||||
- {{ item.attributes.friendly_name }}
|
||||
{%- endif -%}
|
||||
{%- endif -%}
|
||||
{%- endfor -%}
|
||||
{%- endfor -%}
|
||||
'';
|
||||
entity_id = "binary_sensor.lock_critical_battery";
|
||||
state = "on";
|
||||
repeat = [
|
||||
5
|
||||
60
|
||||
360
|
||||
];
|
||||
skip_first = true;
|
||||
can_acknowledge = true;
|
||||
notifiers = [
|
||||
"NotificationGroup"
|
||||
];
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
512
hosts/fw-new/modules/home-assistant/multimedia.nix
Normal file
@@ -0,0 +1,512 @@
|
||||
{
|
||||
services.home-assistant.extraComponents = [
|
||||
"ping"
|
||||
"broadlink"
|
||||
"androidtv"
|
||||
"samsungtv"
|
||||
"apple_tv"
|
||||
];
|
||||
services.home-assistant.config = {
|
||||
ios = {
|
||||
actions = [
|
||||
{
|
||||
name = "Home Cinema";
|
||||
label.text = "Home Cinema";
|
||||
icon = {
|
||||
icon = "theater";
|
||||
color = "#ffffff";
|
||||
};
|
||||
show_in_watch = true;
|
||||
}
|
||||
];
|
||||
};
|
||||
binary_sensor = [
|
||||
{
|
||||
name = "xbox";
|
||||
platform = "ping";
|
||||
host = "xbox.cloonar.multimedia";
|
||||
count = 2;
|
||||
scan_interval = 5;
|
||||
}
|
||||
{
|
||||
name = "ps5";
|
||||
platform = "ping";
|
||||
host = "ps5.cloonar.multimedia";
|
||||
count = 2;
|
||||
scan_interval = 5;
|
||||
}
|
||||
{
|
||||
name = "steamdeck";
|
||||
platform = "ping";
|
||||
host = "steamdeck.cloonar.com";
|
||||
count = 2;
|
||||
scan_interval = 5;
|
||||
}
|
||||
{
|
||||
platform = "template";
|
||||
sensors = {
|
||||
multimedia_device_on = {
|
||||
friendly_name = "Any multimedia device on";
|
||||
device_class = "connectivity";
|
||||
value_template = ''
|
||||
{% if ((states('media_player.living_room') != 'off') and (states('media_player.living_room') != 'standby')) or is_state('binary_sensor.ps5', 'on') or is_state('binary_sensor.xbox', 'on') or (is_state('binary_sensor.steamdeck', 'on') and (states('sensor.steamdeck_power') | float(default=0) > 5)) %}
|
||||
on
|
||||
{% else %}
|
||||
off
|
||||
{% endif %}
|
||||
'';
|
||||
};
|
||||
};
|
||||
}
|
||||
];
|
||||
script = {
|
||||
turn_on_tv = {
|
||||
sequence = [
|
||||
{
|
||||
choose = [
|
||||
{
|
||||
conditions = [
|
||||
{
|
||||
condition = "state";
|
||||
entity_id = "switch.tv_switch";
|
||||
state = "off";
|
||||
}
|
||||
];
|
||||
sequence = [
|
||||
{
|
||||
service = "automation.turn_off";
|
||||
target = {
|
||||
entity_id = "automation.all_multimedia_off"; # Replace with your target automation ID
|
||||
};
|
||||
}
|
||||
{
|
||||
service = "switch.turn_on";
|
||||
target = {
|
||||
entity_id = "switch.tv_switch";
|
||||
};
|
||||
}
|
||||
{
|
||||
delay = 10;
|
||||
}
|
||||
{
|
||||
service = "remote.send_command";
|
||||
target = {
|
||||
entity_id = "remote.rmproplus";
|
||||
};
|
||||
data = {
|
||||
num_repeats = 1;
|
||||
delay_secs = 0.4;
|
||||
hold_secs = 0;
|
||||
command = "b64:JgBOAJaSFREVNRU2FTUVERURFRAVERURFTUVNhU1FREVERUQFREVERUQFTYVNRURFREVEBURFTYVNRURFRAVNhU1FTYVNRUABfmWkhURFQANBQAAAAAAAAAAAAA=";
|
||||
};
|
||||
}
|
||||
{
|
||||
delay = 10;
|
||||
}
|
||||
{
|
||||
service = "remote.turn_on";
|
||||
target = {
|
||||
entity_id = "remote.living_room";
|
||||
};
|
||||
}
|
||||
{
|
||||
delay = 120;
|
||||
}
|
||||
{
|
||||
service = "automation.turn_on";
|
||||
target = {
|
||||
entity_id = "automation.all_multimedia_off"; # Replace with your target automation ID
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [
|
||||
{
|
||||
condition = "state";
|
||||
entity_id = "media_player.android_tv_metz_cloonar_multimedia";
|
||||
state = "unavailable";
|
||||
}
|
||||
];
|
||||
sequence = [
|
||||
{
|
||||
service = "remote.send_command";
|
||||
target = {
|
||||
entity_id = "remote.rmproplus";
|
||||
};
|
||||
data = {
|
||||
num_repeats = 1;
|
||||
delay_secs = 0.4;
|
||||
hold_secs = 0;
|
||||
command = "b64:JgBOAJaSFREVNRU2FTUVERURFRAVERURFTUVNhU1FREVERUQFREVERUQFTYVNRURFREVEBURFTYVNRURFRAVNhU1FTYVNRUABfmWkhURFQANBQAAAAAAAAAAAAA=";
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [
|
||||
{
|
||||
condition = "state";
|
||||
entity_id = "media_player.android_tv_metz_cloonar_multimedia";
|
||||
state = "off";
|
||||
}
|
||||
];
|
||||
sequence = [
|
||||
{
|
||||
service = "media_player.turn_on";
|
||||
target = {
|
||||
entity_id = "media_player.android_tv_metz_cloonar_multimedia";
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
"automation steamdeck on" = {
|
||||
alias = "steamdeck on";
|
||||
trigger = {
|
||||
platform = "template";
|
||||
value_template = "{% if is_state('binary_sensor.steamdeck', 'on') and (states('sensor.steamdeck_power') | float > 5) %}true{% endif %}";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "denonavr.get_command";
|
||||
target = {
|
||||
entity_id = "media_player.marantz_sr6015";
|
||||
};
|
||||
data = {
|
||||
command = "/goform/formiPhoneAppDirect.xml?SIDVD";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation xbox on" = {
|
||||
alias = "xbox on";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = "binary_sensor.xbox";
|
||||
to = "on";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "denonavr.get_command";
|
||||
target = {
|
||||
entity_id = "media_player.marantz_sr6015";
|
||||
};
|
||||
data = {
|
||||
command = "/goform/formiPhoneAppDirect.xml?SIGAME";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation firetv on" = {
|
||||
alias = "firetv on";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = "media_player.fire_tv_firetv_living_cloonar_multimedia";
|
||||
from = "off";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "denonavr.get_command";
|
||||
target = {
|
||||
entity_id = "media_player.marantz_sr6015";
|
||||
};
|
||||
data = {
|
||||
command = "/goform/formiPhoneAppDirect.xml?SIMPLAY";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation ps5 on" = {
|
||||
alias = "ps5 on";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = "binary_sensor.ps5";
|
||||
to = "on";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "denonavr.get_command";
|
||||
target = {
|
||||
entity_id = "media_player.marantz_sr6015";
|
||||
};
|
||||
data = {
|
||||
command = "/goform/formiPhoneAppDirect.xml?SIBD";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation all multimedia off" = {
|
||||
alias = "all multimedia off";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = "binary_sensor.multimedia_device_on";
|
||||
to = "off";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "media_player.turn_off";
|
||||
target = {
|
||||
entity_id = "media_player.android_tv_metz_cloonar_multimedia";
|
||||
};
|
||||
}
|
||||
{
|
||||
service = "denonavr.get_command";
|
||||
target = {
|
||||
entity_id = "media_player.marantz_sr6015";
|
||||
};
|
||||
data = {
|
||||
command = "/goform/formiPhoneAppDirect.xml?PWSTANDBY";
|
||||
};
|
||||
}
|
||||
# silverscreen up
|
||||
{
|
||||
service = "remote.send_command";
|
||||
target = {
|
||||
entity_id = "remote.rmproplus";
|
||||
};
|
||||
data = {
|
||||
num_repeats = 2;
|
||||
delay_secs = 1;
|
||||
hold_secs = 0;
|
||||
command = "b64:sgBqAgkaBBoJCRsJHBoKGgoJGgQaCQkaBAgbGwoIHAgcGwkJGwgAARkbCRsJGwkJGgQaCgkaBAgbCRsbCQkbGwkJGgQIGxwJGwkJGxsJCRwIHBoKCBsECBsbCAQIGwkAARgbChoKGgoJGxsJCRoECBsJHBsJCRoEGgkJGwkcGgobCQkbGwkJGwkbGwoIHAkbGwkJGwkAARgbCRsJGwoIGxwJCRsJGwkbGwoIGxwIChoKGhwJGwkJHBsJCRsJGxsJCRsJHBsJCRsJAAEYGwkbCRsKCBscCQkbCRsJGxsJCRwbCQkbCRsbCRsJCRscCQgcCRocCQkbCRsbCQobCQABGBsJGwkbCQkbHAkJGwkbCRsbCQkbGwoJGwkbGwkbCQkbGwoIHAkbGwkJGgobGwkKGwkAARccCRsJGwkJHBsJCRsJGwkbGwkJGxsKCRsIHBsJGwkJGxsKCRoJGxwJCRsJGxsJChsIAAEZGwgcCRsJCRscCQkbCRsJGhwJCRscCQkaChsbCRsJCRscCQgcCRocCQkbCRsbCggcCQABGBsJGwkbCggcGwkJGwkbCRsbCggcGgoJGwkbGwkbCggcGwkJGwkbGwkJHAgcGwkJGwkAARgbChoKGgoJGhwJCRsJGwkcGgoJGxsJCRsJGxsJHAkJGxsJCRsJGhwJCRwJGhwJCRsJAAEYGwoaChsJCRsbCQkaChsJGxwJCRsbCQkbCRsbChsJCRsbCQkbCRsbCgkbCRsbCQkcCAABFwQaChsJGwkJGxsKCBwIHAgcGwkJGxsKCBwIGwQaCRsJCRwaCggcCBwbCQkbCRwaCggcCAAF3AAAAAAAAAAAAAAAAAAA";
|
||||
};
|
||||
}
|
||||
# turn off beamer
|
||||
{
|
||||
service = "remote.send_command";
|
||||
target = {
|
||||
entity_id = "remote.rmproplus";
|
||||
};
|
||||
data = {
|
||||
num_repeats = 2;
|
||||
delay_secs = 1;
|
||||
hold_secs = 0;
|
||||
command = "b64:JgDaAAABKZMUERMSExITEhMSExETEhMSExITEhMSExETNxQ2ExITEhMSEzcTNxM3ExITEhM3ExITNxMSEhITEhM3EzcTEhM3EwAFyAABKJQUERMSEhITEhMSExITEhMSEhITEhMSExITNxM3ExITEhMREzcTNxQ3EhITEhM3ExITNxMSExITEhM3EzcTEhM3EwAFyAABKJQUERMSExETEhMSExITEhMSExETEhMSExITNxM3ExITEhMREzcTOBI4ExETEhM3ExITNxMSExITEhM3EzcTEhM3E5IGAA0FAAAAAAAAAAAAAAAAAAA=";
|
||||
};
|
||||
}
|
||||
# turn off tv switch
|
||||
{
|
||||
service = "switch.turn_off";
|
||||
target = {
|
||||
entity_id = "switch.tv_switch";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation all_multimedia_on" = {
|
||||
alias = "all multimedia on";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = "binary_sensor.multimedia_device_on";
|
||||
to = "on";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "script.turn_on";
|
||||
target = {
|
||||
entity_id = "script.turn_on_tv";
|
||||
};
|
||||
}
|
||||
{
|
||||
delay = 5;
|
||||
}
|
||||
{
|
||||
service = "androidtv.adb_command";
|
||||
target = {
|
||||
entity_id = "media_player.android_tv_metz_cloonar_multimedia";
|
||||
};
|
||||
data = {
|
||||
command = "adb shell am start -a android.intent.action.VIEW -d content://android.media.tv/passthrough/com.mediatek.tvinput%2F.hdmi.HDMIInputService%2FHDMI100004";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation bedroom tv off" = {
|
||||
alias = "bedroom tv off";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = "media_player.fire_tv_firetv_bedroom_cloonar_multimedia";
|
||||
to = "off";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "media_player.turn_off";
|
||||
target = {
|
||||
entity_id = "media_player.samsung_7_series_55";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation multimedia scene switch" = {
|
||||
alias = "multimedia scene switch";
|
||||
trigger = [
|
||||
{
|
||||
platform = "event";
|
||||
event_type = "button_pressed";
|
||||
event_data = {
|
||||
id = [ 254 235 105 198 ];
|
||||
onoff = 1;
|
||||
};
|
||||
}
|
||||
{
|
||||
platform = "event";
|
||||
event_type = "ios.action_fired";
|
||||
event_data = {
|
||||
actionID = "Home Cinema";
|
||||
};
|
||||
}
|
||||
];
|
||||
action = [
|
||||
{
|
||||
choose = [
|
||||
{
|
||||
conditions = [
|
||||
{
|
||||
condition = "state";
|
||||
entity_id = "switch.tv_switch";
|
||||
state = "off";
|
||||
}
|
||||
];
|
||||
sequence = [
|
||||
{
|
||||
service = "script.turn_on";
|
||||
target = {
|
||||
entity_id = "script.turn_on_tv";
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [
|
||||
{
|
||||
condition = "or";
|
||||
conditions = [
|
||||
{
|
||||
condition = "state";
|
||||
entity_id = "media_player.android_tv_metz_cloonar_multimedia";
|
||||
state = "on";
|
||||
}
|
||||
{
|
||||
condition = "state";
|
||||
entity_id = "media_player.android_tv_metz_cloonar_multimedia";
|
||||
state = "idle";
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
sequence = [
|
||||
# silver screen down
|
||||
{
|
||||
service = "remote.send_command";
|
||||
target = {
|
||||
entity_id = "remote.rmproplus";
|
||||
};
|
||||
data = {
|
||||
num_repeats = 2;
|
||||
delay_secs = 1;
|
||||
hold_secs = 0;
|
||||
command = "b64:sQs0AB0JCxsLGx0IHQgLGh0ICxoLGx0JCxodCQobCxoLAAEXHQgdCR0JCxodCQsbCxsLGx0JCxoAAAAA";
|
||||
};
|
||||
}
|
||||
# turn on beamer
|
||||
{
|
||||
service = "remote.send_command";
|
||||
target = {
|
||||
entity_id = "remote.rmproplus";
|
||||
};
|
||||
data = {
|
||||
num_repeats = 1;
|
||||
delay_secs = 0.4;
|
||||
hold_secs = 0;
|
||||
command = "b64:JgAgAQABKZMUERMSExETEhMSExITEhMSExETEhMSExITNxM3ExITERM3EzgSOBM3ExETEhM3ExITEhMSExITERM3EzcTEhM3EwAFyAABKZMTEhMRExITEhMSExITEhMRExITEhMSExITNxM3ExITERM3EzcTNxM3ExITEhM3ExITEhMSExETEhM3EzcTEhM3EwAFyAABKZMUERMRExITEhMSExITERMSExITEhMSExITNxM3ExISEhM3EzcTNxM3ExITEhM3ExITEhMSExETEhM3EzcTEhM3EwAFxwABKZQUERMRFBETEhMSExITEhISExITEhMSExITNxM3ExITERM3EzcTNxM3FBETEhM3ExITEhMSExITERM3EzcTEhM3EwANBQAAAAAAAAAA";
|
||||
};
|
||||
}
|
||||
{
|
||||
service = "media_player.turn_off";
|
||||
target = {
|
||||
entity_id = "media_player.android_tv_metz_cloonar_multimedia";
|
||||
};
|
||||
}
|
||||
{
|
||||
service = "media_player.turn_on";
|
||||
target = {
|
||||
entity_id = "media_player.marantz_sr6015";
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [
|
||||
{
|
||||
condition = "or";
|
||||
conditions = [
|
||||
{
|
||||
condition = "state";
|
||||
entity_id = "media_player.android_tv_metz_cloonar_multimedia";
|
||||
state = "off";
|
||||
}
|
||||
{
|
||||
condition = "state";
|
||||
entity_id = "media_player.android_tv_metz_cloonar_multimedia";
|
||||
state = "unavailable";
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
sequence = [
|
||||
{
|
||||
service = "remote.send_command";
|
||||
target = {
|
||||
entity_id = "remote.rmproplus";
|
||||
};
|
||||
data = {
|
||||
num_repeats = 2;
|
||||
delay_secs = 1;
|
||||
hold_secs = 0;
|
||||
command = "b64:sgBqAgkaBBoJCRsJHBoKGgoJGgQaCQkaBAgbGwoIHAgcGwkJGwgAARkbCRsJGwkJGgQaCgkaBAgbCRsbCQkbGwkJGgQIGxwJGwkJGxsJCRwIHBoKCBsECBsbCAQIGwkAARgbChoKGgoJGxsJCRoECBsJHBsJCRoEGgkJGwkcGgobCQkbGwkJGwkbGwoIHAkbGwkJGwkAARgbCRsJGwoIGxwJCRsJGwkbGwoIGxwIChoKGhwJGwkJHBsJCRsJGxsJCRsJHBsJCRsJAAEYGwkbCRsKCBscCQkbCRsJGxsJCRwbCQkbCRsbCRsJCRscCQgcCRocCQkbCRsbCQobCQABGBsJGwkbCQkbHAkJGwkbCRsbCQkbGwoJGwkbGwkbCQkbGwoIHAkbGwkJGgobGwkKGwkAARccCRsJGwkJHBsJCRsJGwkbGwkJGxsKCRsIHBsJGwkJGxsKCRoJGxwJCRsJGxsJChsIAAEZGwgcCRsJCRscCQkbCRsJGhwJCRscCQkaChsbCRsJCRscCQgcCRocCQkbCRsbCggcCQABGBsJGwkbCggcGwkJGwkbCRsbCggcGgoJGwkbGwkbCggcGwkJGwkbGwkJHAgcGwkJGwkAARgbChoKGgoJGhwJCRsJGwkcGgoJGxsJCRsJGxsJHAkJGxsJCRsJGhwJCRwJGhwJCRsJAAEYGwoaChsJCRsbCQkaChsJGxwJCRsbCQkbCRsbChsJCRsbCQkbCRsbCgkbCRsbCQkcCAABFwQaChsJGwkJGxsKCBwIHAgcGwkJGxsKCBwIGwQaCRsJCRwaCggcCBwbCQkbCRwaCggcCAAF3AAAAAAAAAAAAAAAAAAA";
|
||||
};
|
||||
}
|
||||
# turn off beamer
|
||||
{
|
||||
service = "remote.send_command";
|
||||
target = {
|
||||
entity_id = "remote.rmproplus";
|
||||
};
|
||||
data = {
|
||||
num_repeats = 2;
|
||||
delay_secs = 1;
|
||||
hold_secs = 0;
|
||||
command = "b64:JgDaAAABKZMUERMSExITEhMSExETEhMSExITEhMSExETNxQ2ExITEhMSEzcTNxM3ExITEhM3ExITNxMSEhITEhM3EzcTEhM3EwAFyAABKJQUERMSEhITEhMSExITEhMSEhITEhMSExITNxM3ExITEhMREzcTNxQ3EhITEhM3ExITNxMSExITEhM3EzcTEhM3EwAFyAABKJQUERMSExETEhMSExITEhMSExETEhMSExITNxM3ExITEhMREzcTOBI4ExETEhM3ExITNxMSExITEhM3EzcTEhM3E5IGAA0FAAAAAAAAAAAAAAAAAAA=";
|
||||
};
|
||||
}
|
||||
{
|
||||
service = "script.turn_on";
|
||||
target = {
|
||||
entity_id = "script.turn_on_tv";
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
delay = 5;
|
||||
}
|
||||
{
|
||||
service = "androidtv.adb_command";
|
||||
target = {
|
||||
entity_id = "media_player.android_tv_metz_cloonar_multimedia";
|
||||
};
|
||||
data = {
|
||||
command = "adb shell am start -a android.intent.action.VIEW -d content://android.media.tv/passthrough/com.mediatek.tvinput%2F.hdmi.HDMIInputService%2FHDMI100004";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
}
|
||||
22
hosts/fw-new/modules/home-assistant/music.nix
Normal file
@@ -0,0 +1,22 @@
{
  services.home-assistant.config = {
    "automation toilet music" = {
      alias = "toilet music";
      trigger = {
        platform = "state";
        entity_id = "light.toilet_lights";
      };
      action = [
        {
          service = "media_player.volume_mute";
          target = {
            entity_id = "media_player.music_toilet_snapcast_client";
          };
          data = {
            is_volume_muted = "{{ trigger.to_state.state == 'off' }}";
          };
        }
      ];
    };
  };
}
15
hosts/fw-new/modules/home-assistant/notify.nix
Normal file
@@ -0,0 +1,15 @@
{
  services.home-assistant.config = {
    notify = [
      {
        name = "NotificationGroup";
        platform = "group";
        services = [
          {
            service = "pushover_dominik";
          }
        ];
      }
    ];
  };
}
71
hosts/fw-new/modules/home-assistant/pc.nix
Normal file
@@ -0,0 +1,71 @@
|
||||
{
|
||||
services.home-assistant.extraComponents = [
|
||||
"wake_on_lan"
|
||||
];
|
||||
services.home-assistant.config = {
|
||||
ios = {
|
||||
actions = [
|
||||
{
|
||||
name = "Turn on PC";
|
||||
label.text = "Turn on PC";
|
||||
icon = {
|
||||
icon = "controller";
|
||||
color = "#ffffff";
|
||||
};
|
||||
show_in_watch = true;
|
||||
}
|
||||
];
|
||||
};
|
||||
wake_on_lan = {};
|
||||
"automation pc_switch" = {
|
||||
alias = "switch pc";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "button_pressed";
|
||||
event_data = {
|
||||
id = [ 254 235 105 198 ];
|
||||
onoff = 0;
|
||||
};
|
||||
};
|
||||
action = {
|
||||
service = "script.turn_on";
|
||||
target = {
|
||||
entity_id = "script.turn_on_pc";
|
||||
};
|
||||
};
|
||||
};
|
||||
script = {
|
||||
turn_on_pc = {
|
||||
sequence = [
|
||||
{
|
||||
service = "wake_on_lan.send_magic_packet";
|
||||
data = {
|
||||
mac = "04:7c:16:d5:63:5e";
|
||||
broadcast_address = "10.42.96.5";
|
||||
broadcast_port = 9;
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
"automation turn on pc" = {
|
||||
trigger = [
|
||||
{
|
||||
platform = "event";
|
||||
event_type = "ios.action_fired";
|
||||
event_data = {
|
||||
actionID = "Turn on PC";
|
||||
};
|
||||
}
|
||||
];
|
||||
action = [
|
||||
{
|
||||
service = "script.turn_on";
|
||||
target = {
|
||||
entity_id = "script.turn_on_pc";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
}
|
||||
43
hosts/fw-new/modules/home-assistant/power-saving.nix
Normal file
@@ -0,0 +1,43 @@
|
||||
{
|
||||
services.home-assistant.config = {
|
||||
"automation power leave home" = {
|
||||
alias = "leave house power";
|
||||
mode = "restart";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = [
|
||||
"device_tracker.dominiks_iphone"
|
||||
];
|
||||
from = "home";
|
||||
to = "not_home";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "script.turn_on";
|
||||
target = {
|
||||
entity_id = "script.turn_off_circuits";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation arrive home power" = {
|
||||
alias = "arrive home power";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = [
|
||||
"device_tracker.dominiks_iphone"
|
||||
];
|
||||
from = "not_home";
|
||||
to = "home";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "script.turn_on";
|
||||
target = {
|
||||
entity_id = "script.turn_on_circuits";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
}
|
||||
65
hosts/fw-new/modules/home-assistant/presense.nix
Normal file
@@ -0,0 +1,65 @@
|
||||
{ pkgs, ... }:
|
||||
let
|
||||
bermuda = pkgs.callPackage ./custom-components/bermuda.nix {};
|
||||
in
|
||||
{
|
||||
services.home-assistant.extraComponents = [
|
||||
"daikin"
|
||||
"enocean"
|
||||
"private_ble_device"
|
||||
];
|
||||
|
||||
systemd.services.install-bermuda = {
|
||||
description = "Install Bermuda";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
serviceConfig = {
|
||||
Type = "oneshot";
|
||||
};
|
||||
script = ''
|
||||
set -e
|
||||
BERMUDA_VERSION="v0.7.2" # Replace with the latest version
|
||||
BERMUDA_DIR="/var/lib/hass/custom_components/bermuda"
|
||||
|
||||
mkdir -p "$BERMUDA_DIR"
|
||||
${pkgs.curl}/bin/curl -L "https://github.com/agittins/bermuda/releases/download/$BERMUDA_VERSION/bermuda.zip" -o /tmp/bermuda.zip
|
||||
${pkgs.unzip}/bin/unzip -o /tmp/bermuda.zip -d "$BERMUDA_DIR"
|
||||
rm /tmp/bermuda.zip
|
||||
chown -R hass:hass "$BERMUDA_DIR"
|
||||
'';
|
||||
};
|
||||
|
||||
services.home-assistant.config = {
|
||||
"automation presense kitchen" = {
|
||||
alias = "presense kitchen";
|
||||
mode = "restart";
|
||||
trigger = {
|
||||
platform = "state";
|
||||
entity_id = [
|
||||
"sensor.presense_kitchen"
|
||||
];
|
||||
};
|
||||
action = [
|
||||
{
|
||||
choose = [
|
||||
{
|
||||
conditions = [ "{{ trigger.to_state.state == \"\" }}" ];
|
||||
sequence = [
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ trigger.to_state.state != \"\" }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.turn_on";
|
||||
entity_id = "light.kitchen_lights";
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
};
|
||||
sensor = [
|
||||
];
|
||||
};
|
||||
}
|
||||
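The install-bermuda oneshot above repeats the install-hacs pattern from default.nix almost verbatim (curl a release zip, unzip into /var/lib/hass/custom_components, chown to hass). A minimal sketch of a shared helper, assuming it would sit in a let binding next to these services; the name installUnzipService and its arguments are hypothetical and not part of this commit:

  installUnzipService = name: url: dir: {
    description = "Install ${name}";
    wantedBy = [ "multi-user.target" ];
    serviceConfig.Type = "oneshot";
    script = ''
      set -e
      mkdir -p "${dir}"
      ${pkgs.curl}/bin/curl -L "${url}" -o "/tmp/${name}.zip"
      ${pkgs.unzip}/bin/unzip -o "/tmp/${name}.zip" -d "${dir}"
      rm "/tmp/${name}.zip"
      chown -R hass:hass "${dir}"
    '';
  };

  # usage (hypothetical):
  # systemd.services.install-bermuda = installUnzipService "bermuda"
  #   "https://github.com/agittins/bermuda/releases/download/v0.7.2/bermuda.zip"
  #   "/var/lib/hass/custom_components/bermuda";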
16
hosts/fw-new/modules/home-assistant/pushover.nix
Normal file
@@ -0,0 +1,16 @@
{
  services.home-assistant.extraComponents = [
    "pushover"
  ];

  # services.home-assistant.config = {
  #   notify = [
  #     {
  #       name = "pushover_dominik";
  #       platform = "pushover";
  #       api_key = "!secret pushover_dominik_api_key";
  #       user_key = "!secret pushover_dominik_user_key";
  #     }
  #   ];
  # };
}
333
hosts/fw-new/modules/home-assistant/remote.nix
Normal file
@@ -0,0 +1,333 @@
|
||||
let
|
||||
remote_entity = "remote.living_room";
|
||||
media_player_entity = "media_player.living_room";
|
||||
in {
|
||||
services.home-assistant.config = {
|
||||
"automation press power" = {
|
||||
alias = "steamdeck on";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "Keyboard Power";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
choose = [
|
||||
{
|
||||
conditions = [
|
||||
{
|
||||
condition = "state";
|
||||
entity_id = "switch.tv_switch";
|
||||
state = "off";
|
||||
}
|
||||
];
|
||||
sequence = [
|
||||
{
|
||||
service = "script.turn_on";
|
||||
target = {
|
||||
entity_id = "script.turn_on_tv";
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [
|
||||
{
|
||||
condition = "template";
|
||||
value_template = "{{ states('media_player.living_room') != 'off' }}";
|
||||
}
|
||||
];
|
||||
sequence = [
|
||||
{
|
||||
action = "remote.send_command";
|
||||
target = {
|
||||
entity_id = remote_entity;
|
||||
|
||||
};
|
||||
data = {
|
||||
hold_secs = 1;
|
||||
command = "home";
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press up" = {
|
||||
alias = "remote press up";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "Keyboard UpArrow";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
action = "remote.send_command";
|
||||
target = {
|
||||
entity_id = remote_entity;
|
||||
};
|
||||
data = {
|
||||
command = "up";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press right" = {
|
||||
alias = "remote press right";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "Keyboard RightArrow";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
action = "remote.send_command";
|
||||
target = {
|
||||
entity_id = remote_entity;
|
||||
};
|
||||
data = {
|
||||
command = "right";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press down" = {
|
||||
alias = "remote press down";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "Keyboard DownArrow";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
action = "remote.send_command";
|
||||
target = {
|
||||
entity_id = remote_entity;
|
||||
};
|
||||
data = {
|
||||
command = "down";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press left" = {
|
||||
alias = "remote press left";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "Keyboard LeftArrow";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
action = "remote.send_command";
|
||||
target = {
|
||||
entity_id = remote_entity;
|
||||
};
|
||||
data = {
|
||||
command = "left";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press enter" = {
|
||||
alias = "remote press enter";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "Keypad ENTER";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
action = "remote.send_command";
|
||||
target = {
|
||||
entity_id = remote_entity;
|
||||
};
|
||||
data = {
|
||||
command = "select";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press return" = {
|
||||
alias = "remote press return";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "7_241";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
action = "remote.send_command";
|
||||
target = {
|
||||
entity_id = remote_entity;
|
||||
};
|
||||
data = {
|
||||
command = "menu";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press home" = {
|
||||
alias = "remote press home";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "AC Home";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
action = "remote.send_command";
|
||||
target = {
|
||||
entity_id = remote_entity;
|
||||
};
|
||||
data = {
|
||||
command = "home";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press Menu" = {
|
||||
alias = "remote press menu";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "Menu";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
action = "remote.send_command";
|
||||
target = {
|
||||
entity_id = remote_entity;
|
||||
};
|
||||
data = {
|
||||
hold_secs = 1;
|
||||
command = "select";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press program guide" = {
|
||||
alias = "remote press program guide";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "Media Select Program Guide";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "automation.trigger";
|
||||
target = {
|
||||
entity_id = "automation.multimedia_scene_switch";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press 1" = {
|
||||
alias = "remote press 1";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "AC Set Clock";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "light.toggle";
|
||||
target = {
|
||||
entity_id = "light.livingroom_switch";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press 2" = {
|
||||
alias = "remote press 2";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "AC View Clock";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "light.toggle";
|
||||
target = {
|
||||
entity_id = "light.4d_f7b4";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press prime video" = {
|
||||
alias = "remote press prime video";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "161_0";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
action = "media_player.select_source";
|
||||
target.entity_id = media_player_entity;
|
||||
data.source = "Prime Video";
|
||||
}
|
||||
];
|
||||
};
|
||||
"automation remote press netflix" = {
|
||||
alias = "remote press netflix";
|
||||
trigger = {
|
||||
platform = "event";
|
||||
event_type = "esphome.hid_events";
|
||||
event_data = {
|
||||
usage = "162_0";
|
||||
value = "1";
|
||||
};
|
||||
};
|
||||
action = [
|
||||
{
|
||||
action = "media_player.select_source";
|
||||
target.entity_id = media_player_entity;
|
||||
data.source = "Netflix";
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
};
|
||||
}
|
||||
27
hosts/fw-new/modules/home-assistant/roborock.nix
Normal file
@@ -0,0 +1,27 @@
{
  services.home-assistant.extraComponents = [
    "roborock"
  ];

  services.home-assistant.config = {
    "automation roborock" = {
      alias = "roborock";
      trigger = {
        platform = "state";
        entity_id = [
          "person.dominik"
        ];
        from = "home";
        to = "not_home";
      };
      action = [
        {
          service = "vacuum.start";
          target = {
            device_id = "136c307ff46cd968d08e9f9d20886755";
          };
        }
      ];
    };
  };
}
20
hosts/fw-new/modules/home-assistant/scene-switch.nix
Normal file
@@ -0,0 +1,20 @@
{
  services.home-assistant.config = {
    "automation scene_switch" = {
      alias = "switch scene";
      trigger = {
        platform = "event";
        event_type = "button_pressed";
        event_data = {
          id = [ 254 242 234 134 ];
        };
      };
      action = {
        service_template = "switch.turn_on";
        data_template = {
          entity_id = "switch.computer";
        };
      };
    };
  };
}
106
hosts/fw-new/modules/home-assistant/shelly.nix
Normal file
@@ -0,0 +1,106 @@
|
||||
{ lib, ... }:
|
||||
let
|
||||
colorbulbs = [
|
||||
];
|
||||
|
||||
switches = [
|
||||
];
|
||||
|
||||
proswitches = [
|
||||
{ name = "Livingroom Switch"; id = "shellyplus2pm-e86beae5d5d8"; relay = "0"; }
|
||||
{ name = "Kitchen Switch"; id = "shellyplus2pm-e86beae5d5d8"; relay = "1"; }
|
||||
{ name = "Bedroom Switch"; id = "shelly1pmminig3-34b7da933fe0"; relay = "0"; }
|
||||
{ name = "Hallway Circuit"; id = "shellypro3-c8f09e894448"; relay = "0"; }
|
||||
{ name = "Bathroom Circuit"; id = "shellypro3-c8f09e894448"; relay = "1"; }
|
||||
{ name = "Kitchen Circuit"; id = "shellypro3-c8f09e894448"; relay = "2"; }
|
||||
{ name = "TV Switch"; id = "shelly1pmminig3-34b7da8b64c8"; relay = "0"; }
|
||||
];
|
||||
in {
|
||||
services.home-assistant.extraComponents = [
|
||||
"shelly"
|
||||
"esphome"
|
||||
];
|
||||
|
||||
services.home-assistant.config = {
|
||||
mqtt = {
|
||||
switch = builtins.concatLists [
|
||||
(builtins.map (switch:
|
||||
let
|
||||
unique_id = builtins.replaceStrings [" "] ["_"] switch.name;
|
||||
in {
|
||||
name = switch.name;
|
||||
unique_id = unique_id;
|
||||
state_topic = "shellies/shellyswitch25-${switch.id}/relay/${switch.relay}";
|
||||
command_topic = "shellies/shellyswitch25-${switch.id}/relay/${switch.relay}/command";
|
||||
payload_on = "on";
|
||||
payload_off = "off";
|
||||
}
|
||||
) switches)
|
||||
(builtins.map (switch:
|
||||
let
|
||||
unique_id = builtins.replaceStrings [" "] ["_"] switch.name;
|
||||
in {
|
||||
name = switch.name;
|
||||
unique_id = unique_id;
|
||||
state_topic = "shellies/${switch.id}/status/switch:${switch.relay}";
|
||||
value_template = "{{ value_json.output }}";
|
||||
state_on = true;
|
||||
state_off = false;
|
||||
command_topic = "shellies/${switch.id}/rpc";
|
||||
payload_on = "{\"id\":${switch.relay}, \"src\":\"homeassistant\", \"method\":\"Switch.Set\", \"params\":{\"id\":${switch.relay}, \"on\":true}}";
|
||||
payload_off = "{\"id\":${switch.relay}, \"src\":\"homeassistant\", \"method\":\"Switch.Set\", \"params\":{\"id\":${switch.relay}, \"on\":false}}";
|
||||
availability_topic = "shellies/${switch.id}/online";
|
||||
payload_available = "true";
|
||||
payload_not_available = "false";
|
||||
}
|
||||
) proswitches)
|
||||
];
|
||||
light = builtins.map (bulb:
|
||||
let
|
||||
unique_id = builtins.replaceStrings [" "] ["_"] bulb.name;
|
||||
in {
|
||||
name = bulb.name;
|
||||
unique_id = "${unique_id}";
|
||||
schema = "template";
|
||||
state_topic = "shellies/shellycolorbulb-${bulb.id}/color/0/status";
|
||||
state_template = "{% if value_json.ison %}on{% else %}off{% endif %}";
|
||||
command_topic = "shellies/shellycolorbulb-${bulb.id}/color/0/set";
|
||||
command_on_template = ''
|
||||
{
|
||||
"turn": "on",
|
||||
"effect": 0,
|
||||
|
||||
{%- if red is defined and green is defined and blue is defined -%}
|
||||
"mode": "color",
|
||||
"red": {{ red }},
|
||||
"green": {{ green }},
|
||||
"blue": {{ blue }},
|
||||
{%- endif -%}
|
||||
|
||||
{%- if brightness is defined -%}
|
||||
"gain": {{brightness | float | multiply(0.3922) | round(0)}},
|
||||
"brightness": {{brightness | float | multiply(0.3922) | round(0)}},
|
||||
{%- endif -%}
|
||||
|
||||
{% if color_temp is defined %}
|
||||
"mode": "white",
|
||||
"temp":{{ (1/(color_temp | float)) | multiply(1000000) | round(0) }},
|
||||
{% endif %}
|
||||
}
|
||||
'';
|
||||
command_off_template = ''
|
||||
{
|
||||
"turn": "off"
|
||||
}
|
||||
'';
|
||||
brightness_template = "{{ value_json.brightness | float | multiply(2.55) | round(0) }}";
|
||||
color_temp_template = "{{ 1000000 | multiply(1/(value_json.temp | float)) | round(0) }}";
|
||||
red_template = "{{ value_json.red }}";
|
||||
green_template = "{{ value_json.green }}";
|
||||
blue_template = "{{ value_json.blue }}";
|
||||
max_mireds = 333;
|
||||
min_mireds = 154;
|
||||
}) colorbulbs;
|
||||
};
|
||||
};
|
||||
}
|
||||
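To illustrate what the proswitches mapping above produces, here is the concrete MQTT traffic for the "TV Switch" entry (id shelly1pmminig3-34b7da8b64c8, relay "0"), obtained by substituting that entry into the templates; nothing here goes beyond the Gen2 RPC convention the config already assumes:

  state topic:    shellies/shelly1pmminig3-34b7da8b64c8/status/switch:0   (state read from value_json.output)
  command topic:  shellies/shelly1pmminig3-34b7da8b64c8/rpc
  payload_on:     {"id":0, "src":"homeassistant", "method":"Switch.Set", "params":{"id":0, "on":true}}
  availability:   shellies/shelly1pmminig3-34b7da8b64c8/online            (payloads "true" / "false")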
116
hosts/fw-new/modules/home-assistant/sleep.nix
Normal file
@@ -0,0 +1,116 @@
|
||||
{
|
||||
services.home-assistant.config = {
|
||||
"automation wakeup" = {
|
||||
alias = "wakeup";
|
||||
trigger = {
|
||||
platform = "time";
|
||||
at = "input_datetime.wakeup";
|
||||
};
|
||||
action = [
|
||||
{
|
||||
service = "switch.turn_on";
|
||||
entity_id = "switch.coffee";
|
||||
}
|
||||
{
|
||||
delay = 1700;
|
||||
}
|
||||
{
|
||||
service = "script.turn_on";
|
||||
target = {
|
||||
entity_id = "script.turn_on_circuits";
|
||||
};
|
||||
}
|
||||
];
|
||||
};
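# The time trigger above assumes an `input_datetime.wakeup` helper entity
# already exists (e.g. created via the Home Assistant UI). If it does not, a
# minimal sketch of declaring it in this config set would be roughly:
#   input_datetime.wakeup = {
#     name = "Wakeup time";
#     has_time = true;
#   };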
|
||||
"automation sleep" = {
|
||||
alias = "sleep";
|
||||
trigger = [
|
||||
{
|
||||
platform = "event";
|
||||
event_type = "shelly.click";
|
||||
event_data = {
|
||||
device = "shellybutton1-E8DB84AA196D";
|
||||
};
|
||||
}
|
||||
{
|
||||
platform = "event";
|
||||
event_type = "shelly.click";
|
||||
event_data = {
|
||||
device = "shellybutton1-E8DB84AA136D";
|
||||
};
|
||||
}
|
||||
];
|
||||
action = [
|
||||
{
|
||||
choose = [
|
||||
{
|
||||
conditions = [ "{{ trigger.event.data.click_type == \"long\" }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "script.turn_on";
|
||||
target = {
|
||||
entity_id = "script.turn_off_everything";
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
};
|
||||
script = {
|
||||
turn_off_everything = {
|
||||
sequence = [
|
||||
{
|
||||
service = "light.turn_off";
|
||||
entity_id = "all";
|
||||
}
|
||||
{
|
||||
service = "switch.turn_off";
|
||||
entity_id = "switch.coffee";
|
||||
}
|
||||
{
|
||||
service = "switch.turn_off";
|
||||
entity_id = "switch.78_8c_b5_fe_41_62_port_2_poe";
|
||||
}
|
||||
{
|
||||
service = "switch.turn_off";
|
||||
entity_id = "switch.78_8c_b5_fe_41_62_port_3_poe";
|
||||
}
|
||||
{
|
||||
service = "switch.turn_off";
|
||||
entity_id = "switch.hallway_circuit";
|
||||
}
|
||||
# TODO: needs to stay on because the phone won't charge otherwise
|
||||
# {
|
||||
# service = "switch.turn_off";
|
||||
# entity_id = "switch.bathroom_circuit";
|
||||
# }
|
||||
];
|
||||
};
|
||||
turn_on_circuits = {
|
||||
sequence = [
|
||||
{
|
||||
service = "switch.turn_on";
|
||||
entity_id = "switch.bathroom_circuit";
|
||||
}
|
||||
{
|
||||
delay = 60;
|
||||
}
|
||||
{
|
||||
service = "switch.turn_on";
|
||||
entity_id = "switch.hallway_circuit";
|
||||
}
|
||||
{
|
||||
service = "switch.turn_on";
|
||||
entity_id = "switch.78_8c_b5_fe_41_62_port_2_poe";
|
||||
}
|
||||
{
|
||||
service = "switch.turn_on";
|
||||
entity_id = "switch.78_8c_b5_fe_41_62_port_3_poe";
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
31
hosts/fw-new/modules/home-assistant/snapcast.nix
Normal file
@@ -0,0 +1,31 @@
{
  services.home-assistant = {
    extraComponents = [ "snapcast" ];
    config = {
      "automation piano" = {
        alias = "piano";
        trigger = {
          platform = "state";
          entity_id = "media_player.music_piano_snapcast_client";
          attribute = "is_volume_muted";
        };
        condition = [
          {
            condition = "template";
            value_template = "{{ trigger.from_state.state != 'unavailable' }}";
          }
          {
            condition = "template";
            value_template = "{{ state_attr('media_player.music_piano_snapcast_client', 'is_volume_muted') == true or state_attr('media_player.music_piano_snapcast_client', 'is_volume_muted') == false }}";
          }
        ];
        action = {
          service = "switch.turn_on";
          target = {
            entity_id = "switch.piano_switch_power";
          };
        };
      };
    };
  };
}
8
hosts/fw-new/modules/microvm.nix
Normal file
@@ -0,0 +1,8 @@
|
||||
{ nixpkgs, ...}:
|
||||
{
|
||||
imports = [ (builtins.fetchGit {
|
||||
url = "https://github.com/astro/microvm.nix";
|
||||
} + "/nixos-modules/host") ];
|
||||
|
||||
systemd.network.networks."31-server".matchConfig.Name = [ "vm-*" ];
|
||||
}
|
||||
59
hosts/fw-new/modules/mopidy.nix
Normal file
@@ -0,0 +1,59 @@
|
||||
{ pkgs, lib, ... }:
|
||||
let
|
||||
mopidy-autoplay = pkgs.python3Packages.buildPythonApplication rec {
|
||||
pname = "Mopidy-Autoplay";
|
||||
version = "0.2.3";
|
||||
|
||||
src = pkgs.python3Packages.fetchPypi {
|
||||
inherit pname version;
|
||||
sha256 = "sha256-E2Q+Cn2LWSbfoT/gFzUfChwl67Mv17uKmX2woFz/3YM=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
pkgs.mopidy
|
||||
] ++ (with pkgs.python3Packages; [
|
||||
configobj
|
||||
]);
|
||||
|
||||
# no tests implemented
|
||||
doCheck = false;
|
||||
|
||||
meta = with lib; {
|
||||
homepage = "https://codeberg.org/sph/mopidy-autoplay";
|
||||
};
|
||||
};
|
||||
in
|
||||
{
|
||||
services.mopidy = {
|
||||
enable = true;
|
||||
extensionPackages = [ pkgs.mopidy-iris pkgs.mopidy-tunein mopidy-autoplay ];
|
||||
configuration = ''
|
||||
[audio]
|
||||
output = audioresample ! audioconvert ! audio/x-raw,rate=48000,channels=2,format=S16LE ! wavenc ! filesink location=/run/snapserver/mopidy
|
||||
|
||||
[file]
|
||||
enabled = false
|
||||
|
||||
[autoplay]
|
||||
enabled = true
|
||||
'';
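# The wav output above is written to /run/snapserver/mopidy, which matches the
# pipe stream configured in modules/snapserver.nix, so snapserver picks the
# Mopidy audio up from there.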
|
||||
};
|
||||
|
||||
services.nginx.virtualHosts."mopidy.cloonar.com" = {
|
||||
forceSSL = true;
|
||||
enableACME = true;
|
||||
acmeRoot = null;
|
||||
extraConfig = ''
|
||||
proxy_buffering off;
|
||||
'';
|
||||
locations."/".extraConfig = ''
|
||||
proxy_pass http://127.0.0.1:6680;
|
||||
proxy_set_header Host $host;
|
||||
proxy_redirect http:// https://;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
'';
|
||||
};
|
||||
}
|
||||
36
hosts/fw-new/modules/mosquitto.nix
Normal file
@@ -0,0 +1,36 @@
|
||||
{ config, pkgs, ... }:
|
||||
|
||||
{
|
||||
services.mosquitto = {
|
||||
enable = true;
|
||||
|
||||
listeners = [
|
||||
{
|
||||
users."espresense" = {
|
||||
password = "insecure-password";
|
||||
acl = [ "readwrite espresense/#" ];
|
||||
};
|
||||
users."home-assistant" = {
|
||||
hashedPassword = "$7$101$7uaagoQWQ3ICJ/wg$5cWZs4ae4DjToe44bOzpDopPv1kRaaVD+zF6BE64yDJH2/MBqXfD6f2/o9M/65ArhV92DAK+txXRYsEcZLl45A==";
|
||||
acl = [ "readwrite #" ];
|
||||
};
|
||||
users."ps5-mqtt" = {
|
||||
password = "insecure-password";
|
||||
acl = [ "readwrite #" ];
|
||||
};
|
||||
users."shairport-mqtt" = {
|
||||
password = "insecure-password";
|
||||
acl = [ "readwrite #" ];
|
||||
};
|
||||
users."shelly" = {
|
||||
password = "password";
|
||||
acl = [ "readwrite shellies/#" ];
|
||||
};
|
||||
}
|
||||
];
|
||||
};
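# The plain-text `password` entries end up in the world-readable Nix store;
# `hashedPassword` (as used for home-assistant) or `hashedPasswordFile` avoids
# that. A hash can be generated with the mosquitto tools, e.g.:
#   nix-shell -p mosquitto --run 'mosquitto_passwd -c /tmp/pw someuser'
#   # then copy the part after "someuser:" into hashedPassword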
|
||||
|
||||
# networking.firewall = {
|
||||
# allowedTCPPorts = [ 1883 ];
|
||||
# };
|
||||
}
|
||||
110
hosts/fw-new/modules/networking.nix
Normal file
@@ -0,0 +1,110 @@
|
||||
{ ... }: {
|
||||
boot.kernel.sysctl = {
|
||||
# Enable IPv4 forwarding so traffic can be routed between the VLANs.
|
||||
"net.ipv4.conf.all.forwarding" = true;
|
||||
# IPv6 forwarding is not enabled.
|
||||
"net.ipv6.conf.all.forwarding" = false;
|
||||
};
|
||||
|
||||
systemd.network = {
|
||||
enable = true;
|
||||
wait-online.anyInterface = true;
|
||||
links = {
|
||||
"10-wan" = {
|
||||
matchConfig.PermanentMACAddress = "c0:74:2b:fd:9a:7f";
|
||||
linkConfig.Name = "wan";
|
||||
};
|
||||
};
|
||||
netdevs = {
|
||||
"30-server".netdevConfig = {
|
||||
Kind = "bridge";
|
||||
Name = "server";
|
||||
};
|
||||
};
|
||||
networks = {
|
||||
"31-server" = {
|
||||
matchConfig.Name = [ "vserver" ];
|
||||
# Attach to the bridge that was configured above
|
||||
networkConfig.Bridge = "server";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
networking = {
|
||||
useDHCP = false;
|
||||
# Define VLANS
|
||||
nameservers = [ "10.42.97.1" ];
|
||||
# resolvconf.enable = false;
|
||||
vlans = {
|
||||
lan = {
|
||||
id = 96;
|
||||
interface = "enP3p49s0";
|
||||
};
|
||||
vserver = {
|
||||
id = 97;
|
||||
interface = "enP3p49s0";
|
||||
};
|
||||
multimedia = {
|
||||
id = 98;
|
||||
interface = "enP3p49s0";
|
||||
};
|
||||
smart = {
|
||||
id = 99;
|
||||
interface = "enP3p49s0";
|
||||
};
|
||||
infrastructure = {
|
||||
id = 100;
|
||||
interface = "enP3p49s0";
|
||||
};
|
||||
guest = {
|
||||
id = 111;
|
||||
interface = "enP3p49s0";
|
||||
};
|
||||
};
|
||||
|
||||
interfaces = {
|
||||
# Don't request DHCP on the physical interfaces
|
||||
|
||||
enP3p49s0.useDHCP = false;
|
||||
|
||||
# wan uses DHCP; the VLAN interfaces get static addresses
|
||||
wan.useDHCP = true;
|
||||
lan = {
useDHCP = false;
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.96.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
server = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.97.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
multimedia = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.98.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
smart = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.99.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
infrastructure = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.100.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
guest = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.111.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
29
hosts/fw-new/modules/omada.nix
Normal file
@@ -0,0 +1,29 @@
|
||||
{ config, pkgs, ... }: {
|
||||
users.users.omada = {
|
||||
isSystemUser = true;
|
||||
group = "omada";
|
||||
home = "/var/lib/omada";
|
||||
createHome = true;
|
||||
};
|
||||
users.groups.omada = { };
|
||||
users.groups.docker.members = [ "omada" ];
|
||||
|
||||
# TODO: check if we can run docker service as other user than root
|
||||
virtualisation = {
|
||||
oci-containers.containers = {
|
||||
omada = {
|
||||
autoStart = false;
|
||||
image = "mbentley/omada-controller:5.14.26.1";
|
||||
volumes = [
|
||||
"/var/lib/omada/data:/opt/tplink/EAPController/data"
|
||||
"/var/lib/omada/logs:/opt/tplink/EAPController/logs"
|
||||
];
|
||||
extraOptions = [
|
||||
"--network=server"
|
||||
"--mac-address=1a:c4:04:6e:29:bd"
|
||||
"--ip=10.42.97.2"
|
||||
];
|
||||
};
|
||||
};
|
||||
};
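# With autoStart = false the controller is only started on demand; with the
# podman backend the generated unit should be podman-omada.service, i.e.:
#   systemctl start podman-omada.service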
|
||||
}
|
||||
41
hosts/fw-new/modules/openconnect.nix
Normal file
@@ -0,0 +1,41 @@
|
||||
{ config, pkgs, stdenv, ... }:
|
||||
let
|
||||
vpnc = pkgs.writeShellScript "vpnc" ''
|
||||
export INTERNAL_IP4_DNS=
|
||||
. ${pkgs.vpnc-scripts}/bin/vpnc-script
|
||||
'';
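# Emptying INTERNAL_IP4_DNS before sourcing vpnc-script should keep the VPN
# from replacing the local resolver configuration (DNS stays on unbound).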
|
||||
in
|
||||
{
|
||||
sops.secrets.wrwks_vpn_key = {};
|
||||
|
||||
networking.openconnect.interfaces = {
|
||||
wrwks = {
|
||||
gateway = "vpn.wrwks.at";
|
||||
passwordFile = config.sops.secrets.wrwks_vpn_key.path;
|
||||
protocol = "anyconnect";
|
||||
user = "exdpolakovics@wrwks.local";
|
||||
extraOptions = {
|
||||
authgroup = "WRWKS-SSL-VPN-Service";
|
||||
script = "${vpnc}";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
systemd.services.openconnect-wrwks-keepalive = {
|
||||
serviceConfig.Type = "oneshot";
|
||||
path = with pkgs; [ bash inetutils ];
|
||||
script = ''
|
||||
ping -c 2 stage.wsw.at
|
||||
'';
|
||||
};
|
||||
|
||||
systemd.timers.openconnect-wrwks-keepalive = {
|
||||
wantedBy = [ "timers.target" ];
|
||||
partOf = [ "openconnect-wrwks-keepalive.service" ];
|
||||
timerConfig = {
|
||||
OnCalendar = "*:0/1";
|
||||
Unit = "openconnect-wrwks-keepalive.service";
|
||||
};
|
||||
};
|
||||
}
|
||||
44
hosts/fw-new/modules/palworld.nix
Normal file
@@ -0,0 +1,44 @@
|
||||
{ config, pkgs, ... }:
|
||||
|
||||
{
|
||||
virtualisation.oci-containers.backend = "podman";
|
||||
virtualisation.oci-containers.containers = {
|
||||
palworld = {
|
||||
image = "thijsvanloef/palworld-server-docker:latest";
|
||||
autoStart = false;
|
||||
ports = [
|
||||
"8211:8211/udp"
|
||||
"27015:27015/udp"
|
||||
];
|
||||
environmentFiles = [
|
||||
config.sops.secrets.palworld.path
|
||||
];
|
||||
volumes = [
|
||||
"/var/lib/palworld/:/palworld/"
|
||||
];
|
||||
};
|
||||
};
|
||||
|
||||
systemd.timers."restart-palworld" = {
|
||||
wantedBy = [ "timers.target" ];
|
||||
timerConfig = {
|
||||
OnCalendar = "*-*-* 3:00:00";
|
||||
Unit = "restart-palworld.service";
|
||||
};
|
||||
};
|
||||
|
||||
systemd.services."restart-palworld" = {
|
||||
script = ''
|
||||
set -eu
|
||||
if ${pkgs.systemd}/bin/systemctl is-active --quiet podman-palworld.service; then
|
||||
${pkgs.systemd}/bin/systemctl restart podman-palworld.service
|
||||
fi
|
||||
'';
|
||||
serviceConfig = {
|
||||
Type = "oneshot";
|
||||
User = "root";
|
||||
};
|
||||
};
|
||||
|
||||
sops.secrets.palworld = {};
|
||||
}
|
||||
77
hosts/fw-new/modules/podman.nix
Normal file
@@ -0,0 +1,77 @@
|
||||
{ pkgs, ... }:
|
||||
let
|
||||
cids = import ../modules/staticids.nix;
|
||||
json = pkgs.formats.json { };
|
||||
|
||||
update-containers = pkgs.writeShellScriptBin "update-containers" ''
|
||||
SUDO=""
|
||||
if [[ $(id -u) -ne 0 ]]; then
|
||||
SUDO="sudo"
|
||||
fi
|
||||
|
||||
images=$($SUDO ${pkgs.podman}/bin/podman ps -a --format="{{.Image}}" | sort -u)
|
||||
|
||||
for image in $images
|
||||
do
|
||||
$SUDO ${pkgs.podman}/bin/podman pull $image
|
||||
done
|
||||
'';
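# Note: `podman pull` only refreshes the stored images; running containers keep
# using the old image until their podman-<name>.service units are restarted.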
|
||||
in {
|
||||
users.groups.podman.gid = cids.gids.podman;
|
||||
virtualisation = {
|
||||
# containers.containersConf.settings = {
|
||||
# containers.dns_servers = [ "10.42.97.1" ];
|
||||
# };
|
||||
podman = {
|
||||
enable = true;
|
||||
dockerCompat = true;
|
||||
# defaultNetwork.settings = {
|
||||
# dns_enabled = true; # Enable DNS resolution in the podman network.
|
||||
# };
|
||||
};
|
||||
};
|
||||
|
||||
environment.etc."containers/networks/server.json" = {
|
||||
source = json.generate "server.json" ({
|
||||
name = "server";
|
||||
id = "d3a55d6bcc28571c124b4e65cdf1831339045d296858f79e7130fa70da9c0904";
|
||||
driver = "bridge";
|
||||
network_interface = "server";
|
||||
ipv6_enabled = false;
|
||||
internal = false;
|
||||
dns_enabled = false;
|
||||
subnets = [
|
||||
{
|
||||
subnet = "10.42.97.0/24";
|
||||
gateway = "10.42.97.1";
|
||||
}
|
||||
];
|
||||
ipam_options = {
|
||||
driver = "host-local";
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
systemd.timers = {
|
||||
# ...
|
||||
updatecontainers = {
|
||||
timerConfig = {
|
||||
Unit = "updatecontainers.service";
|
||||
OnCalendar = "02:00";
|
||||
};
|
||||
wantedBy = [ "timers.target" ];
|
||||
};
|
||||
# ...
|
||||
};
|
||||
|
||||
systemd.services = {
|
||||
# ...
|
||||
updatecontainers = {
|
||||
serviceConfig = {
|
||||
Type = "oneshot";
|
||||
ExecStart = "${update-containers}/bin/update-containers";
|
||||
};
|
||||
};
|
||||
# ...
|
||||
};
|
||||
}
|
||||
20
hosts/fw-new/modules/postgresql.nix
Normal file
@@ -0,0 +1,20 @@
|
||||
{ config, pkgs, ... }:
|
||||
|
||||
{
|
||||
services.postgresql = {
|
||||
enable = true;
|
||||
ensureDatabases = [ "mydatabase" ];
|
||||
|
||||
identMap = ''
|
||||
# ArbitraryMapName systemUser DBUser
|
||||
superuser_map root postgres
|
||||
superuser_map postgres postgres
|
||||
# Let other names login as themselves
|
||||
superuser_map /^(.*)$ \1
|
||||
'';
|
||||
authentication = pkgs.lib.mkOverride 10 ''
|
||||
#type database DBuser auth-method optional_ident_map
|
||||
local sameuser all peer map=superuser_map
|
||||
'';
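# With this map, roughly: root and postgres can open a superuser session, e.g.
#   sudo -u postgres psql
# and every other system user may connect (peer-authenticated) to the database
# named after their own user.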
|
||||
};
|
||||
};
|
||||
58
hosts/fw-new/modules/setupnetwork.nix
Normal file
@@ -0,0 +1,58 @@
|
||||
{ ... }: {
|
||||
networking = {
|
||||
vlans = {
|
||||
setup = {
|
||||
id = 110;
|
||||
interface = "enp5s0";
|
||||
};
|
||||
};
|
||||
|
||||
interfaces = {
|
||||
setup = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.110.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
services.kea.dhcp4 = {
|
||||
settings = {
|
||||
interfaces-config = {
|
||||
interfaces = [
|
||||
"setup"
|
||||
];
|
||||
};
|
||||
subnet4 = [
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.110.100 - 10.42.110.240";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.110.0/24";
|
||||
interface = "setup";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.110.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name";
|
||||
data = "cloonar.com";
|
||||
}
|
||||
{
|
||||
name = "domain-search";
|
||||
data = "cloonar.com";
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "10.42.97.1";
|
||||
}
|
||||
];
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
}
|
||||
133
hosts/fw-new/modules/snapserver.nix
Normal file
@@ -0,0 +1,133 @@
|
||||
{ pkgs, config, python3Packages, ... }:
|
||||
let
|
||||
domain = "snapcast.cloonar.com";
|
||||
|
||||
snapweb = pkgs.stdenv.mkDerivation {
|
||||
pname = "snapweb";
|
||||
version = "0.8";
|
||||
|
||||
src = pkgs.fetchzip {
|
||||
url = "https://github.com/badaix/snapweb/releases/download/v0.8.0/snapweb.zip";
|
||||
sha256 = "sha256-IpT1pcuzcM8kqWJUX3xxpRQHlfPNsrwhemLmY0PyzjI=";
|
||||
stripRoot = false;
|
||||
};
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out
|
||||
cp -r $src/* $out/
|
||||
'';
|
||||
};
|
||||
in
|
||||
{
|
||||
security.acme.certs."${domain}" = {
|
||||
group = "nginx";
|
||||
};
|
||||
|
||||
containers.snapcast = {
|
||||
autoStart = true;
|
||||
ephemeral = false; # because of ssh key
|
||||
privateNetwork = true;
|
||||
hostBridge = "server";
|
||||
hostAddress = "10.42.97.1";
|
||||
localAddress = "10.42.97.21/24";
|
||||
bindMounts = {
|
||||
"/var/lib/acme/snapcast/" = {
|
||||
hostPath = "${config.security.acme.certs.${domain}.directory}";
|
||||
isReadOnly = true;
|
||||
};
|
||||
};
|
||||
config = { lib, config, pkgs, python3Packages, ... }:
|
||||
let
|
||||
shairport-sync = pkgs.shairport-sync.overrideAttrs (_: {
|
||||
configureFlags = [
|
||||
"--with-alsa" "--with-pipe" "--with-pa" "--with-stdout"
|
||||
"--with-avahi" "--with-ssl=openssl" "--with-soxr"
|
||||
"--without-configfiles"
|
||||
"--sysconfdir=/etc"
|
||||
"--with-metadata"
|
||||
];
|
||||
});
|
||||
in
|
||||
{
|
||||
networking = {
|
||||
hostName = "snapcast";
|
||||
useHostResolvConf = false;
|
||||
defaultGateway = {
|
||||
address = "10.42.96.1";
|
||||
interface = "eth0";
|
||||
};
|
||||
nameservers = [ "10.42.97.1" ];
|
||||
firewall.enable = false;
|
||||
};
|
||||
environment.etc = {
|
||||
# Creates /etc/shairport
|
||||
shairport = {
|
||||
text = ''
|
||||
// shairport-sync metadata settings
|
||||
metadata =
|
||||
{
|
||||
enabled = "yes"; // set this to yes to get Shairport Sync to solicit metadata from the source and to pass it on via a pipe
|
||||
include_cover_art = "yes"; // set to "yes" to get Shairport Sync to solicit cover art from the source and pass it via the pipe. You must also set "enabled" to "yes".
|
||||
cover_art_cache_directory = "/tmp/shairport-sync/.cache/coverart"; // artwork will be stored in this directory if the dbus or MPRIS interfaces are enabled or if the MQTT client is in use. Set it to "" to prevent caching, which may be useful on some systems
|
||||
pipe_name = "/tmp/shairport-sync-metadata";
|
||||
pipe_timeout = 5000; // wait for this number of milliseconds for a blocked pipe to unblock before giving up
|
||||
};
|
||||
'';
|
||||
|
||||
# The UNIX file mode bits
|
||||
mode = "0440";
|
||||
};
|
||||
};
|
||||
|
||||
services.snapserver = {
|
||||
enable = true;
|
||||
codec = "flac";
|
||||
http.enable = true;
|
||||
http.docRoot = "${snapweb}/";
|
||||
streams.mopidy = {
|
||||
type = "pipe";
|
||||
location = "/run/snapserver/mopidy";
|
||||
};
|
||||
streams.airplay = {
|
||||
type = "airplay";
|
||||
location = "${shairport-sync}/bin/shairport-sync";
|
||||
query = {
|
||||
devicename = "Multi Room New";
|
||||
port = "5000";
|
||||
params = "--mdns=avahi";
|
||||
};
|
||||
};
|
||||
streams.mixed = {
|
||||
type = "meta";
|
||||
location = "/airplay/mopidy";
|
||||
};
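# In a snapcast "meta" stream the listed sources are prioritised in order, so
# airplay should take over from mopidy whenever something is streamed to it.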
|
||||
};
|
||||
|
||||
services.avahi.enable = true;
|
||||
services.avahi.publish.enable = true;
|
||||
services.avahi.publish.userServices = true;
|
||||
|
||||
services.nginx.enable = true;
|
||||
services.nginx.virtualHosts."snapcast.cloonar.com" = {
|
||||
sslCertificate = "/var/lib/acme/snapcast/fullchain.pem";
|
||||
sslCertificateKey = "/var/lib/acme/snapcast/key.pem";
|
||||
sslTrustedCertificate = "/var/lib/acme/snapcast/chain.pem";
|
||||
forceSSL = true;
|
||||
extraConfig = ''
|
||||
proxy_buffering off;
|
||||
'';
|
||||
locations."/".extraConfig = ''
|
||||
proxy_pass http://127.0.0.1:1780;
|
||||
proxy_set_header Host $host;
|
||||
proxy_redirect http:// https://;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
'';
|
||||
};
|
||||
|
||||
system.stateVersion = "23.05";
|
||||
};
|
||||
};
|
||||
}
|
||||
16
hosts/fw-new/modules/staticids.nix
Normal file
@@ -0,0 +1,16 @@
{
  uids = {
    unbound = 10001;
    gitea = 10002;
    gitea-runner = 10003;
    podman = 10004;
    foundry-vtt = 10005;
  };
  gids = {
    unbound = 10001;
    gitea = 10002;
    gitea-runner = 10003;
    podman = 10004;
    foundry-vtt = 10005;
  };
}
58
hosts/fw-new/modules/sysbox.nix
Normal file
@@ -0,0 +1,58 @@
|
||||
{ pkgs, ... }:
|
||||
{
|
||||
systemd.services.sysbox = {
|
||||
description = "Sysbox container runtime";
|
||||
documentation = [ "https://github.com/nestybox/sysbox" ];
|
||||
bindsTo = [ "sysbox-mgr.service" "sysbox-fs.service" ];
|
||||
after = [ "sysbox-mgr.service" "sysbox-fs.service" ];
|
||||
before = [ "docker.service" "containerd.service" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
serviceConfig = {
|
||||
Type = "exec";
|
||||
ExecStart = ''
|
||||
/bin/sh -c "${pkgs.sysbox}/bin/sysbox-runc --version && ${pkgs.sysbox}/bin/sysbox-mgr --version && ${pkgs.sysbox}/bin/sysbox-fs --version && ${pkgs.coreutils-full}/bin/sleep infinity"
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
systemd.services.sysbox-fs = {
|
||||
description = "sysbox-fs (part of the Sysbox container runtime)";
|
||||
partOf = [ "sysbox.service" ];
|
||||
after = [ "sysbox-mgr.service" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
serviceConfig = {
|
||||
Type = "notify";
|
||||
ExecStart = "${pkgs.sysbox}/bin/sysbox-fs";
|
||||
TimeoutStartSec = 10;
|
||||
TimeoutStopSec = 10;
|
||||
StartLimitInterval = 0;
|
||||
NotifyAccess = "main";
|
||||
OOMScoreAdjust = -500;
|
||||
# The number of files opened by sysbox-fs is a function of the number of
|
||||
# containers and the workloads within them. Thus we set the limit to
|
||||
# infinite so as to prevent "too many open files" errors.
|
||||
LimitNOFILE = "infinity";
|
||||
LimitNPROC = "infinity";
|
||||
};
|
||||
};
|
||||
|
||||
systemd.services.sysbox-mgr = {
|
||||
description = "sysbox-mgr (part of the Sysbox container runtime)";
|
||||
partOf = [ "sysbox.service" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
serviceConfig = {
|
||||
Type = "notify";
|
||||
ExecStart = "${pkgs.sysbox}/bin/sysbox-mgr";
|
||||
TimeoutStartSec = 45;
|
||||
TimeoutStopSec = 90;
|
||||
StartLimitInterval = 0;
|
||||
NotifyAccess = "main";
|
||||
OOMScoreAdjust = -500;
|
||||
# The number of files opened by sysbox-mgr is a function of the number of
|
||||
# containers and the workloads within them. Thus we set the limit to
|
||||
# infinite so as to prevent "too many open files" errors.
|
||||
LimitNOFILE = "infinity";
|
||||
LimitNPROC = "infinity";
|
||||
};
|
||||
};
|
||||
}
|
||||
330
hosts/fw-new/modules/unbound.nix
Normal file
@@ -0,0 +1,330 @@
|
||||
{ config, pkgs, ... }:
|
||||
let
|
||||
cids = import ../modules/staticids.nix;
|
||||
domain = "ns.cloonar.com";
|
||||
|
||||
adblockLocalZones = pkgs.stdenv.mkDerivation {
|
||||
name = "unbound-zones-adblock";
|
||||
|
||||
src = (pkgs.fetchFromGitHub {
|
||||
owner = "StevenBlack";
|
||||
repo = "hosts";
|
||||
rev = "3.0.0";
|
||||
sha256 = "01g6pc9s1ah2w1cbf6bvi424762hkbpbgja9585a0w99cq0n6bxv";
|
||||
} + "/hosts");
|
||||
|
||||
phases = [ "installPhase" ];
|
||||
|
||||
installPhase = ''
|
||||
${pkgs.gawk}/bin/awk '{sub(/\r$/,"")} {sub(/^127\.0\.0\.1/,"0.0.0.0")} BEGIN { OFS = "" } NF == 2 && $1 == "0.0.0.0" { print "local-zone: \"", $2, "\" static"}' $src | tr '[:upper:]' '[:lower:]' | sort -u > $out
|
||||
'';
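# The generated file contains one unbound directive per blocked host, e.g.:
#   local-zone: "ads.example.invalid" static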
|
||||
|
||||
};
|
||||
cfg = {
|
||||
remote-control.control-enable = true;
|
||||
server = {
|
||||
include = [
|
||||
"\"${adblockLocalZones}\""
|
||||
];
|
||||
interface = [ "0.0.0.0" "::0" ];
|
||||
interface-automatic = "yes";
|
||||
access-control = [
|
||||
"127.0.0.0/8 allow"
|
||||
"10.42.96.0/24 allow"
|
||||
"10.42.97.0/24 allow"
|
||||
"10.42.98.0/24 allow"
|
||||
"10.42.99.0/24 allow"
|
||||
"10.42.101.0/24 allow"
|
||||
"0.0.0.0/0 allow"
|
||||
];
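# Note: the final "0.0.0.0/0 allow" entry makes the subnet-specific rules above
# redundant and allows queries from any IPv4 source that can reach the
# resolver; only the firewall limits who that is.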
|
||||
tls-cert-bundle = "/etc/ssl/certs/ca-certificates.crt";
|
||||
local-zone = "\"cloonar.com\" transparent";
|
||||
local-data = [
|
||||
"\"localhost A 127.0.0.1\""
|
||||
"\"localhost.cloonar.com A 127.0.0.1\""
|
||||
"\"localhost AAAA ::1\""
|
||||
"\"localhost.cloonar.com AAAA ::1\""
|
||||
"\"fw.cloonar.com A 10.42.97.1\""
|
||||
"\"fw A 10.42.97.1\""
|
||||
|
||||
"\"pc.cloonar.com IN A 10.42.96.5\""
|
||||
"\"omada.cloonar.com IN A 10.42.97.2\""
|
||||
"\"switch.cloonar.com IN A 10.42.97.10\""
|
||||
"\"mopidy.cloonar.com IN A 10.42.97.21\""
|
||||
"\"deconz.cloonar.com IN A 10.42.97.22\""
|
||||
"\"brn30055c566237.cloonar.com IN A 10.42.96.100\""
|
||||
"\"snapcast.cloonar.com IN A 10.42.97.21\""
|
||||
"\"home-assistant.cloonar.com IN A 10.42.97.20\""
|
||||
"\"web-02.cloonar.com IN A 10.42.97.5\""
|
||||
"\"matrix.cloonar.com IN A 10.42.97.5\""
|
||||
"\"element.cloonar.com IN A 10.42.97.5\""
|
||||
"\"support.cloonar.com IN A 10.42.97.5\""
|
||||
"\"git.cloonar.com IN A 10.42.97.50\""
|
||||
"\"sync.cloonar.com IN A 10.42.97.51\""
|
||||
|
||||
"\"feeds.cloonar.com IN A 188.34.191.144\""
|
||||
# "\"paraclub.cloonar.dev IN A 49.12.244.139\""
|
||||
# "\"api.paraclub.cloonar.dev IN A 49.12.244.139\""
|
||||
# "\"module.paraclub.cloonar.dev IN A 49.12.244.139\""
|
||||
# "\"tandem.paraclub.cloonar.dev IN A 49.12.244.139\""
|
||||
|
||||
"\"stage.wsw.at IN A 10.254.235.22\""
|
||||
"\"prod.wsw.at IN A 10.254.217.23\""
|
||||
"\"piwik.wohnservice-wien.at IN A 10.254.240.109\""
|
||||
"\"wohnservice-wien.at IN A 10.254.240.109\""
|
||||
"\"mieterhilfe.at IN A 10.254.240.109\""
|
||||
"\"wohnpartner-wien.at IN A 10.254.240.109\""
|
||||
"\"new.wohnberatung-wien.at IN A 10.254.240.109\""
|
||||
"\"new.wohnpartner-wien.at IN A 10.254.240.109\""
|
||||
"\"wohnberatung-wien.at IN A 10.254.240.109\""
|
||||
"\"wienbautvor.at IN A 10.254.240.109\""
|
||||
"\"wienwohntbesser.at IN A 10.254.240.109\""
|
||||
"\"b.wohnservice-wien.at IN A 10.254.240.109\""
|
||||
"\"b.mieterhilfe.at IN A 10.254.240.109\""
|
||||
"\"b.wohnpartner-wien.at IN A 10.254.240.109\""
|
||||
"\"b.wohnberatung-wien.at IN A 10.254.240.109\""
|
||||
"\"b.wienbautvor.at IN A 10.254.240.109\""
|
||||
"\"b.wienwohntbesser.at IN A 10.254.240.109\""
|
||||
"\"a.wohnservice-wien.at IN A 10.254.240.109\""
|
||||
"\"a.wohnpartner-wien.at IN A 10.254.240.109\""
|
||||
"\"a.stage.wohnservice-wien.at IN A 10.254.240.110\""
|
||||
"\"a.stage.mieterhilfe.at IN A 10.254.240.110\""
|
||||
"\"a.stage.wohnpartner-wien.at IN A 10.254.240.110\""
|
||||
"\"a.stage.wohnberatung-wien.at IN A 10.254.240.110\""
|
||||
"\"a.stage.wienbautvor.at IN A 10.254.240.110\""
|
||||
"\"a.stage.wienwohntbesser.at IN A 10.254.240.110\""
|
||||
"\"b.stage.wohnservice-wien.at IN A 10.254.240.110\""
|
||||
"\"b.stage.mieterhilfe.at IN A 10.254.240.110\""
|
||||
"\"b.stage.wohnpartner-wien.at IN A 10.254.240.110\""
|
||||
"\"b.stage.new.wohnberatung-wien.at IN A 10.254.240.110\""
|
||||
"\"b.stage.new.wohnpartner-wien.at IN A 10.254.240.110\""
|
||||
"\"b.stage.wohnberatung-wien.at IN A 10.254.240.110\""
|
||||
"\"b.stage.wienbautvor.at IN A 10.254.240.110\""
|
||||
"\"b.stage.wienwohntbesser.at IN A 10.254.240.110\""
|
||||
"\"upgrade-staging.wohnservice-wien.at IN A 10.254.240.110\""
|
||||
"\"upgrade-staging.mieterhilfe.at IN A 10.254.240.110\""
|
||||
"\"upgrade-staging.wohnpartner-wien.at IN A 10.254.240.110\""
|
||||
"\"upgrade-staging.wohnberatung-wien.at IN A 10.254.240.110\""
|
||||
"\"upgrade-staging.wienbautvor.at IN A 10.254.240.110\""
|
||||
"\"upgrade-staging.wienwohntbesser.at IN A 10.254.240.110\""
|
||||
"\"conf.wrwks.at IN A 10.254.240.105\""
|
||||
|
||||
"\"web.hilgenberg-gmbh.de IN A 91.107.197.169\""
|
||||
|
||||
# gaming
|
||||
"\"foundry-vtt.cloonar.com IN A 10.42.97.5\""
|
||||
|
||||
"\"deconz.cloonar.multimedia IN A 10.42.97.22\""
|
||||
"\"metz.cloonar.multimedia IN A 10.42.99.10\""
|
||||
# "\"ps5.cloonar.multimedia IN A 10.42.99.12\""
|
||||
"\"xbox.cloonar.multimedia IN A 10.42.99.13\""
|
||||
# "\"switch.cloonar.multimedia IN A 10.42.99.14\""
|
||||
#living room
|
||||
"\"shellyuni-livingroom-1.cloonar.smart IN A 10.42.100.8\""
|
||||
"\"shellyswitch25-livingroom-1.cloonar.smart IN A 10.42.100.9\""
|
||||
"\"shellyplug-s-living-1.cloonar.smart IN A 10.42.100.10\""
|
||||
"\"shellyplug-s-living-2.cloonar.smart IN A 10.42.100.11\""
|
||||
# kitchen
|
||||
"\"shellyplug-s-kitchen-1.cloonar.smart IN A 10.42.100.17\""
|
||||
"\"shellyrgbw2-kitchen-1.cloonar.smart IN A 10.42.100.18\""
|
||||
#bedroom
|
||||
"\"shelly1-bedroom-1.cloonar.smart IN A 10.42.100.33\""
|
||||
"\"shellybutton1-bedroom-1.cloonar.smart IN A 10.42.100.34\""
|
||||
"\"shellybutton1-bedroom-2.cloonar.smart IN A 10.42.100.35\"" # todo
|
||||
"\"shellyrgbw2-bedroom-1.cloonar.smart IN A 10.42.100.36\""
|
||||
"\"shellyrgbw2-bedroom-2.cloonar.smart IN A 10.42.100.37\""
|
||||
"\"shellyrgbw2-bedroom-3.cloonar.smart IN A 10.42.100.38\""
|
||||
# bath
|
||||
"\"shellyswitch25-bath-1.cloonar.smart IN A 10.42.100.49\""
|
||||
"\"shelly1pm-bath-1.cloonar.smart IN A 10.42.100.52\""
|
||||
"\"shellyht-bath-1.cloonar.smart IN A 10.42.100.53\"" # todo
|
||||
# hallway
|
||||
"\"shelly1-hallway-1.cloonar.smart IN A 10.42.100.65\""
|
||||
"\"shellyem3.cloonar.smart IN A 10.42.100.70\""
|
||||
"\"shellypro-1.cloonar.smart IN A 10.42.100.71\""
|
||||
"\"shellypro-2.cloonar.smart IN A 10.42.100.72\""
|
||||
# toilet
|
||||
"\"shelly1-toilet-1.cloonar.smart IN A 10.42.100.81\""
|
||||
"\"shellybulbduo-toilet-1.cloonar.smart IN A 10.42.100.82\""
|
||||
# storage
|
||||
"\"shelly1-storage-1.cloonar.smart IN A 10.42.100.97\""
|
||||
"\"shellyplug-storage-1.cloonar.smart IN A 10.42.100.98\""
|
||||
"\"brn30055c566237.cloonar.multimedia IN A 10.42.99.100\""
|
||||
|
||||
"\"ddl-warez.to IN A 172.67.184.30\""
|
||||
"\"cdnjs.cloudflare.com IN A 104.17.24.14\""
|
||||
];
|
||||
local-data-ptr = [
|
||||
"\"127.0.0.1 localhost\""
|
||||
"\"::1 localhost\""
|
||||
"\"10.42.97.1 fw.cloonar.com\""
|
||||
"\"10.42.97.20 home-assistant.cloonar.com\""
|
||||
"\"10.42.97.21 snapcast.cloonar.com\""
|
||||
"\"10.42.97.22 deconz.cloonar.com\""
|
||||
"\"10.42.97.50 git.cloonar.com\""
|
||||
|
||||
"\"10.254.235.22 stage.wsw.at\""
|
||||
"\"10.254.217.23 prod.wsw.at\""
|
||||
"\"10.254.240.109 wohnservice-wien.at\""
|
||||
"\"10.254.240.110 a.stage.wohnservice-wien.at\""
|
||||
|
||||
"\"172.67.184.30 ddl-warez.to\""
|
||||
"\"104.17.24.14 cdnjs.cloudflare.com\""
|
||||
];
|
||||
# ssl-upstream = "yes";
|
||||
};
|
||||
forward-zone = [
|
||||
{
|
||||
name = "local.ghetto.at.";
|
||||
forward-tls-upstream = "no";
|
||||
forward-addr = [
|
||||
"10.43.97.1"
|
||||
];
|
||||
}
|
||||
{
|
||||
name = "ghetto.at.local.";
|
||||
forward-tls-upstream = "no";
|
||||
forward-addr = [
|
||||
"10.43.97.1"
|
||||
];
|
||||
}
|
||||
{
|
||||
name = "epicenter.works.";
|
||||
forward-tls-upstream = "no";
|
||||
forward-addr = [
|
||||
"10.50.60.1"
|
||||
];
|
||||
}
|
||||
{
|
||||
name = "akvorrat.at.";
|
||||
forward-tls-upstream = "no";
|
||||
forward-addr = [
|
||||
"10.50.60.1"
|
||||
];
|
||||
}
|
||||
{
|
||||
name = "epicenter.intra.";
|
||||
forward-tls-upstream = "no";
|
||||
forward-addr = [
|
||||
"10.14.1.1"
|
||||
];
|
||||
}
|
||||
{
|
||||
name = "intra.epicenter.works.";
|
||||
forward-tls-upstream = "no";
|
||||
forward-addr = [
|
||||
"10.14.1.1"
|
||||
];
|
||||
}
|
||||
{
|
||||
name = ".";
|
||||
forward-tls-upstream = "yes";
|
||||
forward-first = "no";
|
||||
forward-addr = [
|
||||
"9.9.9.9@853#dns9.quad9.net"
|
||||
"149.112.112.11@853#dns11.quad9.net"
|
||||
];
|
||||
}
|
||||
];
|
||||
};
|
||||
in {
|
||||
users.users.unbound = {
|
||||
group = "unbound";
|
||||
isSystemUser = true;
|
||||
uid = cids.uids.unbound;
|
||||
};
|
||||
users.groups.unbound = {
|
||||
gid = cids.gids.unbound;
|
||||
};
|
||||
|
||||
security.acme.certs."${domain}" = {
|
||||
group = "unbound";
|
||||
};
|
||||
security.acme.certs."fw.cloonar.com" = {
|
||||
group = "unbound";
|
||||
};
|
||||
|
||||
services.resolved.enable = false;
|
||||
|
||||
services.unbound = {
|
||||
enable = true;
|
||||
settings = cfg;
|
||||
};
|
||||
systemd.services.unbound-sync = {
|
||||
enable = true;
|
||||
path = with pkgs; [ unbound inotify-tools ];
|
||||
script = ''
|
||||
function readFile() {
|
||||
if [[ "''\$2" == "A" ]] ; then
|
||||
cat "''\$1" | tail -n +2 | while IFS=, read -r address hwaddr client_id valid_lifetime expire subnet_id fqdn_fwd fqdn_rev hostname state user_context
|
||||
do
|
||||
echo "''\${address},''\${hostname}"
|
||||
done
|
||||
else
|
||||
cat "''\$1" | tail -n +2 | while IFS=, read -r address duid valid_lifetime expire subnet_id pref_lifetime lease_type iaid prefix_len fqdn_fwd fqdn_rev hostname hwaddr state user_context hwtype hwaddr_source
|
||||
do
|
||||
echo "''\${address},''\${hostname}"
|
||||
done
|
||||
fi
|
||||
}
|
||||
|
||||
function readFileUnique() {
|
||||
readFile "''\$1" ''\$2 | uniq | while IFS=, read -r address hostname
|
||||
do
|
||||
if echo "''\${1}" | grep -Eq '.*\.(cloonar.com|cloonar.multimedia|cloonar.smart)'; then
|
||||
echo ''\${hostname} ''\$2 ''\${address}
|
||||
unbound-control local_data ''\${hostname} ''\$2 ''\${address} > /dev/null 2>&1
|
||||
if [[ "''\$2" == "A" ]] ; then
|
||||
echo ''\${address} | while IFS=. read -r ip0 ip1 ip2 ip3
|
||||
do
|
||||
unbound-control local_data ''\${ip3}.''\${ip2}.''\${ip1}.''\${ip0}.ip4.arpa. PTR ''\${hostname} > /dev/null 2>&1
|
||||
unbound-control local_data ''\${ip3}.''\${ip2}.''\${ip1}.''\${ip0}.in-addr.arpa. PTR ''\${hostname} > /dev/null 2>&1
|
||||
done
|
||||
fi
|
||||
else
|
||||
if [[ "''\$2" == "A" ]] ; then
|
||||
echo ''\${address} | while IFS=. read -r ip0 ip1 ip2 ip3
|
||||
do
|
||||
if [[ "''\${hostname}" != "" ]]; then
|
||||
domain=cloonar.com
|
||||
if [[ "''\${ip2}" == 99 ]]; then
|
||||
domain=cloonar.multimedia
|
||||
fi
|
||||
if [[ "''\${ip2}" == 100 ]]; then
|
||||
domain=cloonar.smart
|
||||
fi
|
||||
if [[ "''\${hostname}" != *. ]]; then
|
||||
unbound-control local_data ''\${hostname}.''\${domain} ''\$2 ''\${address} > /dev/null 2>&1
|
||||
else
|
||||
unbound-control local_data ''\${hostname}''\${domain} ''\$2 ''\${address} > /dev/null 2>&1
|
||||
fi
|
||||
|
||||
fi
|
||||
unbound-control local_data ''\${ip3}.''\${ip2}.''\${ip1}.''\${ip0}.ip4.arpa. PTR ''\${hostname} > /dev/null 2>&1
|
||||
unbound-control local_data ''\${ip3}.''\${ip2}.''\${ip1}.''\${ip0}.in-addr.arpa. PTR ''\${hostname} > /dev/null 2>&1
|
||||
done
|
||||
fi
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
function syncFile() {
|
||||
# readFileUnique "''\$1" "''\$2"
|
||||
while true; do
|
||||
readFileUnique "''\$1" "''\$2"
|
||||
sleep 10
|
||||
done
|
||||
}
|
||||
|
||||
syncFile "/var/lib/kea/dhcp4.leases" A &
|
||||
# syncFile "/var/lib/kea/dhcp6.leases" AAAA &
|
||||
wait
|
||||
'';
|
||||
wants = [ "network-online.target" "unbound.service" ];
|
||||
after = [ "network-online.target" "unbound.service" ];
|
||||
partOf = [ "unbound.service" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
};
|
||||
|
||||
networking.firewall.allowedUDPPorts = [ 53 5353 ];
|
||||
}
|
||||
39
hosts/fw-new/modules/update-containers.nix
Normal file
@@ -0,0 +1,39 @@
|
||||
{ config, pkgs, ... }:
|
||||
let
|
||||
update-containers = pkgs.writeShellScriptBin "update-containers" ''
|
||||
SUDO=""
|
||||
if [[ $(id -u) -ne 0 ]]; then
|
||||
SUDO="sudo"
|
||||
fi
|
||||
|
||||
images=$($SUDO ${pkgs.podman}/bin/podman ps -a --format="{{.Image}}" | sort -u)
|
||||
|
||||
for image in $images
|
||||
do
|
||||
$SUDO ${pkgs.podman}/bin/podman pull $image
|
||||
done
|
||||
'';
|
||||
in {
|
||||
systemd.timers = {
|
||||
# ...
|
||||
updatecontainers = {
|
||||
timerConfig = {
|
||||
Unit = "updatecontainers.service";
|
||||
OnCalendar = "02:00";
|
||||
};
|
||||
wantedBy = [ "timers.target" ];
|
||||
};
|
||||
# ...
|
||||
};
|
||||
|
||||
systemd.services = {
|
||||
# ...
|
||||
updatecontainers = {
|
||||
serviceConfig = {
|
||||
Type = "oneshot";
|
||||
ExecStart = "update-containers";
|
||||
};
|
||||
};
|
||||
# ...
|
||||
};
|
||||
}
|
||||
130
hosts/fw-new/modules/web/default.nix
Normal file
@@ -0,0 +1,130 @@
|
||||
{ lib, pkgs, config, ... }: let
|
||||
hostname = "web-02";
|
||||
json = pkgs.formats.json { };
|
||||
impermanence = builtins.fetchTarball "https://github.com/nix-community/impermanence/archive/master.tar.gz";
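# Fetching master is unpinned; a sketch of pinning it instead (hash is a
# placeholder, not the real value):
#   impermanence = builtins.fetchTarball {
#     url = "https://github.com/nix-community/impermanence/archive/<commit>.tar.gz";
#     sha256 = "<sha256>"; # placeholder
#   };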
|
||||
in {
|
||||
microvm.vms = {
|
||||
web = {
|
||||
pkgs = import pkgs.path {
|
||||
config = {
|
||||
permittedInsecurePackages = [
|
||||
# needed for matrix
|
||||
"olm-3.2.16"
|
||||
];
|
||||
};
|
||||
};
|
||||
config = {
|
||||
microvm = {
|
||||
mem = 4096;
|
||||
# hypervisor = "cloud-hypervisor";
|
||||
shares = [
|
||||
{
|
||||
source = "/nix/store";
|
||||
mountPoint = "/nix/.ro-store";
|
||||
tag = "ro-store";
|
||||
proto = "virtiofs";
|
||||
}
|
||||
{
|
||||
source = "/var/lib/microvms/persist/web-02";
|
||||
mountPoint = "/persist";
|
||||
tag = "persist";
|
||||
proto = "virtiofs";
|
||||
}
|
||||
];
|
||||
volumes = [
|
||||
{
|
||||
image = "rootfs.img";
|
||||
mountPoint = "/";
|
||||
size = 102400;
|
||||
}
|
||||
];
|
||||
interfaces = [
|
||||
{
|
||||
type = "tap";
|
||||
id = "vm-${hostname}";
|
||||
mac = "02:00:00:00:01:01";
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
imports = [
|
||||
"${impermanence}/nixos.nix"
|
||||
../../utils/modules/sops.nix
|
||||
../../utils/modules/lego/lego.nix
|
||||
# ../../utils/modules/borgbackup.nix
|
||||
|
||||
./zammad.nix
|
||||
./proxies.nix
|
||||
./matrix.nix
|
||||
];
|
||||
|
||||
time.timeZone = "Europe/Vienna";
|
||||
|
||||
systemd.network.networks."10-lan" = {
|
||||
matchConfig.PermanentMACAddress = "02:00:00:00:01:01";
|
||||
address = [ "10.42.97.5/24" ];
|
||||
gateway = [ "10.42.97.1" ];
|
||||
dns = [ "10.42.97.1" ];
|
||||
};
|
||||
|
||||
fileSystems."/persist".neededForBoot = lib.mkForce true;
|
||||
environment.persistence."/persist-local" = {
|
||||
directories = [
|
||||
"/var/lib/zammad"
|
||||
"/var/lib/postgresql"
|
||||
"/var/log"
|
||||
"/var/lib/systemd/coredump"
|
||||
];
|
||||
};
|
||||
|
||||
environment.systemPackages = with pkgs; [
|
||||
vim # my preferred editor
|
||||
];
|
||||
|
||||
networking.hostName = hostname;
|
||||
|
||||
services.openssh = {
|
||||
enable = true;
|
||||
hostKeys = [
|
||||
{
|
||||
path = "/persist/etc/ssh/ssh_host_ed25519_key";
|
||||
type = "ed25519";
|
||||
}
|
||||
{
|
||||
path = "/persist/etc/ssh/ssh_host_rsa_key";
|
||||
type = "rsa";
|
||||
bits = 4096;
|
||||
}
|
||||
];
|
||||
};
|
||||
users.users.root.openssh.authorizedKeys.keys = [
|
||||
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN/2SAFm50kraB1fepAizox/QRXxB7WbqVbH+5OPalDT47VIJGNKOKhixQoqhABHxEoLxdf/C83wxlCVlPV9poLfDgVkA3Lyt5r3tSFQ6QjjOJAgchWamMsxxyGBedhKvhiEzcr/Lxytnoz3kjDG8fqQJwEpdqMmJoMUfyL2Rqp16u+FQ7d5aJtwO8EUqovhMaNO7rggjPpV/uMOg+tBxxmscliN7DLuP4EMTA/FwXVzcFNbOx3K9BdpMRAaSJt4SWcJO2cS2KHA5n/H+PQI7nz5KN3Yr/upJN5fROhi/SHvK39QOx12Pv7FCuWlc+oR68vLaoCKYhnkl3DnCfc7A7"
|
||||
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIRQuPqH5fdX3KEw7DXzWEdO3AlUn1oSmtJtHB71ICoH Generated By Termius"
|
||||
];
|
||||
|
||||
services.nginx = {
|
||||
enable = true;
|
||||
recommendedTlsSettings = true;
|
||||
recommendedOptimisation = true;
|
||||
recommendedGzipSettings = true;
|
||||
recommendedProxySettings = true;
|
||||
};
|
||||
|
||||
# backups
|
||||
# borgbackup.repo = "u149513-sub2@u149513-sub2.your-backup.de:borg";
|
||||
|
||||
|
||||
sops.age.sshKeyPaths = [ "/persist/etc/ssh/ssh_host_ed25519_key" ];
|
||||
sops.defaultSopsFile = ./secrets.yaml;
|
||||
|
||||
networking.firewall = {
|
||||
enable = true;
|
||||
allowedTCPPorts = [ 22 80 443 ];
|
||||
};
|
||||
|
||||
|
||||
system.stateVersion = "22.05";
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
509
hosts/fw-new/modules/web/matrix.nix
Normal file
@@ -0,0 +1,509 @@
|
||||
{ pkgs, lib, config, ... }:
|
||||
let
|
||||
hostname = "matrix";
|
||||
fqdn = "${hostname}.cloonar.com";
|
||||
baseUrl = "https://${fqdn}";
|
||||
clientConfig."m.homeserver".base_url = baseUrl;
|
||||
serverConfig."m.server" = "${fqdn}:443";
|
||||
mkWellKnown = data: ''
|
||||
default_type application/json;
|
||||
add_header Access-Control-Allow-Origin *;
|
||||
return 200 '${builtins.toJSON data}';
|
||||
'';
|
||||
in {
|
||||
sops.secrets.matrix-shared-secret = {
|
||||
};
|
||||
sops.secrets.dendrite-private-key = {
|
||||
};
|
||||
|
||||
services.postgresql = {
|
||||
enable = true;
|
||||
ensureDatabases = [ "dendrite" ];
|
||||
ensureUsers = [
|
||||
{
|
||||
name = "dendrite";
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
services.nginx.virtualHosts."element.cloonar.com" = {
|
||||
forceSSL = true;
|
||||
enableACME = true;
|
||||
acmeRoot = null;
|
||||
root = pkgs.element-web.override {
|
||||
conf = {
|
||||
default_theme = "dark";
|
||||
default_server_config = {
|
||||
"m.homeserver" = {
|
||||
base_url = "https://matrix.cloonar.com";
|
||||
server_name = "cloonar.com";
|
||||
};
|
||||
};
|
||||
disable_custom_urls = true;
|
||||
disable_3pid_login = true;
|
||||
default_country_code = "AT";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
services.postgresqlBackup.enable = true;
|
||||
services.postgresqlBackup.databases = [ "dendrite" ];
|
||||
|
||||
services.nginx.virtualHosts."${fqdn}" = {
|
||||
forceSSL = true;
|
||||
enableACME = true;
|
||||
acmeRoot = null;
|
||||
locations."/".extraConfig = ''
|
||||
return 404;
|
||||
'';
|
||||
locations."/_dendrite".proxyPass = "http://[::1]:8008";
|
||||
locations."/_matrix".proxyPass = "http://[::1]:8008";
|
||||
locations."/_synapse/client".proxyPass = "http://[::1]:8008";
|
||||
};
|
||||
|
||||
|
||||
services.dendrite = {
|
||||
enable = true;
|
||||
settings = {
|
||||
global = {
|
||||
server_name = "cloonar.com";
|
||||
private_key = "$CREDENTIALS_DIRECTORY/private_key";
|
||||
database.connection_string = "postgresql:///dendrite?host=/run/postgresql";
|
||||
};
|
||||
client_api.registration_shared_secret = "$REGISTRATION_SHARED_SECRET";
|
||||
app_service_api.config_files = [
|
||||
"$CREDENTIALS_DIRECTORY/whatsapp_registration"
|
||||
"$CREDENTIALS_DIRECTORY/signal_registration"
|
||||
"$CREDENTIALS_DIRECTORY/discord_registration"
|
||||
];
|
||||
app_service_api.database.connection_string = "";
|
||||
federation_api.database.connection_string = "";
|
||||
key_server.database.connection_string = "";
|
||||
relay_api.database.connection_string = "";
|
||||
media_api.database.connection_string = "";
|
||||
room_server.database.connection_string = "";
|
||||
sync_api.database.connection_string = "";
|
||||
user_api.account_database.connection_string = "";
|
||||
user_api.device_database.connection_string = "";
|
||||
mscs.database.connection_string = "";
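# The empty connection strings above presumably make each component fall back
# to the single global PostgreSQL database configured in `global` instead of
# opening its own database.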
|
||||
};
|
||||
loadCredential = [
|
||||
"private_key:${config.sops.secrets.dendrite-private-key.path}"
|
||||
"whatsapp_registration:/var/lib/mautrix-whatsapp/whatsapp-registration.yaml"
|
||||
"signal_registration:/var/lib/mautrix-signal/signal-registration.yaml"
|
||||
"discord_registration:/var/lib/mautrix-discord/discord-registration.yaml"
|
||||
];
|
||||
environmentFile = config.sops.secrets.matrix-shared-secret.path;
|
||||
};
|
||||
|
||||
users.users.mautrix-whatsapp = {
|
||||
isSystemUser = true;
|
||||
group = "mautrix-whatsapp";
|
||||
home = "/var/lib/mautrix-whatsapp";
|
||||
description = "Mautrix-WhatsApp bridge user";
|
||||
};
|
||||
|
||||
users.groups.mautrix-whatsapp = {};
|
||||
systemd.services.mautrix-whatsapp = let
|
||||
dataDir = "/var/lib/mautrix-whatsapp";
|
||||
registrationFile = "${dataDir}/whatsapp-registration.yaml";
|
||||
settingsFile = "${dataDir}/config.json";
|
||||
settingsFileUnsubstituted = settingsFormat.generate "mautrix-whatsapp-config-unsubstituted.json" defaultConfig;
|
||||
settingsFormat = pkgs.formats.json {};
|
||||
appservicePort = 29318;
|
||||
defaultConfig = {
|
||||
homeserver = {
|
||||
address = "http://[::1]:8008";
|
||||
domain = "cloonar.com";
|
||||
};
|
||||
appservice = {
|
||||
hostname = "[::]";
|
||||
port = appservicePort;
|
||||
database.type = "sqlite3";
|
||||
database.uri = "${dataDir}/mautrix-whatsapp.db";
|
||||
id = "whatsapp";
|
||||
bot.username = "whatsappbot";
|
||||
bot.displayname = "WhatsApp Bridge Bot";
|
||||
as_token = "";
|
||||
hs_token = "";
|
||||
};
|
||||
bridge = {
|
||||
username_template = "whatsapp_{{.}}";
|
||||
displayname_template = "{{if .BusinessName}}{{.BusinessName}}{{else if .PushName}}{{.PushName}}{{else}}{{.JID}}{{end}} (WA)";
|
||||
double_puppet_server_map = {};
|
||||
login_shared_secret_map = {};
|
||||
command_prefix = "!wa";
|
||||
permissions."*" = "relay";
|
||||
permissions."cloonar.com" = "user";
|
||||
relay.enabled = true;
|
||||
history_sync.request_full_sync = false;
|
||||
encryption = {
|
||||
allow = true;
|
||||
default = true;
|
||||
require = true;
|
||||
};
|
||||
};
|
||||
logging = {
|
||||
min_level = "info";
|
||||
writers = lib.singleton {
|
||||
type = "stdout";
|
||||
format = "pretty-colored";
|
||||
time_format = " ";
|
||||
};
|
||||
};
|
||||
};
|
||||
in {
|
||||
description = "Mautrix-WhatsApp Service - A WhatsApp bridge for Matrix";
|
||||
|
||||
wantedBy = ["multi-user.target"];
|
||||
wants = ["network-online.target"];
|
||||
after = ["network-online.target"];
|
||||
|
||||
preStart = ''
|
||||
test -f '${settingsFile}' && rm -f '${settingsFile}'
|
||||
old_umask=$(umask)
|
||||
umask 0177
|
||||
${pkgs.envsubst}/bin/envsubst \
|
||||
-o '${settingsFile}' \
|
||||
-i '${settingsFileUnsubstituted}'
|
||||
umask $old_umask
|
||||
|
||||
# generate the appservice's registration file if absent
|
||||
if [ ! -f '${registrationFile}' ]; then
|
||||
${pkgs.mautrix-whatsapp}/bin/mautrix-whatsapp \
|
||||
--generate-registration \
|
||||
--config='${settingsFile}' \
|
||||
--registration='${registrationFile}'
|
||||
fi
|
||||
chmod 640 ${registrationFile}
|
||||
|
||||
umask 0177
|
||||
${pkgs.yq}/bin/yq -s '.[0].appservice.as_token = .[1].as_token
|
||||
| .[0].appservice.hs_token = .[1].hs_token
|
||||
| .[0]' '${settingsFile}' '${registrationFile}' \
|
||||
> '${settingsFile}.tmp'
|
||||
mv '${settingsFile}.tmp' '${settingsFile}'
|
||||
umask $old_umask
|
||||
'';
|
||||
|
||||
serviceConfig = {
|
||||
User = "mautrix-whatsapp";
|
||||
Group = "mautrix-whatsapp";
|
||||
# EnvironmentFile = cfg.environmentFile;
|
||||
StateDirectory = baseNameOf dataDir;
|
||||
WorkingDirectory = dataDir;
|
||||
ExecStart = ''
|
||||
${pkgs.mautrix-whatsapp}/bin/mautrix-whatsapp \
|
||||
--config='${settingsFile}' \
|
||||
--registration='${registrationFile}' \
|
||||
--ignore-unsupported-server
|
||||
'';
|
||||
LockPersonality = true;
|
||||
MemoryDenyWriteExecute = true;
|
||||
NoNewPrivileges = true;
|
||||
PrivateDevices = true;
|
||||
PrivateTmp = true;
|
||||
PrivateUsers = true;
|
||||
ProtectClock = true;
|
||||
ProtectControlGroups = true;
|
||||
ProtectHome = true;
|
||||
ProtectHostname = true;
|
||||
ProtectKernelLogs = true;
|
||||
ProtectKernelModules = true;
|
||||
ProtectKernelTunables = true;
|
||||
ProtectSystem = "strict";
|
||||
Restart = "on-failure";
|
||||
RestartSec = "30s";
|
||||
RestrictRealtime = true;
|
||||
RestrictSUIDSGID = true;
|
||||
SystemCallArchitectures = "native";
|
||||
SystemCallErrorNumber = "EPERM";
|
||||
SystemCallFilter = ["@system-service"];
|
||||
Type = "simple";
|
||||
UMask = 0027;
|
||||
};
|
||||
restartTriggers = [settingsFileUnsubstituted];
|
||||
};
|
||||
|
||||
users.users.mautrix-signal = {
|
||||
isSystemUser = true;
|
||||
group = "mautrix-signal";
|
||||
home = "/var/lib/mautrix-signal";
|
||||
description = "Mautrix-Signal bridge user";
|
||||
};
|
||||
|
||||
users.groups.mautrix-signal = {};
|
||||
systemd.services.mautrix-signal = let
|
||||
pkgswithsignal = import (fetchTarball "https://github.com/NixOS/nixpkgs/archive/fd698a4ab779fb7fb95425f1b56974ba9c2fa16c.tar.gz") {
|
||||
config = {
|
||||
permittedInsecurePackages = [
|
||||
# needed for matrix
|
||||
"olm-3.2.16"
|
||||
];
|
||||
};
|
||||
};
|
||||
dataDir = "/var/lib/mautrix-signal";
|
||||
registrationFile = "${dataDir}/signal-registration.yaml";
|
||||
settingsFile = "${dataDir}/config.json";
|
||||
settingsFileUnsubstituted = settingsFormat.generate "mautrix-signal-config-unsubstituted.json" defaultConfig;
|
||||
settingsFormat = pkgs.formats.json {};
|
||||
appservicePort = 29328;
|
||||
defaultConfig = {
|
||||
homeserver = {
|
||||
address = "http://[::1]:8008";
|
||||
domain = "cloonar.com";
|
||||
};
|
||||
appservice = {
|
||||
hostname = "[::]";
|
||||
port = appservicePort;
|
||||
database.type = "sqlite3";
|
||||
database.uri = "file:${dataDir}/mautrix-signal.db?_txlock=immediate";
|
||||
id = "signal";
|
||||
bot = {
|
||||
username = "signalbot";
|
||||
displayname = "Signal Bridge Bot";
|
||||
};
|
||||
as_token = "";
|
||||
hs_token = "";
|
||||
};
|
||||
bridge = {
|
||||
username_template = "signal_{{.}}";
|
||||
displayname_template = "{{or .ProfileName .PhoneNumber \"Unknown user\"}} (Signal)";
|
||||
double_puppet_server_map = { };
|
||||
login_shared_secret_map = { };
|
||||
command_prefix = "!signal";
|
||||
permissions."*" = "relay";
|
||||
permissions."cloonar.com" = "user";
|
||||
relay.enabled = true;
|
||||
encryption = {
|
||||
allow = true;
|
||||
default = true;
|
||||
require = true;
|
||||
};
|
||||
};
|
||||
matrix = {
|
||||
sync_direct_chat_list = true;
|
||||
};
|
||||
logging = {
|
||||
min_level = "info";
|
||||
writers = lib.singleton {
|
||||
type = "stdout";
|
||||
format = "pretty-colored";
|
||||
time_format = " ";
|
||||
};
|
||||
};
|
||||
};
|
||||
in {
|
||||
description = "Mautrix-Signal Service - A Signal bridge for Matrix";
|
||||
|
||||
wantedBy = ["multi-user.target"];
|
||||
wants = ["network-online.target"];
|
||||
after = ["network-online.target"];
|
||||
|
||||
preStart = ''
|
||||
test -f '${settingsFile}' && rm -f '${settingsFile}'
|
||||
old_umask=$(umask)
|
||||
umask 0177
|
||||
${pkgs.envsubst}/bin/envsubst \
|
||||
-o '${settingsFile}' \
|
||||
-i '${settingsFileUnsubstituted}'
|
||||
umask $old_umask
|
||||
|
||||
# generate the appservice's registration file if absent
|
||||
if [ ! -f '${registrationFile}' ]; then
|
||||
${pkgswithsignal.mautrix-signal}/bin/mautrix-signal \
|
||||
--generate-registration \
|
||||
--config='${settingsFile}' \
|
||||
--registration='${registrationFile}'
|
||||
fi
|
||||
chmod 640 ${registrationFile}
|
||||
|
||||
umask 0177
|
||||
${pkgs.yq}/bin/yq -s '.[0].appservice.as_token = .[1].as_token
|
||||
| .[0].appservice.hs_token = .[1].hs_token
|
||||
| .[0]
|
||||
| if env.MAUTRIX_SIGNAL_BRIDGE_LOGIN_SHARED_SECRET then .bridge.login_shared_secret_map.[.homeserver.domain] = env.MAUTRIX_SIGNAL_BRIDGE_LOGIN_SHARED_SECRET else . end' \
|
||||
'${settingsFile}' '${registrationFile}' > '${settingsFile}.tmp'
|
||||
mv '${settingsFile}.tmp' '${settingsFile}'
|
||||
umask $old_umask
|
||||
'';
|
||||
|
||||
serviceConfig = {
|
||||
User = "mautrix-signal";
|
||||
Group = "mautrix-signal";
|
||||
# EnvironmentFile = cfg.environmentFile;
|
||||
StateDirectory = baseNameOf dataDir;
|
||||
WorkingDirectory = dataDir;
|
||||
ExecStart = ''
|
||||
${pkgswithsignal.mautrix-signal}/bin/mautrix-signal \
|
||||
--config='${settingsFile}' \
|
||||
--registration='${registrationFile}' \
|
||||
--ignore-unsupported-server
|
||||
'';
|
||||
LockPersonality = true;
|
||||
MemoryDenyWriteExecute = true;
|
||||
NoNewPrivileges = true;
|
||||
PrivateDevices = true;
|
||||
PrivateTmp = true;
|
||||
PrivateUsers = true;
|
||||
ProtectClock = true;
|
||||
ProtectControlGroups = true;
|
||||
ProtectHome = true;
|
||||
ProtectHostname = true;
|
||||
ProtectKernelLogs = true;
|
||||
ProtectKernelModules = true;
|
||||
ProtectKernelTunables = true;
|
||||
ProtectSystem = "strict";
|
||||
Restart = "on-failure";
|
||||
RestartSec = "30s";
|
||||
RestrictRealtime = true;
|
||||
RestrictSUIDSGID = true;
|
||||
SystemCallArchitectures = "native";
|
||||
SystemCallErrorNumber = "EPERM";
|
||||
SystemCallFilter = ["@system-service"];
|
||||
Type = "simple";
|
||||
UMask = 0027;
|
||||
};
|
||||
restartTriggers = [settingsFileUnsubstituted];
|
||||
};
|
||||
|
||||
|
||||
users.users.mautrix-discord = {
|
||||
isSystemUser = true;
|
||||
group = "mautrix-discord";
|
||||
home = "/var/lib/mautrix-discord";
|
||||
description = "Mautrix-Discord bridge user";
|
||||
};
|
||||
|
||||
users.groups.mautrix-discord = {};
|
||||
systemd.services.mautrix-discord = let
|
||||
pkgswithdiscord = import (fetchTarball "https://github.com/NixOS/nixpkgs/archive/5ed627539ac84809c78b2dd6d26a5cebeb5ae269.tar.gz") {
|
||||
config = {
|
||||
permittedInsecurePackages = [
|
||||
# needed for matrix
|
||||
"olm-3.2.16"
|
||||
];
|
||||
};
|
||||
};
|
||||
dataDir = "/var/lib/mautrix-discord";
|
||||
registrationFile = "${dataDir}/discord-registration.yaml";
|
||||
settingsFile = "${dataDir}/config.json";
|
||||
settingsFileUnsubstituted = settingsFormat.generate "mautrix-discord-config-unsubstituted.json" defaultConfig;
|
||||
settingsFormat = pkgs.formats.json {};
|
||||
appservicePort = 29329;
|
||||
defaultConfig = {
|
||||
homeserver = {
|
||||
address = "http://[::1]:8008";
|
||||
domain = "cloonar.com";
|
||||
};
|
||||
appservice = {
|
||||
hostname = "[::]";
|
||||
port = appservicePort;
|
||||
database.type = "sqlite3";
|
||||
database.uri = "file:${dataDir}/mautrix-discord.db?_txlock=immediate";
|
||||
id = "discord";
|
||||
bot = {
|
||||
username = "discordbot";
|
||||
displayname = "Discord Bridge Bot";
|
||||
};
|
||||
as_token = "";
|
||||
hs_token = "";
|
||||
};
|
||||
bridge = {
|
||||
username_template = "discord_{{.}}";
|
||||
displayname_template = "{{or .GlobalName .Username}} (Discord{{if .Bot}} bot{{end}})";
|
||||
double_puppet_server_map = { };
|
||||
login_shared_secret_map = { };
|
||||
command_prefix = "!discord";
|
||||
permissions."*" = "relay";
|
||||
permissions."cloonar.com" = "user";
|
||||
relay.enabled = true;
|
||||
restricted_rooms = false;
|
||||
encryption = {
|
||||
allow = true;
|
||||
default = true;
|
||||
require = true;
|
||||
};
|
||||
};
|
||||
logging = {
|
||||
min_level = "info";
|
||||
writers = lib.singleton {
|
||||
type = "stdout";
|
||||
format = "pretty-colored";
|
||||
time_format = " ";
|
||||
};
|
||||
};
|
||||
};
|
||||
in {
|
||||
description = "Mautrix-Discord Service - A Discord bridge for Matrix";
|
||||
|
||||
wantedBy = ["multi-user.target"];
|
||||
wants = ["network-online.target"];
|
||||
after = ["network-online.target"];
|
||||
|
||||
preStart = ''
|
||||
test -f '${settingsFile}' && rm -f '${settingsFile}'
|
||||
old_umask=$(umask)
|
||||
umask 0177
|
||||
${pkgs.envsubst}/bin/envsubst \
|
||||
-o '${settingsFile}' \
|
||||
-i '${settingsFileUnsubstituted}'
|
||||
umask $old_umask
|
||||
|
||||
# generate the appservice's registration file if absent
|
||||
if [ ! -f '${registrationFile}' ]; then
|
||||
${pkgswithdiscord.mautrix-discord}/bin/mautrix-discord \
|
||||
--generate-registration \
|
||||
--config='${settingsFile}' \
|
||||
--registration='${registrationFile}'
|
||||
fi
|
||||
chmod 640 ${registrationFile}
|
||||
|
||||
umask 0177
|
||||
${pkgs.yq}/bin/yq -s '.[0].appservice.as_token = .[1].as_token
|
||||
| .[0].appservice.hs_token = .[1].hs_token
|
||||
| .[0]
|
||||
| if env.MAUTRIX_DISCORD_BRIDGE_LOGIN_SHARED_SECRET then .bridge.login_shared_secret_map.[.homeserver.domain] = env.MAUTRIX_DISCORD_BRIDGE_LOGIN_SHARED_SECRET else . end' \
|
||||
'${settingsFile}' '${registrationFile}' > '${settingsFile}.tmp'
|
||||
mv '${settingsFile}.tmp' '${settingsFile}'
|
||||
umask $old_umask
|
||||
'';
|
||||
|
||||
serviceConfig = {
|
||||
User = "mautrix-discord";
|
||||
Group = "mautrix-discord";
|
||||
# EnvironmentFile = cfg.environmentFile;
|
||||
StateDirectory = baseNameOf dataDir;
|
||||
WorkingDirectory = dataDir;
|
||||
ExecStart = ''
|
||||
${pkgswithdiscord.mautrix-discord}/bin/mautrix-discord \
|
||||
--config='${settingsFile}' \
|
||||
--registration='${registrationFile}'
|
||||
'';
|
||||
LockPersonality = true;
|
||||
MemoryDenyWriteExecute = true;
|
||||
NoNewPrivileges = true;
|
||||
PrivateDevices = true;
|
||||
PrivateTmp = true;
|
||||
PrivateUsers = true;
|
||||
ProtectClock = true;
|
||||
ProtectControlGroups = true;
|
||||
ProtectHome = true;
|
||||
ProtectHostname = true;
|
||||
ProtectKernelLogs = true;
|
||||
ProtectKernelModules = true;
|
||||
ProtectKernelTunables = true;
|
||||
ProtectSystem = "strict";
|
||||
Restart = "on-failure";
|
||||
RestartSec = "30s";
|
||||
RestrictRealtime = true;
|
||||
RestrictSUIDSGID = true;
|
||||
SystemCallArchitectures = "native";
|
||||
SystemCallErrorNumber = "EPERM";
|
||||
SystemCallFilter = ["@system-service"];
|
||||
Type = "simple";
|
||||
UMask = 0027;
|
||||
};
|
||||
restartTriggers = [settingsFileUnsubstituted];
|
||||
};
|
||||
}
|
||||
19
hosts/fw-new/modules/web/proxies.nix
Normal file
@@ -0,0 +1,19 @@
|
||||
{ ... }: {
|
||||
services.nginx.virtualHosts."git.cloonar.com" = {
|
||||
forceSSL = true;
|
||||
enableACME = true;
|
||||
acmeRoot = null;
|
||||
locations."/" = {
|
||||
proxyPass = "https://git.cloonar.com/";
|
||||
};
|
||||
};
|
||||
services.nginx.virtualHosts."foundry-vtt.cloonar.com" = {
|
||||
forceSSL = true;
|
||||
enableACME = true;
|
||||
acmeRoot = null;
|
||||
locations."/" = {
|
||||
proxyPass = "http://10.42.97.21:30000";
|
||||
proxyWebsockets = true;
|
||||
};
|
||||
};
|
||||
}
|
||||
34
hosts/fw-new/modules/web/secrets.yaml
Normal file
@@ -0,0 +1,34 @@
|
||||
borg-passphrase: ENC[AES256_GCM,data:2WjoqMRmXvW9EGMmpMYhrC0Qt0Dk7QWlbEncZPdK2SxVljEoFibjVEr6jeYdAx6UkaXdjk9pD3PBbls2tWt0TiNQdh8=,iv:bHzASNjqqfPsQ/1w/oM7x0FubAzzRkn+iWrZlenU9rs=,tag:ektqi0rqEywg9YGybPQesw==,type:str]
|
||||
borg-ssh-key: ENC[AES256_GCM,data:b/xZnUTfi85IG1s897CBF1HD7BTswQUatbotyZfLmbhxXxEyffUeaiGsT9Gh9yQqOKTstTihA48nVk/4ekAPD/ZGDQ189V1BwKkQ5chN9TSULofekfmemhUhVGjnx8OFl6hYYpTttQSTLHtczmfE2iX1JyrZy2Z+H+w6dbZjkYDayRUt/4+5wCtQJ1Nt7bjzwLWhjdVtwDeBLm/kCywVguZLCgyiuqmXMr1h9jpUS7URZegGz1lFs34Ismu1LtaRjFGRyd8aKaTU6PSxDbjE4dQ3Lh1Hm3nhtOrSkswBZLp8OTP6emrQ7c3oJp1zqO5zQHXxD2V5hkPw6ln0Ee1aQp1rvLD8shRXzRbHG+mySvjKLJvLypnNuYfQklqlnhbG+M1/NN13oVF13nHpKwP5q33sRr49mfHw8YHdRhHuhYHVrpy8ep0AmPXiDYCDM4cnlOMnzlH/toF0fq0YRny6QoqKNpaYhmA61MXRPTZCqoAcE1N+oo7HymjJetzL9b2FkPCoDOx989IJ8SUaBJpzR+agNsFi87htVllRp4ozms/m56dI0AdwqeAre00iMBzpVS0hXURE7fqvAnLHQD1goW9XB2mztqcJ09YafrOgTA3oyazWcAjxgV33GupxxIDmwRdLmavvr4qrHfddYctYLPI7VolqT9JmKN6iVG9vYsDutgoyRlhzbGASKPLgcYn9sGG+LBgTHfZyABnYOaUetVP72mhSN30ZZixcCskVlGg5C53wrW5o6mBv+PyG8PimxLmQylbvHUdGGVLQfMpJaaXgpUjBX1MWdQAVa+Nyjm7QwYdRKoCb3suQ6bOq5O9eotel3GPB8gpKzInhNA/0xiB4UyCGp1i21iRS9+Rc7yufo5s3t56k0643K2DhBUVgssiTsG15BbQdX4c1O28i9zwEZ+wVci1yvLX38M0a3tDDt9iW1BIOWehShS7dpyJR2/OgWLFagw9hYP5h24t5k6Gz2ODhPouaFccYDRUBR6UECxA+gDS+trN8iNSX1oWa0ys0XvgwWpJ2CrdSArNqe1BdhM47BQwudiA3RwaEN3wRh5PeykSk/3BUXK+ZdAr0BZ8ij2q4F8zQexLxnrV6xRqofNcVs62iJAjx6g86InSv0nNjLQ9U/fBTL66u1iRZFJhuxPjDNfLJZqT0TvRR7KBcNWTwTuMCGNp5s9TngMUF4uhHx8qGxtjfH58WjixOhC9lgUt7cYEFIeefcwIO9VVnKoiXK5sPIvIsjtLRzGvejYSd0ZwSF3Ly9FkWLkr+o5rs5bXtGMsSQ+BUFg5nM1BqrHIGv9M+F4kPxhnqm9/JXuMSQ+JUzix5N0vHuSTphCayDpMHJYRUEkDEmwPXMyB9zWVmvMb0ByUnfs/n/jmL4WRuggYqchIR3/xuco5HUqLbEKXiJ39wVgy+i3/biWOOEu5BmMx3qbgQ1+6nlxY+f1qpXZ8br0RlXLOQ6L/O9Qa9gKZaxLm/5GCiFZ+SeU/c5OgUndYqTk6FsbDlNurA69IqjwubG345lpdB9VPoGP7dLsx3VaGKW0bvr06oRaeasMx90SN5bGQJH+0iQFkGPhp0m2v31zpBk1IibXi5Qb1OWGXGYd+iNt1ZQF0HVuEqQEXI62x92QkaR7eHowR4tCRF1xH1ZrBkyjtdofUU2wPqsRrOWqGIZWUh/JpfXkSAZQo9yJKnHcp9d3BPEvWpLWS9g1Jfej5XG497aP6crWw5XawOyzi+PEgz2Y3Q0R/MM3S1W2R7Z+21nekbCfghpNylwIX4UYkeX8YorheiumkUfFXjktPSkFCTuUrYAA89WZjIIqd4/gt3tS7keCsjEiTkW2KdDPlzNItKnC8xWnpRc+Wh6ghA/nt3j4POb880j3scFoDjgOv5lNk2Q84S/IW+DQ3U8o4JrKiXsxchDvmgGbU4FbXZTGLXeM1CybmbZKogIHdwJkhC425oqA1PMiq5tDPLKpl2214JuaV4Xd8R0bwCSHYjQp9gqJT9j1Wg/3P0M3/VGZGoJEVriiBl6PBHP2CcvxK1NADDmMHgGQwwfROoSijAzzPKCy9sgzsquTkqzq8q4aChjGKShxs+52dpnmmuygSlxjyVQCEW9kLERf1Nm1arsLkHJ4ZsWgSrskGvjsPEvyEnpY33gGB7fpy90NW0GtELgGzEw/1nfLcFbRBJ7gH+4Dby2fBTxoV2ks9m0Fv6OWsfIe6H54zWLmqB1RkQaskb1wDKU3HATOmuYo/fByLIsMyR5l3P7LXWF5CJOprzp41rGts/ybJEG1EUtmVCs2epTwbeG/Waq1DB3TFa639ETjxOfGQ65PXp5aT1d5v+ko87LiR+0us6xwlfZ6NMRRZuPt4wycFgPUAAmpdmguwDKifHKA258g9kzotT25JeFFEMVhsMi1PoXEqA+sFomdsLt+Vtpr2aGMUWyHD/E2fgAtybLwxbjqDINi8vXWJxv/UZdH8wBOlWLtaeGg5/jRsMuL/hSSZ84Q2zfRVvV7/BZ7wnxfoXmAwRdTZijAvc9TxWszP6E5mAix7s/znU+1vnseJdxWa4Ff1wOGVL/Tem2K0J/mp75XuzSP7nCYDMgqhnvfzlD8vv6QpxtDUAbdTBDyPkQ4U9L6+y5ul5Aegpui+p0G9/0UHdBYhJiFd90omnhSmyHx2pvgUTfbL/Kv/pk7nwTv89a87NXNA9K6AATwx0kUPgIWs/5FGi8leCXGSsgBbJogL1htC72pKzVH6ckEzKeBzRADmwFLhnPIvp37ZkQPj0rrWRhkd5RqsFcN0166N+M4lPD0hzPd2+nEXDAOHoCK7U+BcRcJ3GUlyPU91dbWfo9otPd3naTvGVZuFDxOihLtBXaLTsxmS4STk6DVRjwNmX8YC9FwXkED19xEeH6KkaFs1nVXnmDqpvi2BcueT96t6TOeu5HcA9fAgFTpOKVT6cK2PcHTtJhjrPkfSYr0/ksJdV7r9N4JgAEfiASMMHS5uQWJlyJKWo92rJ2IvSCQx4lcK3gasgcTsVaYmuRORM+6263r4NKS8W8r55XvVyW/C7vvsVq6wF3xUkQadBkxIUQUVWxxCc1pWOlfWwMs0i+ZssoaWopbs7x45z86i+3HsHmfS6GuXUpQfgvXe9Bn7mOj7VQWaG9NIFUpIxisGfdY9L8+RXobo7etD3da7TNMs40BT+34tijcX53FzKwvG3ESNPB2hjOAITDta6LDOHhJrlVqn90p1DicThHOaT3fxt6ST287EhWqK9S1gpkLrp0gNSA9v+K9mBvWaWYNDXY7sGxOIMzCEIdFT18Pra92NhGTJtC0XizHDMUfGx5WAaard1Iy/PYXvavoAwp30qDCQGF+PgwSProa+JtQQPzoEgtSXNVhUWIzz10TACuo+vHt8sHvFG3VuU7jSOr9sqVrN36KMDUlwo0gavHKsjRxHf2OGh552q7AP+sM6Y5WhA4KhmQSUKCVxYVQ==,iv:U3+fjacm8+gZAjPQNz2mjFYTUbLyltTaPiSKb3lvCmk=,
tag:ZR6zI1UijDayIvH3v35Hqg==,type:str]
|
||||
zammad-key-base: ENC[AES256_GCM,data:HO9MuwcwjryuXr5No8sCPfso5bpLtQCoczrC/R214ecVIFwwH1uhMeNO8Tlh6EjRLPo7aVTSz87Vx5yaNVezvHCs55G6TT9mcNS/v/V7sbFz9dNIgbFblY3gFIAa4cViioYc71wdb7d4Tta7qhse5zQ41KhAqCWuGDgFErQA4Oc=,iv:b1wY8fW0psircSlNXwDjPzNWK8NyAMNqegitNcqV6U4=,tag:oQ7nyO9TKOOu6IF7ODzpPA==,type:str]
|
||||
dendrite-private-key: ENC[AES256_GCM,data:ZHDIa/iYSZGofE67JU63fHRdKbs/ZyEJY45tV6H8WZAOcduGafPYBo2NCZ7nqLbc2Z9dUUgsrpzvkQ3+VaWqFUv7YsE+CbCx4CeiLGMkj8EAGzX4rkJGHMzkkc2UT7v9znCnKACS3fZtU69trqVMcf1PzgqepOHMBku37dzpwOQC/Tc3UTuO72M=,iv:Ljun1/ruY9cDBm9vu62riUrpGjrWtFFx90GeE7uc3Yo=,tag:FF4xPb1SDhK/4ITr/idvYg==,type:str]
|
||||
matrix-shared-secret: ENC[AES256_GCM,data:HeS4PT0R+TRU6Htwa5TChjK1VAjAdgSS8tSnva+ga3f+mEfJPTQ02pEvS2WFvcnchmEjNYy39zL/rbtX,iv:4yR+VgdJY3VcvLg18v+5jbJDSkFzaeyLNAZ0k8ivjdQ=,tag:RA96iSFDUdlXq30c/vkvpA==,type:str]
|
||||
sops:
|
||||
kms: []
|
||||
gcp_kms: []
|
||||
azure_kv: []
|
||||
hc_vault: []
|
||||
age:
|
||||
- recipient: age16veg3fmvpfm7a89a9fc8dvvsxmsthlm70nfxqspr6t8vnf9wkcwsvdq38d
|
||||
enc: |
|
||||
-----BEGIN AGE ENCRYPTED FILE-----
|
||||
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBoUWdTYlRjWDJvemF5Q2sr
|
||||
VCtrS2dTTGRwUlNIWHd0WkVCRkRMcGhuTzE0ClNic1FmQ05UNWQwbGc4TUFMNGlI
|
||||
K0RhK2pqUGY3UElmK1pNUEkxV2xGUTQKLS0tIFRORE9JTDRZK0MwZUJoc2xlcHFH
|
||||
bmp3ZW14TVdCMHhkSi84NE5neDdrY3cKYfgu7aqvG6wQmEFhmzieXFGoQpyffPXj
|
||||
jiHrAPjBBFy21wdYf0nQXNMzekqOMJwOj0oNA2b5omprPxjB9uns4Q==
|
||||
-----END AGE ENCRYPTED FILE-----
|
||||
- recipient: age1gjm4c3swt8u88e36gf2qlg3syxfc0ly94u64c42f2tsf24npw4csa6e4fw
|
||||
enc: |
|
||||
-----BEGIN AGE ENCRYPTED FILE-----
|
||||
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBUUjQxWnBMQXo3QmF1STUw
|
||||
bHh1NDhvQXZIQ2RiOUx5OU5Wc3BVSEJDUEZVCmVzeFk5SWpMbVV4VUdsRmhiaWwz
|
||||
bTJDY1pJRXJvNUdCSXJqQ3Byd3lWN2sKLS0tIHRKdXRNc1BYcURBRVNlenk1OEl3
|
||||
Q05BN0VnQ0haeHBobWhRV0EzL3dLSEkKWlALiX5mvG8y0WUc8yFWMbcpSRrSGoQx
|
||||
SHaOlDCjYvViZ7GPRLqnSwDGZ1clC6JsTbwKXrMsWdZBKvSO/VIWQw==
|
||||
-----END AGE ENCRYPTED FILE-----
|
||||
lastmodified: "2024-10-14T16:53:41Z"
|
||||
mac: ENC[AES256_GCM,data:DUi6zUrZBMVaYZ/BvWny7RwPgXe+vQ+odO30fGe8iZHj9d3gzB95F75CqIgENi4gVOA4CQDADE+p45z/mtl04HAh7RiT0/k21RSdQcH2W9AX525fOzeqbxbPA/tXJOctwGrytFwlK9UdJULXkJCwYrJnwNc0XPnBk1FodTykXWs=,iv:q/eapgTVL/rifrrZeIcXT5VO9bEoS4EmmEhYJ2xHvQ4=,tag:xb0Qj/wu17cLTkvefsDqiw==,type:str]
|
||||
pgp: []
|
||||
unencrypted_suffix: _unencrypted
|
||||
version: 3.8.1
|
||||
119
hosts/fw-new/modules/web/zammad.nix
Normal file
@@ -0,0 +1,119 @@
|
||||
{ config, pkgs, ... }:
|
||||
|
||||
{
|
||||
services.zammad = {
|
||||
enable = true;
|
||||
port = 3010;
|
||||
secretKeyBaseFile = config.sops.secrets.zammad-key-base.path;
|
||||
database = {
|
||||
createLocally = true;
|
||||
};
|
||||
};
|
||||
|
||||
services.nginx.virtualHosts."support.cloonar.com" = {
|
||||
forceSSL = true;
|
||||
enableACME = true;
|
||||
acmeRoot = null;
|
||||
|
||||
extraConfig = ''
|
||||
# Virtual endpoint created by nginx to forward auth requests.
|
||||
location /authelia {
|
||||
internal;
|
||||
set $upstream_authelia https://auth.cloonar.com/api/verify;
|
||||
proxy_pass_request_body off;
|
||||
proxy_pass $upstream_authelia;
|
||||
proxy_set_header Content-Length "";
|
||||
|
||||
# Timeout if the real server is dead
|
||||
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503;
|
||||
|
||||
# [REQUIRED] Needed by Authelia to check authorizations of the resource.
|
||||
# Provide either X-Original-URL and X-Forwarded-Proto or
|
||||
# X-Forwarded-Proto, X-Forwarded-Host and X-Forwarded-Uri or both.
|
||||
# Those headers will be used by Authelia to deduce the target url of the user.
|
||||
# Basic Proxy Config
|
||||
client_body_buffer_size 128k;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Original-URL $scheme://$http_host$request_uri;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $remote_addr;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header X-Forwarded-Host $http_host;
|
||||
proxy_set_header X-Forwarded-Uri $request_uri;
|
||||
proxy_set_header X-Forwarded-Ssl on;
|
||||
proxy_redirect http:// $scheme://;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection "";
|
||||
proxy_cache_bypass $cookie_session;
|
||||
proxy_no_cache $cookie_session;
|
||||
proxy_buffers 4 32k;
|
||||
|
||||
# Advanced Proxy Config
|
||||
send_timeout 5m;
|
||||
proxy_read_timeout 240;
|
||||
proxy_send_timeout 240;
|
||||
proxy_connect_timeout 240;
|
||||
}
|
||||
'';
|
||||
|
||||
locations."/" = {
|
||||
proxyPass = "http://127.0.0.1:3010";
|
||||
proxyWebsockets = true;
|
||||
extraConfig =
|
||||
"proxy_set_header X-Forwarded-Proto 'https';" +
|
||||
"proxy_set_header X-Forwarded-Ssl on;" +
|
||||
"proxy_connect_timeout 300;" +
|
||||
"proxy_send_timeout 300;" +
|
||||
"proxy_read_timeout 300;" +
|
||||
"send_timeout 300;"
|
||||
;
|
||||
};
|
||||
locations."/auth/sso" = {
|
||||
proxyPass = "http://127.0.0.1:3010";
|
||||
proxyWebsockets = true;
|
||||
|
||||
extraConfig = ''
|
||||
# Basic Authelia Config
|
||||
# Send a subsequent request to Authelia to verify if the user is authenticated
|
||||
# and has the right permissions to access the resource.
|
||||
auth_request /authelia;
|
||||
# Set the `target_url` variable based on the request. It will be used to build the portal
|
||||
# URL with the correct redirection parameter.
|
||||
auth_request_set $target_url $scheme://$http_host$request_uri;
|
||||
# Set the X-Forwarded-User and X-Forwarded-Groups with the headers
|
||||
# returned by Authelia for the backends which can consume them.
|
||||
# This is not safe on its own: the backend must make sure these headers really come from the
# proxy. In the future it should be safer to just use OAuth.
|
||||
auth_request_set $user $upstream_http_remote_user;
|
||||
auth_request_set $groups $upstream_http_remote_groups;
|
||||
auth_request_set $name $upstream_http_remote_name;
|
||||
auth_request_set $email $upstream_http_remote_email;
|
||||
proxy_set_header Remote-User $user;
|
||||
proxy_set_header Remote-Groups $groups;
|
||||
proxy_set_header Remote-Name $name;
|
||||
proxy_set_header Remote-Email $email;
|
||||
# If Authelia returns 401, then nginx redirects the user to the login portal.
|
||||
# If it returns 200, then the request passes through to the backend.
|
||||
# For other types of errors, nginx will handle them as usual.
|
||||
error_page 401 =302 https://auth.cloonar.com/?rd=$target_url;
|
||||
'';
|
||||
};
|
||||
locations."/ws" = {
|
||||
proxyPass = "http://127.0.0.1:6042";
|
||||
proxyWebsockets = true;
|
||||
extraConfig =
|
||||
"proxy_set_header X-Forwarded-Proto 'https';" +
|
||||
"proxy_set_header X-Forwarded-Ssl on;" +
|
||||
"proxy_read_timeout 86400;" +
|
||||
"send_timeout 300;"
|
||||
;
|
||||
};
|
||||
};
|
||||
|
||||
sops.secrets = {
|
||||
zammad-key-base.owner = "zammad";
|
||||
};
|
||||
|
||||
services.postgresqlBackup.enable = true;
|
||||
services.postgresqlBackup.databases = [ "zammad" ];
|
||||
}
|
||||
53
hosts/fw-new/modules/wireguard.nix
Normal file
@@ -0,0 +1,53 @@
|
||||
{ config, ... }: {
|
||||
sops.secrets.wg_cloonar_key = {};
|
||||
sops.secrets.wg_epicenter_works_key = {};
|
||||
sops.secrets.wg_epicenter_works_psk = {};
|
||||
sops.secrets.wg_ghetto_at_key = {};
|
||||
|
||||
# https://wiki.archlinux.org/title/WireGuard#Loop_routing
|
||||
|
||||
networking.wireguard.interfaces = {
|
||||
wg_cloonar = {
|
||||
ips = [ "10.42.98.1/24" ];
|
||||
listenPort = 51820;
|
||||
# publicKey: TKQVDmBnf9av46kQxLQSBDhAeaK8r1zh8zpU64zuc1Q=
|
||||
privateKeyFile = config.sops.secrets.wg_cloonar_key.path;
|
||||
peers = [
|
||||
{ # Notebook
|
||||
publicKey = "YdlRGsjh4hS3OMJI+t6SZ2eGXKbs0wZBXWudHW4NyS8=";
|
||||
allowedIPs = [ "10.42.98.201/32" ];
|
||||
}
|
||||
{ # iPhone
|
||||
publicKey = "nkm10abmwt2G8gJXnpqel6QW5T8aSaxiqqGjE8va/A0=";
|
||||
allowedIPs = [ "10.42.98.202/32" ];
|
||||
}
|
||||
];
|
||||
};
|
||||
wg_epicenter = {
|
||||
ips = [ "10.50.60.6/32" ];
|
||||
privateKeyFile = config.sops.secrets.wg_epicenter_works_key.path;
|
||||
peers = [
|
||||
{
|
||||
endpoint = "5.9.131.17:51821";
|
||||
publicKey = "T7jPGSapSudtKyWwi2nu+2hjjse96I4U3lccRHZWd2s=";
|
||||
presharedKeyFile = config.sops.secrets.wg_epicenter_works_psk.path;
|
||||
allowedIPs = [ "10.14.1.0/24" "10.14.2.0/24" "10.14.11.0/24" "10.14.40.0/24" "10.25.0.0/24" "10.50.60.0/24" ];
|
||||
}
|
||||
];
|
||||
};
|
||||
wg_ghetto_at = {
|
||||
ips = [ "10.43.98.2/32" ];
|
||||
# publicKey: o0FsoHL7ymwuDYmWA5N1mngbGT1sZJnhK6zhJkuEtzE=
|
||||
privateKeyFile = config.sops.secrets.wg_ghetto_at_key.path;
|
||||
peers = [
|
||||
{
|
||||
endpoint = "vpn.ghetto.at:51820";
|
||||
publicKey = "v4pr6tzS0Xpwh/mWTohxxvCRaAj2B4bqtJnNOu9v2Xs=";
|
||||
allowedIPs = [ "10.43.0.0/16" ];
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
|
||||
networking.firewall.allowedUDPPorts = [ 51820 ];
|
||||
}
|
||||
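Note: for reference, a minimal sketch of the matching client side for the "Notebook" peer, assuming the notebook also runs NixOS. The public key and port come from the interface above; the private key path and the firewall's public endpoint are placeholders, not part of this commit.
{ ... }: {
  networking.wireguard.interfaces.wg_cloonar = {
    ips = [ "10.42.98.201/32" ];
    privateKeyFile = "/var/lib/wireguard/notebook.key"; # placeholder
    peers = [{
      # server public key, taken from the comment in wireguard.nix above
      publicKey = "TKQVDmBnf9av46kQxLQSBDhAeaK8r1zh8zpU64zuc1Q=";
      endpoint = "fw.example.org:51820"; # placeholder for the public address of fw
      allowedIPs = [ "10.42.98.0/24" ];
    }];
  };
}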
16
hosts/fw-new/modules/wol.nix
Normal file
@@ -0,0 +1,16 @@
|
||||
{ pkgs, ... }:
let
  wolScript = pkgs.writeShellScriptBin "wol-script" ''
    case "$1" in
      "gaming")
        # send a magic packet to the gaming PC via the LAN broadcast address
        ${pkgs.wol}/bin/wol -i 10.42.96.255 78:8c:b5:fe:41:62
        ;;
      *) echo "Usage: $0 <hostname>"; exit 1;;
    esac
  '';
in
{
  environment.systemPackages = [
    wolScript
  ];
}
|
||||
BIN
hosts/fw-new/pkgs/foundry-vtt/FoundryVTT-12.331.zip
Normal file
Binary file not shown.
25
hosts/fw-new/pkgs/foundry-vtt/default.nix
Normal file
@@ -0,0 +1,25 @@
|
||||
{ stdenv, lib, unzip }:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "foundry-vtt";
|
||||
version = "12.331";
|
||||
|
||||
src = ./FoundryVTT-12.331.zip;
|
||||
|
||||
nativeBuildInputs = [ unzip ];
|
||||
|
||||
unpackPhase = ''
|
||||
unzip $src
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out/share/foundry-vtt
|
||||
cp -r . $out/share/foundry-vtt
|
||||
'';
|
||||
|
||||
meta = with lib; {
|
||||
description = "Tabletop simulator";
|
||||
license = licenses.mit; # Adjust as needed
|
||||
platforms = platforms.all;
|
||||
};
|
||||
}
|
||||
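Note: for context, the container module later in this commit consumes this package via callPackage; a condensed sketch is below. The systemPackages line is only illustrative, the real module wires the package into a systemd service inside a container.
{ pkgs, ... }:
let
  # same call as in modules/foundry-vtt.nix further down in this diff
  foundry-vtt = pkgs.callPackage ../pkgs/foundry-vtt { };
in
{
  environment.systemPackages = [ foundry-vtt ]; # illustrative only
}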
9591
hosts/fw-new/pkgs/kernel/rk35xx_vendor_config
Normal file
File diff suppressed because it is too large
4780
hosts/fw-new/pkgs/kernel/rk35xx_vendor_config.nix
Normal file
File diff suppressed because it is too large
61
hosts/fw-new/pkgs/kernel/vendor.nix
Normal file
@@ -0,0 +1,61 @@
|
||||
# args of buildLinux:
# https://github.com/NixOS/nixpkgs/blob/nixos-unstable/pkgs/os-specific/linux/kernel/generic.nix
# Note that this method will use the defconfig in the source tree,
# combined with the common configuration defined in pkgs/os-specific/linux/kernel/common-config.nix, which is suitable for a NixOS system,
# but it's not suitable for embedded systems, so we comment it out.
# ================================================================
# If you already have a generated configuration file, you can build a kernel that uses it with pkgs.linuxManualConfig.
# The difference between a defconfig and the generated configuration file is that the generated configuration file is more complete.
#
|
||||
{ fetchFromGitHub
|
||||
, linuxManualConfig
|
||||
, ubootTools
|
||||
, fetchurl
|
||||
, ...
|
||||
}:
|
||||
let
|
||||
modDirVersion = "6.1.75";
|
||||
|
||||
panthor-base = "aa54fa4e0712616d44f2c2f312ecc35c0827833d";
|
||||
panthor-head = "c81ebd8e12b64a42a6efd68cc0ed018b57d14e91";
|
||||
in
|
||||
(linuxManualConfig {
|
||||
inherit modDirVersion;
|
||||
version = "${modDirVersion}-jr-noble";
|
||||
extraMeta.branch = "6.1";
|
||||
|
||||
# https://github.com/Joshua-Riek/linux-rockchip/tree/noble
|
||||
src = fetchFromGitHub {
|
||||
owner = "Joshua-Riek";
|
||||
repo = "linux-rockchip";
|
||||
rev = "5c43412639fd134f0ba690de2108eaa7ea349e2a";
|
||||
hash = "sha256-aKm/RQTRTzLr8+ACdG6QW1LWn+ZOjQtlvU2KkZmYicg=";
|
||||
};
|
||||
|
||||
# https://github.com/hbiyik/linux/tree/rk-6.1-rkr3-panthor
|
||||
# allows usage of mainline mesa
|
||||
kernelPatches = [{
|
||||
name = "hbiyik-panthor.patch";
|
||||
# NOTE: This needs to be `fetchurl` instead of `fetchpatch`, because `fetchpatch`
|
||||
# reorders the patches, and the order matters since they're generated from commits.
|
||||
patch = fetchurl {
|
||||
url = "https://github.com/hbiyik/linux/compare/${panthor-base}...${panthor-head}.patch";
|
||||
hash = "sha256-nSfmgem0CElUHL1wXSL+9aVixeaRjcxMyey4YaNdHfc=";
|
||||
};
|
||||
extraConfig = { };
|
||||
}];
|
||||
|
||||
# Steps to generate the kernel config file
# 1. git clone --depth 1 https://github.com/hbiyik/linux.git -b rk-6.1-rkr3-panthor
# 2. copy https://github.com/hbiyik/linux/blob/rk-6.1-rkr3-panthor/debian.rockchip/config/config.common.ubuntu to arch/arm64/configs/rk35xx_vendor_defconfig
# 3. run `nix develop .#fhsEnv` in this project to enter the FHS test environment defined here.
# 4. run `make rk35xx_vendor_defconfig` in the kernel root directory to configure the kernel.
# 5. Then use `make menuconfig` in the kernel's root directory to view and customize the kernel (like enabling/disabling rknpu, rkflash, ACPI (for UEFI), etc.).
# 6. copy the generated .config to ./pkgs/kernel/rk35xx_vendor_config (also be sure to update the corresponding `.nix` file accordingly) and commit it.
#
|
||||
configfile = ./rk35xx_vendor_config;
|
||||
config = import ./rk35xx_vendor_config.nix;
|
||||
}).overrideAttrs (old: {
|
||||
name = "k"; # dodge uboot length limits
|
||||
nativeBuildInputs = old.nativeBuildInputs ++ [ ubootTools ];
|
||||
})
|
||||
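Note: this diff does not show where the kernel expression is hooked into a host; if it were used, the wiring would look roughly like the sketch below (the path is assumed relative to the host's configuration.nix).
{ pkgs, ... }:
{
  # assumption: not part of this commit
  boot.kernelPackages = pkgs.linuxPackagesFor (pkgs.callPackage ./pkgs/kernel/vendor.nix { });
}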
17
hosts/fw-new/pkgs/mali-firmware/default.nix
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
stdenv,
|
||||
fetchurl,
|
||||
}:
|
||||
stdenv.mkDerivation {
|
||||
pname = "mali-g610-firmware";
|
||||
version = "g21p0-01eac0";
|
||||
|
||||
src = fetchurl {
|
||||
url = "https://github.com/JeffyCN/mirrors/raw/e08ced3e0235b25a7ba2a3aeefd0e2fcbd434b68/firmware/g610/mali_csffw.bin";
|
||||
hash = "sha256-jnyCGlXKHDRcx59hJDYW3SX8NbgfCQlG8wKIbWdxLfU=";
|
||||
};
|
||||
|
||||
buildCommand = ''
|
||||
install -Dm444 $src $out/lib/firmware/mali_csffw.bin
|
||||
'';
|
||||
}
|
||||
23
hosts/fw-new/pkgs/orangepi-firmware/default.nix
Normal file
@@ -0,0 +1,23 @@
|
||||
{ fetchFromGitHub, stdenvNoCC, ... }: stdenvNoCC.mkDerivation {
|
||||
pname = "orangepi-firmware";
|
||||
version = "2024.01.24";
|
||||
dontBuild = true;
|
||||
dontFixup = true;
|
||||
compressFirmware = false;
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "orangepi-xunlong";
|
||||
repo = "firmware";
|
||||
rev = "76ead17a1770459560042a9a7c43fe615bbce5e7";
|
||||
hash = "sha256-mltaup92LTGbuCXeGTMdoFloX3vZRbaUFVbh6lwveFs=";
|
||||
};
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
mkdir -p $out/lib/firmware
|
||||
cp -a * $out/lib/firmware/
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
}
|
||||
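Note: neither firmware package is wired up in this diff; a typical hookup would be the sketch below (paths assumed relative to the host's configuration.nix).
{ pkgs, ... }:
{
  # assumption: exposes lib/firmware/mali_csffw.bin and the Orange Pi blobs to the kernel
  hardware.firmware = [
    (pkgs.callPackage ./pkgs/mali-firmware { })
    (pkgs.callPackage ./pkgs/orangepi-firmware { })
  ];
}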
73
hosts/fw-new/secrets.yaml
Normal file
@@ -0,0 +1,73 @@
|
||||
borg-passphrase: ENC[AES256_GCM,data:jHb+yXK0RqNdVYtWiueztZFlHC/xQ6ZiAOUcLt6BxmZQewuL3mh4AZ+lQdmA/4EaaTTIhVMR3xFx5fU6b2CtNLiGb/0=,iv:IW09B1EE1OupMCOvv13MXRYiMsD4VmIfyYONUyrPX1c=,tag:3ankeLOaDJkwRUGCd72DuA==,type:str]
|
||||
borg-ssh-key: ENC[AES256_GCM,data:ir25XfzLBb/H/YWzxP501hCaLBB4jpiLW7WUcnvguzosT9QeOtBdJ0WB1IndEMtiEgQyE9kyGOJ3QJwzbQNkX6CG96Uzt2mKw8gw8ayUqC+B9zR8eIRYiDKOYs+YREVo7nA5pLLzIc/9jaRicDFMmw1Thmk7UUJKB1DNV49nU9K+nAfrCzk7ZQieY8oaasFD0cvNb4Ndj6f9PWSXkNBwKK52ig4hDeNBs1bdy8nDE8VqlwOo8H2DcYMzdMjKCZDBRccy8NofHEhakCW5OdliFyIHsLkcBHca3Bp46JN7wbo8avPPd9bXGuRiOSWYq50RcyZUovnB3g7Dk3swCyuiFztnStN63+g7ZnGFdYLYDYfuDSPN1W2HCkknmaoT910VNE8sEAMyfXk4tqJv4eW4qmFk2UwPlRCrsk9GtdRQ5wm8muNPHEZ8s2dGkn4WDcjy7SUpgF4UJJZV8iJe74W9BK1Ef+AWWNsNjYfZde3iw1+8Fz1u65u4seFWqQMok/noADpszbpk+YYRoM+5D/YVMx+KeDtoFqnZfULM/BqvAqdYYZtRzojndeNW6Ea4sxDE+XQ5b1OwGFlNAlnuS1fYYPvKojrKNgT9KMwbsvPijU5vFddY8Qpz2h6GKEv/OW87j5UeyDW4l32lvyawBuzczBfiFgCElggGSZHM5rjE4Deb06eQleTioZ79EDXTv5UsPQ6Bc1v5Wvnu8DvxJe4B10vxH70JIGIlmjwo0yhMkxDTN7BkAGQC0QAPhwtURDq+XVufQNjlTUjjH1Q1E4u0Vy19clMs8SStqFeMN02BfWZdS9mbueF5Ehc+8wTfAs43CQFublJ4wfG1PzEbqj9LZdimFe4hCnE2y6Gbf591shugVSAMA3UXQUuvFQmm69i9gz88YSYrkLlVStM+dtXCugZho72xgHtnI+5o19wuoZPRoxe47W0T2kJZZeomtqoAsSo5yr5JeYzYdaHYcK2fgRY0HWgWzOxnVEfX/gRPR3b20Tko6yp9lIDECkXVDQSxptxqIYk+VuETnD9YF2OpYeHZLGoo9OLdEHVZRcuy1S74aAOJGO9SAHLw3eukxG//AZlwcOYjOsYDVt3BjhYZEkYCLg8GkAqV/7bGsxT7pgckNEB2NRYQI9ckqEcEw9CdkYre67HwfPCvAble68VnRzgp+v5s0koVjTURF9FTxvVOXQEbvSpY828idyx6nOaAIHoqpIOFz4jsGE9L4FKamqnlnjzj2Ri/MboT9JQBj8bnIF/ej+dQGpfqZo7zqtu3d0B/9e0xuVTcqI9Bxlqn3D4108I8R37Ctr5OFKloeOZ8HHMsHcBUAzZC6/fWrOspru14YHW2YNj8nBxHve/P3oiTQ/nlXLcBGLoFfI+hOpofccQB8FnkKfTbLSRUGrGY6NJt9RCnZgm2+RUgel77XpsCsT/Q5ZGclBdyk8mSaqVjiNyHCbCV5tF/tWnuvf859S0tcmqbJ0FhIRAvwxFucmfi6FSPX5HEMdRbNV7szrHKSX60u7YA2DBBzv3c/+C2bxq70vhwFelqz7FqpVKwebbE4/a59lZpibzefCoji/TPDJB62/ox5NHHE5qenv7IPcEj3dEmdasbrApAw1UFsFlRCnlg4JIYley/AQx7OzUSImqkG8JWvSJ4JXijhsr9dPFR/cb0srUO88aFNh/ZUQhELZCVnzAsF81Y4w6LTGApMfUVN/yx9MqENGvObywzMls1UJphvzDZzvb+Ue6eqELogN1QcEI/WOirwVtJO6E7IevEtK4xxWsLfRHVjtbLc4QjCWuiyszAPTTttKJ+iC2h14Wj1XoiMpWRiVnj+jI9iWRen96P4glYEfuCYQS6vbGkNDEoZt/FnkLJDbLdjXatmhUoRpvExOtp26ULR/f1lwzLMJBt1qPvhuGur1ru2B1e8+AVte1Cfjmk+xrnxNwkTFLGe89Qjd77wPyQv9h0YrhZ6uDi2zLemhZs2LjW5ZvzV5P4thMDxkhezJHatPHAGa8OfclJOyrRTyW2azdz2A45MNzZtCQcnQdQxBXf+XRskLnhquZfgv66hFITjuF/HeI9cq4HJcrgaOcVj+tBdK1bTCyL2kqKkCpSCbh/Pv6FuAlDXgLjsWwZgOKz8gfTIfXMapPLDYVTbS/PPPABylZflN98FFyeFDHB3Fwn1a6qAJ0mC7+4sowVZ1DIAoflaHqNs5TXyb3KeZGgXj5ZQwhv1z6NySvOS6cHxx0PvkFo99T1NHztxCRERNvBdWSwsr32DTwEvZo5iNPy3lvKI5A+rXc7jlQkUbufbddtLw2iPtt29XyMDOysK010fXzzQRjaz4R8ZaDtHNjqPrynvqFPXRB0VSIrwXS2utU7bmD+0dGX26t9k5qRBi7Gm+iZNKGMnSRsm17bVk5o8q0tb1P1eGL9mexZJJvxolfXVFJJtR8m6vLmUX1LSht/JhoWFElrINl0hviwd1dehmTqdQqWz5/imjF+pVOasrt7XVZ+7T/rDpuwNl375qSZptM1pMUExJ3CvzigpnarXXQxEBYkf0haGvQwPWNVHe/bR/1VooSQkH/mGg1g+rcTqp4yB5hsFu1lNK4ph04WQOqaafg40HBv6e5cOjLkFdEtYNpjyd6sRS+WHk7zzFlfPVlzijq8f+oDH9ALRzNnL1Y2DrX53wx4dBBWvxE1Yhb6Kj6Er4ZDiRLLXo+wJOGCpnNTPJMVaYskZ+LN2e9nS2/ZwbsNBnPHxSqCc1oP4d3yXH0j90VKnWg79aIEOagRvTF/9F6SkkGL9zVuUnoVSPwq97etWWtjGoEORMGY7jkGOK+U391p7Z69Hrv2AejS1BoSDeGcxXasFvINpmc+Hl2c+zOlFBySu2zA39cVlcStUFICA5GCmE5Eum4ED9DXP6RAuicD7YE0qSKbMkfLxIWMCZ6wBcwVUjdt43SI/ZqdpDm3E1kTRg07dE0R091rtfzEiIwBM4xFPJBafOx0L/Do61YMOHGzi6wgIQO7P7wIslv62M8MD1KKa/eH0tE2vhG/GyEGtKkg3P9vZRJwioifyshS1hvrt5pLinuCaDYyqMAl8Ro0OOm8di7+mBvXib0nRLfW7wBGDA4ADTipizNWAmbspQQl89kH5gdxgXO5U+N/qc0zXbpB+qeHVkPIK1DmrJ8pHLOE8mOpLy7eHUsSku/WtTt/RP4pcDbBU/43MCbk7NXKu/LjKjkQBjAL49LxnYmhEU7X//jtwSPE3gdx0x+wRJxzlbehM6rpfDRV5WQGSFf7yjLc/Ga1KwsgVdAstJEzDdv2vWSsjNzfJvHVBLrQPIC9fggi3DeLiHTAryCUcLUhNj4xtZWhSS1qmx07E4VzfjDJLMOsLY0vlimgngZ3YYCjC3Sw0frfQH2SZvmbLd3XfBdud67ZaMUobcRhnKzQnilldyD1jWVWLdVTup4RVxT4GYek9nmYflzpWWmwbXatz9Sgcw==,iv:9E1uiPqM3Hh4KWtL8haxm6PRm2VPc+DggrA135FvfB8=,
tag:QSOgzVH9IBMgZxJvUhvY2w==,type:str]
|
||||
ddclient: ENC[AES256_GCM,data:EaXjXS/bwL3S/Fr+rzQ7dXA1eIzeFpHH7H+SvoNhVSg=,iv:3BzjnJG5yT1W8ob2nm0oUlr+sSJ73W/ctl48xyxeeWM=,tag:TqKSwfxF0V1v5T8VT/qblw==,type:str]
|
||||
wrwks_vpn_key: ENC[AES256_GCM,data:gGipXC8JJO59b4KWMSo0+r761raQl7RzgBuUbXmPEKlZR21bs5XRAQalzDCFNtjcpNkXiGqAHCLkDTtjPagMsw==,iv:MH1EBJEOdQDEgm9E0F884fynhsH8KiS5QSc605XbASQ=,tag:FUM1eptHS0rpt6ILyQjGOg==,type:str]
|
||||
wg_cloonar_key: ENC[AES256_GCM,data:Dtp6I5J0jU5LLVwEFU4DFCpUngPRmFMebGXnk2oSwsKtsir/DtRBFG7ictM=,iv:1Abx/EAZRJrRQURljofzUYDgJpuREriX0nSrFbH5Npw=,tag:l4uFl9Uc+W0XeLVfLGmgZA==,type:str]
|
||||
wg_epicenter_works_key: ENC[AES256_GCM,data:LeLjfwfaz+loWyHYRgIMIPzHzlOnhl9tluKcQFgdes6r+deft1JfnUzDuF0=,iv:DKrc3I+U2hWDH8nnc8ZQeaVtA1eVXu7SXdTn1fxHoH4=,tag:V0PL0GrL2NEPVslAZa801A==,type:str]
|
||||
wg_epicenter_works_psk: ENC[AES256_GCM,data:Den3NDWdP013Or6/2Vll1igUahuRSNW4hu+nDa5vkr93bbveQTaWFT4TD4U=,iv:r3UsD3+3lUIP2X3Grti7wpXTQBXtu1/MdrycEmpZfsI=,tag:ghbAcxmjGVOe9jCZsmFzjA==,type:str]
|
||||
wg_ghetto_at_key: ENC[AES256_GCM,data:OIHmoy3SpIi9aefZnZ1PzpyHbEso18ceoTULf2eQkx1rJbaxC6PD1lma7eQ=,iv:u0eFjHHOBzPTmBvBEQsYY5flcBayiAQKd6e7RyiPwJI=,tag:731C9wvv8bA5fuuQq+weVQ==,type:str]
|
||||
gitea-mailer-password: ENC[AES256_GCM,data:M4qCWNt1oQVJzxThIjocm2frwuVMyx+69TBpke25RwxJxEQnvHL1CM579OVroTm7+gGE/oOJqAwDIepfiDtyM1xm,iv:jayFZMbu3uDimS/rIKZSeoU0MsYwWp880iEMs1oQE4k=,tag:qGDncRkyuCWaELhcxUrqtQ==,type:str]
|
||||
gitea-runner: ENC[AES256_GCM,data:NYG3qRLiMjmfA+oHYBXBbxpuX2ZjB/VgvLaS7yr5kJeDN/NukB/B3OZcEfsUWgbBS5IsLENESngWTFmK4W3htN4lSqdg/g4UsUr20beNov+pbyPN05rkBYmSCZZFwZ1L9POEE4GF4LuuoNpDlWIw0mrA8oV8MoI4W5QS2IGranBTIQQaYXU5TEGYa4XMVo4oC75iuH6DIq1KD6OgFAfMhm/wlbP8CP/Iaw2K8CNPxktk93pm3OSmggf22Z4JPEnvV25sc9iBkxLkDk9FXYFys0g=,iv:UzL5ncVOC/loJwcFSG1QJHnzLp3il4Hf3qDwLWxrIlo=,tag:w0Zn/E+02KyAsPXZdOLrew==,type:str]
|
||||
gitea-runner-token: ENC[AES256_GCM,data:HpBjLS10w78ihbnAUrlCRGvwrXLBYKH5v/P7XggoUSWLoAazSVQArABxaK7PJas=,iv:q3Y6jV0gmug06O0EYqGVyIJ4AvMGr2ydwY17YKxo0Qw=,tag:Ws5HLbdaeYGGXzDZW/FX4w==,type:str]
|
||||
home-assistant-ldap: ENC[AES256_GCM,data:uZEPbSnkgQYSd8ev6FD8TRHWWr+vusadtMcvP7KKL2AZAV0h1hga5fODN6I5u0DNL9hq2pNM+FwU0E/svWLRww==,iv:IhmUgSu34NaAY+kUZehx40uymydUYYAyte1aGqQ33/8=,tag:BKFCJPr7Vz4EG78ry/ZD7g==,type:str]
|
||||
home-assistant-secrets.yaml: ENC[AES256_GCM,data:m7uOVo7hPk/RmqqRS6y7NKoMKsR9Bdi1ntatsZdDOAbJMjZmZL2FgPEHi/zF73zCfRfTOca3dwpulR3WXZ9Ic1sbUIggmusJMg4Gellw1CUhx7SbQN5nieAbPbB9GVxMuV4OakD1u7Swz8JggDT6IwojSnuD5omCRCyUH1wvKB+Re59q6EStderlm5MJNVFlVrbKVbLKLcw4yRgTh34BGnTTjcJmgSlQjO1ciu2B7YQmdl0Fw6d8AdbEzgB5TFG5ONc85UhJDE8Wlw==,iv:GCtpcVChN2UMWtfnWURozCfVj2YbRPqp/bH4Jjntybs=,tag:pcxP7gTBtXMNT5iyW5YXTw==,type:str]
|
||||
matrix-shared-secret: ENC[AES256_GCM,data:67imd3m6WBeGP/5Msmjy8B6sP983jMyWzRIzWgNVV5jZslX+GBJyEYzm3OTDs1iTZf4ScvuYheTH0QFPfw==,iv:7ElCpESWumbIHmmFaedcpkFm5M58ZT3vW9wb9e1Sbh4=,tag:wr4FIymtJBtCerVqae+Xlw==,type:str]
|
||||
palworld: ENC[AES256_GCM,data:rdqChPt4gSJHS1D60+HJ+4m5mg35JbC+pOmevK21Y95QyAIeyBLVGhRYlOaUcqdZM2e4atyTTSf6z4nHsm539ddCbW7J2DCdF5PQkrAGDmmdTVq+jyJAT8gTrbXXCglT1wvFYY5dbf2NKA4ASJIA8bdVNuwRZU0CtFiishzLuc9m8ZcGCNwQ/+xkMZgkUAHYRlEJAZyMpXR6KkFftiR05JRAFczD4N7GXPPe+vyvgXg7QBGtf20Qd4SGBUw0zI/SNTRmifHUuc4Z6+Fe9JHgvTc3uFcTMVnty0fEuL+a29liaVdAFq8BnqJfc5CNV401ZSUeMbG41lCn1cegP/WChs9J6HXNrhWDgiXa6ln++NoKcfOHIfZVbYOCoOxFR6+YWeBU2+sHmdwI9j5XQf5Ly2hmg12j0Ds2Cn8k4PG5aQP+HT2bedqyxwSt6fi97A0Osnh4ig7+DzYAjSNLewbYLzVdK39VdvB9hqLto+yFS3gAaeYOHwPwtqa+COI85c55lHiyKHlSwPhBqYaaiDu00lQTUzq9R5vz6F/l+T3bUjuna5RryUu8yhnk5DyK834KycTOg4ETcZTqro6prfiEBxc+Utsc9JvEtZgwFv6fsVLOu7nHxuiYuvseZ4YA8LlYdwPJboMPO2XsuhwWtT1uz/rh2orH7/vsXvzA/kF8NFemWBEMVLYA8byC5ze8doiGDYp4T5AAf10nJB1ceQ==,iv:gs78fxhvo9KlTaR5nzs12/LdgPChSFPHD2k4VQp3ARo=,tag:lpWBOi9xh2cWkS+71KD/UQ==,type:str]
|
||||
ark: ENC[AES256_GCM,data:YYGyzoVIKI9Ac1zGOr0BEpd3fgBsvp1hSwAvfO07/EQdg8ufMWUkNvqNHDKN62ZK5A1NnY3JTA1p4gyZ4ryQeAOsbwqU1GSk2YKHFyPeEnpLz/Ml82KMsv7XPGXuKRXZ4v3UcLu0R8k1Q0gQsMWo4FjCs3FF5mVtJG/YWxxbCYHoBLJ/di5p0DgjuFgJBQknYBpuLzr+yIoeqEyN7XcGYAJO53trEJuOOxLILULifkqISHjZ66i5F1fHW0iUdRbmeWV4aOAeOrsQqXYv,iv:gJwV5ip84zHqpU0l0uESfWWOtcgihMvEEdLaeI+twcU=,tag:sy8udVQsKxV/jOqwhJmWAg==,type:str]
|
||||
firefox-sync: ENC[AES256_GCM,data:uAJAdyKAuXRuqCFl8742vIejU5RnAPpUxUFCC0s0QeXZR5oH2YOrDh+3vKUmckW4V1cIhSHoe+4+I4HuU5E73DDrJThfIzBEw+spo4HXwZf5KBtu3ujgX6/fSTlPWV7pEsDDsZ0y6ziKPADBDym8yEk0bU9nRedvTBUhVryo3aolzF/c+gJvdeDvKUYa8+8=,iv:yuvE4KG7z7Rp9ZNlLiJ2rh0keed3DuvrELzsfJu4+bs=,tag:HFo1A53Eva31NJ8fRE7TlA==,type:str]
|
||||
sops:
|
||||
kms: []
|
||||
gcp_kms: []
|
||||
azure_kv: []
|
||||
hc_vault: []
|
||||
age:
|
||||
- recipient: age14grjcxaq4h55yfnjxvnqhtswxhj9sfdcvyas4lwvpa8py27pjy2sv3g6v7
|
||||
enc: |
|
||||
-----BEGIN AGE ENCRYPTED FILE-----
|
||||
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB2dUdkQ2MrUTRtV3RIWHo1
|
||||
T0F2cEh2czgrNVB0ZVlTa1hEVllJZUlkNGljCms3R1c1U0F6cnRaWk54M1FJeWwz
|
||||
cGltRDN2ZHRrWnBWY1o3MHNnK2dkM00KLS0tIG0zL0svWEtweW14Q1V1WXlWMEVS
|
||||
cy9IdTNTeDIrVUl1WWk2dnplcVZaZFkKAXoWKjSryB9jpoSf/KSUsmSfDb3IJjY3
|
||||
3BDOyoQq52zimP27iy8jhEPUwcLfL093FrRYFraOTOzL19uKDr1Kaw==
|
||||
-----END AGE ENCRYPTED FILE-----
|
||||
- recipient: age16veg3fmvpfm7a89a9fc8dvvsxmsthlm70nfxqspr6t8vnf9wkcwsvdq38d
|
||||
enc: |
|
||||
-----BEGIN AGE ENCRYPTED FILE-----
|
||||
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBzL1JLNzhNa0JZV1lxS295
|
||||
VExnbEhTeVZLMDRPUHJtTVRsa29QamNseFZFCkdnNEZ0ZHYxNFBTVC9MUVhZN1ZX
|
||||
bHpsRkMvWlFBZkwxTG9DZEFwRTdSbWsKLS0tIC9QS3ZRZnZiNXl5VDJ4OWhrT1U5
|
||||
di9kS05JMENYRHNYSHBQbmJZaHZKZ2cKcQBBKuqR748ReULbmOWxLV+4l/kI6WC5
|
||||
PDPTLzeVPzlFXKGXq/OwkNbgzN0Pq9HdOqNImheb6Qdi8X5CLHVT1Q==
|
||||
-----END AGE ENCRYPTED FILE-----
|
||||
- recipient: age1v6p8dan2t3w9h94fz4flldl32082j3s9x6zqq7u5j66keth9aphsd6pvch
|
||||
enc: |
|
||||
-----BEGIN AGE ENCRYPTED FILE-----
|
||||
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBqc1dRT3h4RDlzdi9UWHEr
|
||||
KzJmck9Edk50YmxJOUFZQ1F2aEZ0aEdNL3drCkFyaDBQVzg4UlYvd0hWL0dUNXlz
|
||||
eVpFUUJXY2sxNHBqbFZjUlZJbVV6ZDQKLS0tIDR4TUF1N2VYMnVXMUZiMHFhQ202
|
||||
VkhkRWF6MWZwbmVzRFBhS01pSnNoaEkKjrXaqL1OoiuWvMIRek6ozICxK4bzNLHc
|
||||
bP0G3q86VR0uV+oQFjmSx0OGphAEjv1KDPpCIdJ5P8o3JUs/crHUEQ==
|
||||
-----END AGE ENCRYPTED FILE-----
|
||||
- recipient: age1wq82xjyj80htz33x7agxddjfumr3wkwh3r24tasagepxw7ka893sau68df
|
||||
enc: |
|
||||
-----BEGIN AGE ENCRYPTED FILE-----
|
||||
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBTZmVKVTAxUVE2dmRoSzdh
|
||||
QkZGOW9CNUpobEVIY25acmdaUDVYbWY3TURZCjZ4a3RsajBCajdiTkh5NGNhZi8y
|
||||
RC9VRGJYOTJzWjA3cld1Skw5TXUzbzAKLS0tIDE2RlZLZlNrb1F2VjJPK3R3di80
|
||||
ci8wRmV1clZlaHp2cDRXZSs3VzdBUnMKfnvxSasz18LrF7ZZOQjAOVAVsWGcF7Ax
|
||||
rYe9dM46Mbb2measOgXlwqKVqINcvhVxKdgOiJJ0ZdGtNeNsmyBRgA==
|
||||
-----END AGE ENCRYPTED FILE-----
|
||||
- recipient: age12msc2c6drsaw0yk2hjlaw0q0lyq0emjx5e8rq7qc7ql689k593kqfmhss2
|
||||
enc: |
|
||||
-----BEGIN AGE ENCRYPTED FILE-----
|
||||
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBWK25sZGh0TVdGM0lUU1Fj
|
||||
T1g3cXByYTVzTTFUeFBYMEJjelRLVXFaUVN3CnQyUG5CSG1FWVJoYnNYaWFOYjBU
|
||||
dGh6V29Bb01iTXk5YmRvQ0V6SUtSVGMKLS0tIEhVeVRQbVUzcWtFMWpDNjdWeTAv
|
||||
VDIzd0RIUlAya1lscTJ6bUJqMFRtS00K0TJ4ji3UU5G14xNC9Qru9bH9MdEbIJd7
|
||||
rzVp5S1HS+pdpprzOTmqAPFBe87Y6oX2yPauo/3GFTDZtjuVfYCvLA==
|
||||
-----END AGE ENCRYPTED FILE-----
|
||||
lastmodified: "2024-11-20T21:39:00Z"
|
||||
mac: ENC[AES256_GCM,data:JCFvFwSqnAQCOB76n5pfQsdsaod8bBiVZ2VY+WWBDWi84gQByhqy808E2ZZJSJ1/amUi8dNBeOPNWZIGdieuWJyatrqjWziAl7gXx5u35i77sS6hAD+G/Fc/elgRbjc0VIbplZ7UxBmwo3vkVpI4RqQiQv63MvKHI+TkoY8vFUM=,iv:uy50x8FqqDW7hCLZeHfhFB/dxa3N6kM2Vj9waAZJngg=,tag:Wt1FG0kW4VFZ2fvvAC0T4A==,type:str]
|
||||
pgp: []
|
||||
unencrypted_suffix: _unencrypted
|
||||
version: 3.8.1
|
||||
1
hosts/fw-new/utils
Symbolic link
@@ -0,0 +1 @@
|
||||
../../utils
|
||||
@@ -12,7 +12,9 @@
|
||||
# ./utils/modules/netdata.nix
|
||||
|
||||
# fw
|
||||
./modules/network-prefix.nix
|
||||
./modules/networking.nix
|
||||
./modules/setupnetwork.nix
|
||||
./modules/firewall.nix
|
||||
./modules/dhcp4.nix
|
||||
./modules/unbound.nix
|
||||
@@ -56,6 +58,8 @@
|
||||
./hardware-configuration.nix
|
||||
];
|
||||
|
||||
networkPrefix = "10.42";
|
||||
|
||||
nixpkgs.overlays = [
|
||||
(import ./utils/overlays/packages.nix)
|
||||
];
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
];
|
||||
extraOptions = [
|
||||
"--network=server"
|
||||
"--ip=10.42.97.201"
|
||||
"--ip=${config.networkPrefix}.97.201"
|
||||
];
|
||||
};
|
||||
};
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
};
|
||||
extraOptions = [
|
||||
"--network=server"
|
||||
"--ip=10.42.97.22"
|
||||
"--ip=${config.networkPrefix}.97.22"
|
||||
"--device=/dev/ttyACM0"
|
||||
"--hostname=deconz"
|
||||
];
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
{ ... }: {
|
||||
{ config, ... }:
|
||||
{
|
||||
services.kea.dhcp4 = {
|
||||
enable = true;
|
||||
settings = {
|
||||
@@ -23,15 +24,15 @@
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.96.100 - 10.42.96.240";
|
||||
pool = "${config.networkPrefix}.96.100 - ${config.networkPrefix}.96.240";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.96.0/24";
|
||||
subnet = "${config.networkPrefix}.96.0/24";
|
||||
interface = "lan";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.96.1";
|
||||
data = "${config.networkPrefix}.96.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name";
|
||||
@@ -43,23 +44,23 @@
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "10.42.96.1";
|
||||
data = "${config.networkPrefix}.96.1";
|
||||
}
|
||||
];
|
||||
reservations = [
|
||||
{
|
||||
hw-address = "04:7c:16:d5:63:5e";
|
||||
ip-address = "10.42.96.5";
|
||||
ip-address = "${config.networkPrefix}.96.5";
|
||||
server-hostname = "omada.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "30:05:5c:56:62:37";
|
||||
ip-address = "10.42.96.100";
|
||||
ip-address = "${config.networkPrefix}.96.100";
|
||||
server-hostname = "brn30055c566237.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "24:df:a7:b1:1b:74";
|
||||
ip-address = "10.42.96.101";
|
||||
ip-address = "${config.networkPrefix}.96.101";
|
||||
server-hostname = "rmproplus-b1-1b-74.cloonar.com";
|
||||
}
|
||||
];
|
||||
@@ -68,15 +69,15 @@
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.97.100 - 10.42.97.240";
|
||||
pool = "${config.networkPrefix}.97.100 - ${config.networkPrefix}.97.240";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.97.0/24";
|
||||
subnet = "${config.networkPrefix}.97.0/24";
|
||||
interface = "server";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.97.1";
|
||||
data = "${config.networkPrefix}.97.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name";
|
||||
@@ -84,38 +85,38 @@
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "10.42.97.1";
|
||||
data = "${config.networkPrefix}.97.1";
|
||||
}
|
||||
];
|
||||
reservations = [
|
||||
{
|
||||
hw-address = "1a:c4:04:6e:29:bd";
|
||||
ip-address = "10.42.97.2";
|
||||
ip-address = "${config.networkPrefix}.97.2";
|
||||
server-hostname = "omada.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "02:00:00:00:00:03";
|
||||
ip-address = "10.42.97.5";
|
||||
ip-address = "${config.networkPrefix}.97.5";
|
||||
server-hostname = "web-02.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "02:00:00:00:00:04";
|
||||
ip-address = "10.42.97.6";
|
||||
ip-address = "${config.networkPrefix}.97.6";
|
||||
server-hostname = "matrix.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "ea:db:d4:c1:18:ba";
|
||||
ip-address = "10.42.97.50";
|
||||
ip-address = "${config.networkPrefix}.97.50";
|
||||
server-hostname = "git.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "c2:4f:64:dd:13:0c";
|
||||
ip-address = "10.42.97.20";
|
||||
ip-address = "${config.networkPrefix}.97.20";
|
||||
server-hostname = "home-assistant.cloonar.com";
|
||||
}
|
||||
{
|
||||
hw-address = "1a:c4:04:6e:29:02";
|
||||
ip-address = "10.42.97.25";
|
||||
ip-address = "${config.networkPrefix}.97.25";
|
||||
server-hostname = "deconz.cloonar.com";
|
||||
}
|
||||
];
|
||||
@@ -123,15 +124,15 @@
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.101.100 - 10.42.101.240";
|
||||
pool = "${config.networkPrefix}.101.100 - ${config.networkPrefix}.101.240";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.101.0/24";
|
||||
subnet = "${config.networkPrefix}.101.0/24";
|
||||
interface = "infrastructure";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.101.1";
|
||||
data = "${config.networkPrefix}.101.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name";
|
||||
@@ -139,12 +140,12 @@
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "10.42.101.1";
|
||||
data = "${config.networkPrefix}.101.1";
|
||||
}
|
||||
{
|
||||
name = "capwap-ac-v4";
|
||||
code = 138;
|
||||
data = "10.42.97.2";
|
||||
data = "${config.networkPrefix}.97.2";
|
||||
}
|
||||
];
|
||||
reservations = [
|
||||
@@ -153,15 +154,15 @@
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.99.100 - 10.42.99.240";
|
||||
pool = "${config.networkPrefix}.99.100 - ${config.networkPrefix}.99.240";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.99.0/24";
|
||||
subnet = "${config.networkPrefix}.99.0/24";
|
||||
interface = "multimedia";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.99.1";
|
||||
data = "${config.networkPrefix}.99.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name";
|
||||
@@ -169,43 +170,43 @@
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "10.42.99.1";
|
||||
data = "${config.networkPrefix}.99.1";
|
||||
}
|
||||
];
|
||||
reservations = [
|
||||
{
|
||||
hw-address = "c4:a7:2b:c7:ea:30";
|
||||
ip-address = "10.42.99.10";
|
||||
ip-address = "${config.networkPrefix}.99.10";
|
||||
hostname = "metz.cloonar.multimedia";
|
||||
}
|
||||
{
|
||||
hw-address = "f0:2f:9e:d4:3b:21";
|
||||
ip-address = "10.42.99.11";
|
||||
ip-address = "${config.networkPrefix}.99.11";
|
||||
hostname = "firetv-living";
|
||||
}
|
||||
{
|
||||
hw-address = "bc:33:29:ed:24:f0";
|
||||
ip-address = "10.42.99.12";
|
||||
ip-address = "${config.networkPrefix}.99.12";
|
||||
hostname = "ps5";
|
||||
}
|
||||
{
|
||||
hw-address = "e4:2a:ac:32:3f:79";
|
||||
ip-address = "10.42.99.13";
|
||||
ip-address = "${config.networkPrefix}.99.13";
|
||||
hostname = "xbox";
|
||||
}
|
||||
{
|
||||
hw-address = "98:b6:e9:b6:ef:f4";
|
||||
ip-address = "10.42.99.14";
|
||||
ip-address = "${config.networkPrefix}.99.14";
|
||||
hostname = "switch";
|
||||
}
|
||||
{
|
||||
hw-address = "f0:2f:9e:c1:74:72";
|
||||
ip-address = "10.42.99.21";
|
||||
ip-address = "${config.networkPrefix}.99.21";
|
||||
hostname = "firetv-bedroom";
|
||||
}
|
||||
{
|
||||
hw-address = "30:05:5c:56:62:37";
|
||||
ip-address = "10.42.99.100";
|
||||
ip-address = "${config.networkPrefix}.99.100";
|
||||
server-hostname = "brn30055c566237";
|
||||
}
|
||||
];
|
||||
@@ -213,15 +214,15 @@
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.254.10 - 10.42.254.254";
|
||||
pool = "${config.networkPrefix}.254.10 - ${config.networkPrefix}.254.254";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.254.0/24";
|
||||
subnet = "${config.networkPrefix}.254.0/24";
|
||||
interface = "guest";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.254.1";
|
||||
data = "${config.networkPrefix}.254.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
@@ -232,15 +233,15 @@
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.100.100 - 10.42.100.240";
|
||||
pool = "${config.networkPrefix}.100.100 - ${config.networkPrefix}.100.240";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.100.0/24";
|
||||
subnet = "${config.networkPrefix}.100.0/24";
|
||||
interface = "smart";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.100.1";
|
||||
data = "${config.networkPrefix}.100.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name";
|
||||
@@ -248,29 +249,29 @@
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "10.42.100.1";
|
||||
data = "${config.networkPrefix}.100.1";
|
||||
}
|
||||
];
|
||||
reservations = [
|
||||
{
|
||||
hw-address = "fc:ee:28:03:63:e9";
|
||||
ip-address = "10.42.100.148";
|
||||
ip-address = "${config.networkPrefix}.100.148";
|
||||
server-hostname = "k1c";
|
||||
}
|
||||
{
|
||||
hw-address = "cc:50:e3:bc:27:64";
|
||||
ip-address = "10.42.100.112";
|
||||
ip-address = "${config.networkPrefix}.100.112";
|
||||
server-hostname = "Nuki_Bridge_1A753F72";
|
||||
}
|
||||
|
||||
{
|
||||
hw-address = "34:6f:24:f3:af:ad";
|
||||
ip-address = "10.42.100.137";
|
||||
ip-address = "${config.networkPrefix}.100.137";
|
||||
server-hostname = "daikin86604";
|
||||
}
|
||||
{
|
||||
hw-address = "34:6f:24:c1:f8:54";
|
||||
ip-address = "10.42.100.139";
|
||||
ip-address = "${config.networkPrefix}.100.139";
|
||||
server-hostname = "daikin53800";
|
||||
}
|
||||
];
|
||||
|
||||
@@ -13,8 +13,8 @@ in {
|
||||
ephemeral = false; # because of ssh key
|
||||
privateNetwork = true;
|
||||
hostBridge = "server";
|
||||
hostAddress = "10.42.97.1";
|
||||
localAddress = "10.42.97.51/24";
|
||||
hostAddress = "${config.networkPrefix}.97.1";
|
||||
localAddress = "${config.networkPrefix}.97.51/24";
|
||||
bindMounts = {
|
||||
"/run/secrets/firefox-sync" = {
|
||||
hostPath = "/run/secrets/firefox-sync";
|
||||
@@ -30,11 +30,11 @@ in {
|
||||
hostName = "firefox-sync";
|
||||
useHostResolvConf = false;
|
||||
defaultGateway = {
|
||||
address = "10.42.97.1";
|
||||
address = "${config.networkPrefix}.97.1";
|
||||
interface = "eth0";
|
||||
};
|
||||
firewall.enable = false;
|
||||
nameservers = [ "10.42.97.1" ];
|
||||
nameservers = [ "${config.networkPrefix}.97.1" ];
|
||||
};
|
||||
|
||||
services.nginx.enable = true;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
{ pkgs, ... }: {
|
||||
{ config, pkgs, ... }: {
|
||||
networking = {
|
||||
firewall.checkReversePath = false;
|
||||
nat.enable = false;
|
||||
@@ -37,8 +37,8 @@
|
||||
iifname { "wan", "multimedia" } icmp type { echo-request, destination-unreachable, time-exceeded } counter accept comment "Allow select ICMP"
|
||||
|
||||
# Accept mDNS for avahi reflection
|
||||
iifname "server" ip saddr 10.42.97.20/32 tcp dport { llmnr } counter accept
|
||||
iifname "server" ip saddr 10.42.97.20/32 udp dport { mdns, llmnr } counter accept
|
||||
iifname "server" ip saddr ${config.networkPrefix}.97.20/32 tcp dport { llmnr } counter accept
|
||||
iifname "server" ip saddr ${config.networkPrefix}.97.20/32 udp dport { mdns, llmnr } counter accept
|
||||
|
||||
# Allow all returning traffic
|
||||
ct state { established, related } counter accept
|
||||
@@ -81,15 +81,15 @@
|
||||
iifname "multimedia" oifname "server" tcp dport { 1704, 1705 } counter accept
|
||||
iifname "lan" oifname "server" udp dport { 5000, 5353, 6001 - 6011 } counter accept
|
||||
# avahi
|
||||
iifname "server" ip saddr 10.42.97.20/32 oifname { "lan" } counter accept
|
||||
iifname "server" ip saddr ${config.networkPrefix}.97.20/32 oifname { "lan" } counter accept
|
||||
|
||||
# smart home coap
|
||||
iifname "smart" oifname "server" ip daddr 10.42.97.20/32 udp dport { 5683 } counter accept
|
||||
iifname "smart" oifname "server" ip daddr 10.42.97.20/32 tcp dport { 1883 } counter accept
|
||||
iifname "smart" oifname "server" ip daddr ${config.networkPrefix}.97.20/32 udp dport { 5683 } counter accept
|
||||
iifname "smart" oifname "server" ip daddr ${config.networkPrefix}.97.20/32 tcp dport { 1883 } counter accept
|
||||
|
||||
# Forward to git server
|
||||
oifname "server" ip daddr 10.42.97.50 tcp dport { 22 } counter accept
|
||||
oifname "server" ip daddr 10.42.97.5 tcp dport { 80, 443 } counter accept
|
||||
oifname "server" ip daddr ${config.networkPrefix}.97.50 tcp dport { 22 } counter accept
|
||||
oifname "server" ip daddr ${config.networkPrefix}.97.5 tcp dport { 80, 443 } counter accept
|
||||
|
||||
# lan and vpn to any
|
||||
iifname { "lan", "server", "vserver", "wg_cloonar" } oifname { "lan", "vb-*", "vm-*", "server", "vserver", "infrastructure", "multimedia", "smart", "wg_cloonar", "guest", "setup" } counter accept
|
||||
@@ -100,11 +100,11 @@
|
||||
# accept palword server
|
||||
iifname { "wan", "lan" } oifname "podman0" udp dport { 8211, 27015 } counter accept comment "palworld"
|
||||
# forward to ark server
|
||||
oifname "server" ip daddr 10.42.97.201 tcp dport { 27020 } counter accept comment "ark survival evolved"
|
||||
oifname "server" ip daddr 10.42.97.201 udp dport { 7777, 7778, 27015 } counter accept comment "ark survival evolved"
|
||||
oifname "server" ip daddr ${config.networkPrefix}.97.201 tcp dport { 27020 } counter accept comment "ark survival evolved"
|
||||
oifname "server" ip daddr ${config.networkPrefix}.97.201 udp dport { 7777, 7778, 27015 } counter accept comment "ark survival evolved"
|
||||
|
||||
# firefox-sync
|
||||
oifname "server" ip daddr 10.42.97.51 tcp dport { 5000 } counter accept comment "firefox-sync"
|
||||
oifname "server" ip daddr ${config.networkPrefix}.97.51 tcp dport { 5000 } counter accept comment "firefox-sync"
|
||||
|
||||
# allow all established, related
|
||||
ct state { established, related } accept comment "Allow established traffic"
|
||||
@@ -136,21 +136,22 @@
|
||||
content = ''
|
||||
chain prerouting {
|
||||
type nat hook prerouting priority filter; policy accept;
|
||||
iifname "server" ip daddr 10.42.96.255 udp dport { 9 } dnat to 10.42.96.255
|
||||
iifname "wan" tcp dport { 22 } dnat to 10.42.97.50
|
||||
iifname "wan" tcp dport { 80, 443 } dnat to 10.42.97.5
|
||||
iifname "wan" tcp dport { 5000 } dnat to 10.42.97.51
|
||||
iifname { "wan", "lan" } udp dport { 7777, 7778, 27015 } dnat to 10.42.97.201
|
||||
iifname { "wan", "lan" } tcp dport { 27020 } dnat to 10.42.97.201
|
||||
iifname "server" ip daddr ${config.networkPrefix}.96.255 udp dport { 9 } dnat to ${config.networkPrefix}.96.255
|
||||
iifname "wan" tcp dport { 22 } dnat to ${config.networkPrefix}.97.50
|
||||
iifname "wan" tcp dport { 80, 443 } dnat to ${config.networkPrefix}.97.5
|
||||
iifname "wan" tcp dport { 5000 } dnat to ${config.networkPrefix}.97.51
|
||||
iifname { "wan", "lan" } udp dport { 7777, 7778, 27015 } dnat to ${config.networkPrefix}.97.201
|
||||
iifname { "wan", "lan" } tcp dport { 27020 } dnat to ${config.networkPrefix}.97.201
|
||||
}
|
||||
|
||||
# Setup NAT masquerading on external interfaces
|
||||
chain postrouting {
|
||||
type nat hook postrouting priority filter; policy accept;
|
||||
oifname { "wan", "wg_cloonar", "wrwks", "wg_epicenter", "wg_ghetto_at" } masquerade
|
||||
iifname { "wan", "wg_cloonar" } ip daddr 10.42.97.50 masquerade
|
||||
iifname { "wan", "wg_cloonar" } ip daddr 10.42.97.51 masquerade
|
||||
iifname { "wan", "wg_cloonar" } ip daddr 10.42.97.201 masquerade
|
||||
iifname { "lan", "wg_cloonar" } ip daddr ${config.networkPrefix}.110.101 masquerade
|
||||
iifname { "wan", "wg_cloonar" } ip daddr ${config.networkPrefix}.97.50 masquerade
|
||||
iifname { "wan", "wg_cloonar" } ip daddr ${config.networkPrefix}.97.51 masquerade
|
||||
iifname { "wan", "wg_cloonar" } ip daddr ${config.networkPrefix}.97.201 masquerade
|
||||
}
|
||||
'';
|
||||
};
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
let
|
||||
foundry-vtt = pkgs.callPackage ../pkgs/foundry-vtt {};
|
||||
cids = import ../modules/staticids.nix;
|
||||
hostConfig = config;
|
||||
in {
|
||||
users.users.foundry-vtt = {
|
||||
isSystemUser = true;
|
||||
@@ -21,8 +22,8 @@ in {
|
||||
ephemeral = true;
|
||||
privateNetwork = true;
|
||||
hostBridge = "server";
|
||||
hostAddress = "10.42.97.1";
|
||||
localAddress = "10.42.97.21/24";
|
||||
hostAddress = "${hostConfig.networkPrefix}.97.1";
|
||||
localAddress = "${hostConfig.networkPrefix}.97.21/24";
|
||||
bindMounts = {
|
||||
"/var/lib/foundry-vtt" = {
|
||||
hostPath = "/var/lib/foundry-vtt";
|
||||
@@ -34,10 +35,10 @@ in {
|
||||
hostName = "foundry-vtt";
|
||||
useHostResolvConf = false;
|
||||
defaultGateway = {
|
||||
address = "10.42.97.1";
|
||||
address = "${hostConfig.networkPrefix}.97.1";
|
||||
interface = "eth0";
|
||||
};
|
||||
nameservers = [ "10.42.97.1" ];
|
||||
nameservers = [ "${hostConfig.networkPrefix}.97.1" ];
|
||||
};
|
||||
systemd.services.foundry-vtt = {
|
||||
description = "Foundry VTT Server";
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
let
|
||||
cids = import ../modules/staticids.nix;
|
||||
domain = "git.cloonar.com";
|
||||
networkPrefix = config.networkPrefix;
|
||||
|
||||
user = {
|
||||
isSystemUser = true;
|
||||
@@ -27,8 +28,8 @@ in
|
||||
ephemeral = false; # because of ssh key
|
||||
privateNetwork = true;
|
||||
hostBridge = "server";
|
||||
hostAddress = "10.42.97.1";
|
||||
localAddress = "10.42.97.50/24";
|
||||
hostAddress = "${networkPrefix}.97.1";
|
||||
localAddress = "${networkPrefix}.97.50/24";
|
||||
bindMounts = {
|
||||
"/var/lib/gitea" = {
|
||||
hostPath = "/var/lib/gitea/";
|
||||
@@ -55,11 +56,11 @@ in
|
||||
hostName = "git";
|
||||
useHostResolvConf = false;
|
||||
defaultGateway = {
|
||||
address = "10.42.96.1";
|
||||
address = "${networkPrefix}.96.1";
|
||||
interface = "eth0";
|
||||
};
|
||||
firewall.enable = false;
|
||||
nameservers = [ "10.42.97.1" ];
|
||||
nameservers = [ "${networkPrefix}.97.1" ];
|
||||
};
|
||||
|
||||
services.nginx.enable = true;
|
||||
|
||||
@@ -6,6 +6,7 @@ let
|
||||
url = "https://github.com/nixos/nixpkgs/";
|
||||
rev = "41dea55321e5a999b17033296ac05fe8a8b5a257";
|
||||
}) {};
|
||||
networkPrefix = config.networkPrefix;
|
||||
in
|
||||
{
|
||||
users.users.hass = {
|
||||
@@ -35,8 +36,8 @@ in
|
||||
ephemeral = false;
|
||||
privateNetwork = true;
|
||||
hostBridge = "server";
|
||||
hostAddress = "10.42.97.1";
|
||||
localAddress = "10.42.97.20/24";
|
||||
hostAddress = "${networkPrefix}.97.1";
|
||||
localAddress = "${networkPrefix}.97.20/24";
|
||||
extraFlags = [
|
||||
"--capability=CAP_NET_ADMIN"
|
||||
"--capability=CAP_MKNOD"
|
||||
@@ -74,7 +75,9 @@ in
|
||||
};
|
||||
};
|
||||
config = { lib, config, pkgs, ... }: {
|
||||
networkPrefix = networkPrefix;
|
||||
imports = [
|
||||
../network-prefix.nix
|
||||
./3dprinter.nix
|
||||
./ac.nix
|
||||
# ./aeg.nix
|
||||
@@ -103,11 +106,11 @@ in
|
||||
hostName = "home-assistant";
|
||||
useHostResolvConf = false;
|
||||
defaultGateway = {
|
||||
address = "10.42.96.1";
|
||||
address = "${networkPrefix}.96.1";
|
||||
interface = "eth0";
|
||||
};
|
||||
firewall.enable = false;
|
||||
nameservers = [ "10.42.97.1" ];
|
||||
nameservers = [ "${networkPrefix}.97.1" ];
|
||||
};
|
||||
|
||||
environment.systemPackages = [
|
||||
|
||||
@@ -71,6 +71,21 @@
|
||||
action = [
|
||||
{
|
||||
choose = [
|
||||
{
|
||||
conditions = [ "{{ is_state('automation.light_sunset', 'off') }}" ];
|
||||
sequence = [
|
||||
{
|
||||
service = "light.turn_on";
|
||||
target = {
|
||||
entity_id = "{{ trigger.entity_id }}";
|
||||
};
|
||||
data = {
|
||||
brightness_pct = 100;
|
||||
color_temp = 250;
|
||||
};
|
||||
}
|
||||
];
|
||||
}
|
||||
{
|
||||
conditions = [ "{{ state_attr('sun.sun', 'elevation') < 5 and trigger.entity_id == 'light.toilet_lights' }}" ];
|
||||
sequence = [
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
{
|
||||
{ config, lib, ... }: {
|
||||
services.home-assistant.extraComponents = [
|
||||
"wake_on_lan"
|
||||
];
|
||||
@@ -41,7 +41,7 @@
|
||||
service = "wake_on_lan.send_magic_packet";
|
||||
data = {
|
||||
mac = "04:7c:16:d5:63:5e";
|
||||
broadcast_address = "10.42.96.5";
|
||||
broadcast_address = "${config.networkPrefix}.96.5";
|
||||
broadcast_port = 9;
|
||||
};
|
||||
}
|
||||
|
||||
9
hosts/fw/modules/network-prefix.nix
Normal file
@@ -0,0 +1,9 @@
|
||||
{ lib, ... }: {
|
||||
options = {
|
||||
networkPrefix = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
example = "10.42";
|
||||
description = "First two octets of the network";
|
||||
};
|
||||
};
|
||||
}
|
||||
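Note: the networkPrefix option is set once per host and interpolated everywhere else; the sketch below is condensed from this commit's own changes.
# hosts/fw-new/configuration.nix (added above)
{ ... }: {
  networkPrefix = "10.42";
}

# any module that reads it, e.g. modules/networking.nix (changed below)
{ config, ... }: {
  networking.nameservers = [ "${config.networkPrefix}.97.1" ];
}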
@@ -1,4 +1,5 @@
|
||||
{ ... }: {
|
||||
{ config, lib, ... }:
|
||||
{
|
||||
boot.kernel.sysctl = {
|
||||
# if you use ipv4, this is all you need
|
||||
"net.ipv4.conf.all.forwarding" = true;
|
||||
@@ -37,7 +38,7 @@
|
||||
networking = {
|
||||
useDHCP = false;
|
||||
# Define VLANs
|
||||
nameservers = [ "10.42.97.1" ];
|
||||
nameservers = [ "${config.networkPrefix}.97.1" ];
|
||||
# resolvconf.enable = false;
|
||||
vlans = {
|
||||
infrastructure = {
|
||||
@@ -81,37 +82,37 @@
|
||||
wan.useDHCP = true;
|
||||
lan = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.96.1";
|
||||
address = "${config.networkPrefix}.96.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
server = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.97.1";
|
||||
address = "${config.networkPrefix}.97.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
infrastructure = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.101.1";
|
||||
address = "${config.networkPrefix}.101.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
multimedia = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.99.1";
|
||||
address = "${config.networkPrefix}.99.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
smart = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.100.1";
|
||||
address = "${config.networkPrefix}.100.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
guest = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.254.1";
|
||||
address = "${config.networkPrefix}.254.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
extraOptions = [
|
||||
"--network=server"
|
||||
"--mac-address=1a:c4:04:6e:29:bd"
|
||||
"--ip=10.42.97.2"
|
||||
"--ip=${config.networkPrefix}.97.2"
|
||||
];
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
{ pkgs, ... }:
|
||||
{ config, pkgs, ... }:
|
||||
let
|
||||
cids = import ../modules/staticids.nix;
|
||||
json = pkgs.formats.json { };
|
||||
@@ -20,7 +20,7 @@ in {
|
||||
users.groups.podman.gid = cids.gids.podman;
|
||||
virtualisation = {
|
||||
# containers.containersConf.settings = {
|
||||
# containers.dns_servers = [ "10.42.97.1" ];
|
||||
# containers.dns_servers = [ "${config.networkPrefix}.97.1" ];
|
||||
# };
|
||||
podman = {
|
||||
enable = true;
|
||||
@@ -42,8 +42,8 @@ in {
|
||||
dns_enabled = false;
|
||||
subnets = [
|
||||
{
|
||||
subnet = "10.42.97.0/24";
|
||||
gateway = "10.42.97.1";
|
||||
subnet = "${config.networkPrefix}.97.0/24";
|
||||
gateway = "${config.networkPrefix}.97.1";
|
||||
}
|
||||
];
|
||||
ipam_options = {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
{ ... }: {
|
||||
{ config, ... }: {
|
||||
networking = {
|
||||
vlans = {
|
||||
setup = {
|
||||
@@ -10,7 +10,7 @@
|
||||
interfaces = {
|
||||
setup = {
|
||||
ipv4.addresses = [{
|
||||
address = "10.42.110.1";
|
||||
address = "${config.networkPrefix}.110.1";
|
||||
prefixLength = 24;
|
||||
}];
|
||||
};
|
||||
@@ -28,15 +28,15 @@
|
||||
{
|
||||
pools = [
|
||||
{
|
||||
pool = "10.42.110.100 - 10.42.110.240";
|
||||
pool = "${config.networkPrefix}.110.100 - ${config.networkPrefix}.110.240";
|
||||
}
|
||||
];
|
||||
subnet = "10.42.110.0/24";
|
||||
subnet = "${config.networkPrefix}.110.0/24";
|
||||
interface = "setup";
|
||||
option-data = [
|
||||
{
|
||||
name = "routers";
|
||||
data = "10.42.110.1";
|
||||
data = "${config.networkPrefix}.110.1";
|
||||
}
|
||||
{
|
||||
name = "domain-name";
|
||||
@@ -48,7 +48,7 @@
|
||||
}
|
||||
{
|
||||
name = "domain-name-servers";
|
||||
data = "10.42.97.1";
|
||||
data = "${config.networkPrefix}.97.1";
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
{ pkgs, config, python3Packages, ... }:
let
domain = "snapcast.cloonar.com";
networkPrefix = config.networkPrefix;

snapweb = pkgs.stdenv.mkDerivation {
pname = "snapweb";
@@ -28,8 +29,8 @@ in
ephemeral = false; # because of ssh key
privateNetwork = true;
hostBridge = "server";
hostAddress = "10.42.97.1";
localAddress = "10.42.97.21/24";
hostAddress = "${networkPrefix}.97.1";
localAddress = "${networkPrefix}.97.21/24";
bindMounts = {
"/var/lib/acme/snapcast/" = {
hostPath = "${config.security.acme.certs.${domain}.directory}";
@@ -53,10 +54,10 @@ in
hostName = "snapcast";
useHostResolvConf = false;
defaultGateway = {
address = "10.42.96.1";
address = "${networkPrefix}.96.1";
interface = "eth0";
};
nameservers = [ "10.42.97.1" ];
nameservers = [ "${networkPrefix}.97.1" ];
firewall.enable = false;
};
environment.etc = {

@@ -30,11 +30,11 @@ let
interface-automatic = "yes";
access-control = [
"127.0.0.0/8 allow"
"10.42.96.0/24 allow"
"10.42.97.0/24 allow"
"10.42.98.0/24 allow"
"10.42.99.0/24 allow"
"10.42.101.0/24 allow"
"${config.networkPrefix}.96.0/24 allow"
"${config.networkPrefix}.97.0/24 allow"
"${config.networkPrefix}.98.0/24 allow"
"${config.networkPrefix}.99.0/24 allow"
"${config.networkPrefix}.101.0/24 allow"
"0.0.0.0/0 allow"
];
tls-cert-bundle = "/etc/ssl/certs/ca-certificates.crt";
@@ -44,23 +44,23 @@ let
"\"localhost.cloonar.com A 127.0.0.1\""
"\"localhost AAAA ::1\""
"\"localhost.cloonar.com AAAA ::1\""
"\"fw.cloonar.com A 10.42.97.1\""
"\"fw A 10.42.97.1\""
"\"fw.cloonar.com A ${config.networkPrefix}.97.1\""
"\"fw A ${config.networkPrefix}.97.1\""

"\"pc.cloonar.com IN A 10.42.96.5\""
|
||||
"\"omada.cloonar.com IN A 10.42.97.2\""
|
||||
"\"switch.cloonar.com IN A 10.42.97.10\""
|
||||
"\"mopidy.cloonar.com IN A 10.42.97.21\""
|
||||
"\"deconz.cloonar.com IN A 10.42.97.22\""
|
||||
"\"brn30055c566237.cloonar.com IN A 10.42.96.100\""
|
||||
"\"snapcast.cloonar.com IN A 10.42.97.21\""
|
||||
"\"home-assistant.cloonar.com IN A 10.42.97.20\""
|
||||
"\"web-02.cloonar.com IN A 10.42.97.5\""
|
||||
"\"matrix.cloonar.com IN A 10.42.97.5\""
|
||||
"\"element.cloonar.com IN A 10.42.97.5\""
|
||||
"\"support.cloonar.com IN A 10.42.97.5\""
|
||||
"\"git.cloonar.com IN A 10.42.97.50\""
|
||||
"\"sync.cloonar.com IN A 10.42.97.51\""
|
||||
"\"pc.cloonar.com IN A ${config.networkPrefix}.96.5\""
|
||||
"\"omada.cloonar.com IN A ${config.networkPrefix}.97.2\""
|
||||
"\"switch.cloonar.com IN A ${config.networkPrefix}.97.10\""
|
||||
"\"mopidy.cloonar.com IN A ${config.networkPrefix}.97.21\""
|
||||
"\"deconz.cloonar.com IN A ${config.networkPrefix}.97.22\""
|
||||
"\"brn30055c566237.cloonar.com IN A ${config.networkPrefix}.96.100\""
|
||||
"\"snapcast.cloonar.com IN A ${config.networkPrefix}.97.21\""
|
||||
"\"home-assistant.cloonar.com IN A ${config.networkPrefix}.97.20\""
|
||||
"\"web-02.cloonar.com IN A ${config.networkPrefix}.97.5\""
|
||||
"\"matrix.cloonar.com IN A ${config.networkPrefix}.97.5\""
|
||||
"\"element.cloonar.com IN A ${config.networkPrefix}.97.5\""
|
||||
"\"support.cloonar.com IN A ${config.networkPrefix}.97.5\""
|
||||
"\"git.cloonar.com IN A ${config.networkPrefix}.97.50\""
|
||||
"\"sync.cloonar.com IN A ${config.networkPrefix}.97.51\""
|
||||
|
||||
"\"feeds.cloonar.com IN A 188.34.191.144\""
|
||||
# "\"paraclub.cloonar.dev IN A 49.12.244.139\""
|
||||
@@ -112,44 +112,44 @@ let
|
||||
"\"web.hilgenberg-gmbh.de IN A 91.107.197.169\""
|
||||
|
||||
# gaming
"\"foundry-vtt.cloonar.com IN A 10.42.97.5\""
"\"foundry-vtt.cloonar.com IN A ${config.networkPrefix}.97.5\""

"\"deconz.cloonar.multimedia IN A 10.42.97.22\""
"\"metz.cloonar.multimedia IN A 10.42.99.10\""
# "\"ps5.cloonar.multimedia IN A 10.42.99.12\""
"\"xbox.cloonar.multimedia IN A 10.42.99.13\""
# "\"switch.cloonar.multimedia IN A 10.42.99.14\""
"\"deconz.cloonar.multimedia IN A ${config.networkPrefix}.97.22\""
"\"metz.cloonar.multimedia IN A ${config.networkPrefix}.99.10\""
# "\"ps5.cloonar.multimedia IN A ${config.networkPrefix}.99.12\""
"\"xbox.cloonar.multimedia IN A ${config.networkPrefix}.99.13\""
# "\"switch.cloonar.multimedia IN A ${config.networkPrefix}.99.14\""
#living room
"\"shellyuni-livingroom-1.cloonar.smart IN A 10.42.100.8\""
"\"shellyswitch25-livingroom-1.cloonar.smart IN A 10.42.100.9\""
"\"shellyplug-s-living-1.cloonar.smart IN A 10.42.100.10\""
"\"shellyplug-s-living-2.cloonar.smart IN A 10.42.100.11\""
"\"shellyuni-livingroom-1.cloonar.smart IN A ${config.networkPrefix}.100.8\""
"\"shellyswitch25-livingroom-1.cloonar.smart IN A ${config.networkPrefix}.100.9\""
"\"shellyplug-s-living-1.cloonar.smart IN A ${config.networkPrefix}.100.10\""
"\"shellyplug-s-living-2.cloonar.smart IN A ${config.networkPrefix}.100.11\""
# kitchen
"\"shellyplug-s-kitchen-1.cloonar.smart IN A 10.42.100.17\""
"\"shellyrgbw2-kitchen-1.cloonar.smart IN A 10.42.100.18\""
"\"shellyplug-s-kitchen-1.cloonar.smart IN A ${config.networkPrefix}.100.17\""
"\"shellyrgbw2-kitchen-1.cloonar.smart IN A ${config.networkPrefix}.100.18\""
#bedroom
"\"shelly1-bedroom-1.cloonar.smart IN A 10.42.100.33\""
"\"shellybutton1-bedroom-1.cloonar.smart IN A 10.42.100.34\""
"\"shellybutton1-bedroom-2.cloonar.smart IN A 10.42.100.35\"" # todo
"\"shellyrgbw2-bedroom-1.cloonar.smart IN A 10.42.100.36\""
"\"shellyrgbw2-bedroom-2.cloonar.smart IN A 10.42.100.37\""
"\"shellyrgbw2-bedroom-3.cloonar.smart IN A 10.42.100.38\""
"\"shelly1-bedroom-1.cloonar.smart IN A ${config.networkPrefix}.100.33\""
"\"shellybutton1-bedroom-1.cloonar.smart IN A ${config.networkPrefix}.100.34\""
"\"shellybutton1-bedroom-2.cloonar.smart IN A ${config.networkPrefix}.100.35\"" # todo
"\"shellyrgbw2-bedroom-1.cloonar.smart IN A ${config.networkPrefix}.100.36\""
"\"shellyrgbw2-bedroom-2.cloonar.smart IN A ${config.networkPrefix}.100.37\""
"\"shellyrgbw2-bedroom-3.cloonar.smart IN A ${config.networkPrefix}.100.38\""
# bath
"\"shellyswitch25-bath-1.cloonar.smart IN A 10.42.100.49\""
"\"shelly1pm-bath-1.cloonar.smart IN A 10.42.100.52\""
"\"shellyht-bath-1.cloonar.smart IN A 10.42.100.53\"" # todo
"\"shellyswitch25-bath-1.cloonar.smart IN A ${config.networkPrefix}.100.49\""
"\"shelly1pm-bath-1.cloonar.smart IN A ${config.networkPrefix}.100.52\""
"\"shellyht-bath-1.cloonar.smart IN A ${config.networkPrefix}.100.53\"" # todo
# hallway
"\"shelly1-hallway-1.cloonar.smart IN A 10.42.100.65\""
"\"shellyem3.cloonar.smart IN A 10.42.100.70\""
"\"shellypro-1.cloonar.smart IN A 10.42.100.71\""
"\"shellypro-2.cloonar.smart IN A 10.42.100.72\""
"\"shelly1-hallway-1.cloonar.smart IN A ${config.networkPrefix}.100.65\""
"\"shellyem3.cloonar.smart IN A ${config.networkPrefix}.100.70\""
"\"shellypro-1.cloonar.smart IN A ${config.networkPrefix}.100.71\""
"\"shellypro-2.cloonar.smart IN A ${config.networkPrefix}.100.72\""
# toilet
"\"shelly1-toilet-1.cloonar.smart IN A 10.42.100.81\""
"\"shellybulbduo-toilet-1.cloonar.smart IN A 10.42.100.82\""
"\"shelly1-toilet-1.cloonar.smart IN A ${config.networkPrefix}.100.81\""
"\"shellybulbduo-toilet-1.cloonar.smart IN A ${config.networkPrefix}.100.82\""
# storage
"\"shelly1-storage-1.cloonar.smart IN A 10.42.100.97\""
"\"shellyplug-storage-1.cloonar.smart IN A 10.42.100.98\""
"\"brn30055c566237.cloonar.multimedia IN A 10.42.99.100\""
"\"shelly1-storage-1.cloonar.smart IN A ${config.networkPrefix}.100.97\""
"\"shellyplug-storage-1.cloonar.smart IN A ${config.networkPrefix}.100.98\""
"\"brn30055c566237.cloonar.multimedia IN A ${config.networkPrefix}.99.100\""

"\"ddl-warez.to IN A 172.67.184.30\""
|
||||
"\"cdnjs.cloudflare.com IN A 104.17.24.14\""
|
||||
@@ -157,11 +157,11 @@ let
|
||||
local-data-ptr = [
|
||||
"\"127.0.0.1 localhost\""
|
||||
"\"::1 localhost\""
|
||||
"\"10.42.97.1 fw.cloonar.com\""
|
||||
"\"10.42.97.20 home-assistant.cloonar.com\""
|
||||
"\"10.42.97.21 snapcast.cloonar.com\""
|
||||
"\"10.42.97.22 deconz.cloonar.com\""
|
||||
"\"10.42.97.50 git.cloonar.com\""
|
||||
"\"${config.networkPrefix}.97.1 fw.cloonar.com\""
|
||||
"\"${config.networkPrefix}.97.20 home-assistant.cloonar.com\""
|
||||
"\"${config.networkPrefix}.97.21 snapcast.cloonar.com\""
|
||||
"\"${config.networkPrefix}.97.22 deconz.cloonar.com\""
|
||||
"\"${config.networkPrefix}.97.50 git.cloonar.com\""
|
||||
|
||||
"\"10.254.235.22 stage.wsw.at\""
|
||||
"\"10.254.217.23 prod.wsw.at\""
|
||||
|
||||
@@ -49,6 +49,7 @@ in {

imports = [
"${impermanence}/nixos.nix"
../network-prefix.nix
../../utils/modules/sops.nix
../../utils/modules/lego/lego.nix
# ../../utils/modules/borgbackup.nix
@@ -58,13 +59,15 @@ in {
./matrix.nix
];

networkPrefix = config.networkPrefix;

time.timeZone = "Europe/Vienna";

systemd.network.networks."10-lan" = {
matchConfig.PermanentMACAddress = "02:00:00:00:01:01";
address = [ "10.42.97.5/24" ];
gateway = [ "10.42.97.1" ];
dns = [ "10.42.97.1" ];
address = [ "${config.networkPrefix}.97.5/24" ];
gateway = [ "${config.networkPrefix}.97.1" ];
dns = [ "${config.networkPrefix}.97.1" ];
};

fileSystems."/persist".neededForBoot = lib.mkForce true;

@@ -1,4 +1,4 @@
{ ... }: {
{ config, lib, ... }: {
services.nginx.virtualHosts."git.cloonar.com" = {
forceSSL = true;
enableACME = true;
@@ -12,7 +12,7 @@
enableACME = true;
acmeRoot = null;
locations."/" = {
proxyPass = "http://10.42.97.21:30000";
proxyPass = "http://${config.networkPrefix}.97.21:30000";
proxyWebsockets = true;
};
};

@@ -8,18 +8,18 @@

networking.wireguard.interfaces = {
wg_cloonar = {
ips = [ "10.42.98.1/24" ];
ips = [ "${config.networkPrefix}.98.1/24" ];
listenPort = 51820;
# publicKey: TKQVDmBnf9av46kQxLQSBDhAeaK8r1zh8zpU64zuc1Q=
privateKeyFile = config.sops.secrets.wg_cloonar_key.path;
peers = [
{ # Notebook
publicKey = "YdlRGsjh4hS3OMJI+t6SZ2eGXKbs0wZBXWudHW4NyS8=";
allowedIPs = [ "10.42.98.201/32" ];
allowedIPs = [ "${config.networkPrefix}.98.201/32" ];
}
{ # iPhone
publicKey = "nkm10abmwt2G8gJXnpqel6QW5T8aSaxiqqGjE8va/A0=";
allowedIPs = [ "10.42.98.202/32" ];
allowedIPs = [ "${config.networkPrefix}.98.202/32" ];
}
];
};

@@ -3,7 +3,7 @@ let
wolScript = pkgs.writeScriptBin "wol-script" ''
case $1 in
"gaming")
${pkgs.wol}/bin/wol -i 10.42.96.255 78:8c:b5:fe:41:62
${pkgs.wol}/bin/wol -i ${config.networkPrefix}.96.255 78:8c:b5:fe:41:62
;;
"") echo "Usage: $0 <hostname>"; exit 1;;
esac

@@ -2,7 +2,7 @@
# your system. Help is available in the configuration.nix(5) man page
# and in the NixOS manual (accessible by running ‘nixos-help’).

{ config, pkgs, ... }:
{ config, lib, pkgs, ... }:
let
unstable = import (fetchTarball https://nixos.org/channels/nixos-unstable/nixexprs.tar.xz) {
config = { allowUnfree = true; };
@@ -12,6 +12,8 @@ let
in {
nixpkgs.config.allowUnfree = true;

security.pki.certificates = [ "/home/dominik/.local/share/mkcert/rootCA.pem" ];

imports =
[ # Include the results of the hardware scan.
"${impermanence}/nixos.nix"
@@ -151,7 +153,6 @@ in {
};

networking.hostName = "nb-01"; # Define your hostname.
networking.resolvconf.enable = true;
networking.networkmanager.enable = true; # Easiest to use and most distros use this by default.
networking.extraHosts = ''
77.119.230.30 vpn.cloonar.com
@@ -229,6 +230,7 @@ in {
"10.42.96.0/20"
# wohnservice-wien
"10.254.240.0/24"
"10.254.235.0/24"
# epicenter.works
"10.14.0.0/16"
"10.25.0.0/16" ];
@@ -242,6 +244,45 @@ in {
};
};

# networking.networkmanager.insertNameservers = [ "9.9.9.9" "149.112.112.11" "2620:fe::fe" "2620:fe::9" ];
# services.avahi.enable = false;
# networking.resolvconf.enable = lib.mkForce false;
# services.resolved = {
# enable = true;
# dnssec = "true";
# domains = [ "~." ];
# fallbackDns = [ "9.9.9.9" "149.112.112.11" "2620:fe::fe" "2620:fe::9" ];
# dnsovertls = "true";
# };
# networking.wg-quick.interfaces = {
# wg0 = {
# address = [ "10.42.98.201/32" ];
# privateKeyFile = config.sops.secrets.wg-cloonar-key.path;
#
# postUp = ''
# ${pkgs.systemd}/bin/resolvectl dns wg0 10.42.97.1
# ${pkgs.systemd}/bin/resolvectl domain wg0 cloonar.com
# ${pkgs.systemd}/bin/resolvectl dnsovertls wg0 true
# '';
#
# peers = [
# {
# publicKey = "TKQVDmBnf9av46kQxLQSBDhAeaK8r1zh8zpU64zuc1Q=";
# allowedIPs = [
# "10.42.96.0/20"
# # wohnservice-wien
# "10.254.240.0/24"
# # epicenter.works
# "10.14.0.0/16"
# "10.25.0.0/16"
# ];
# endpoint = "vpn.cloonar.com:51822";
# persistentKeepalive = 25;
# }
# ];
# };
# };


nix = {
settings.auto-optimise-store = true;
@@ -259,6 +300,11 @@ in {
'';
};

services.xserver.desktopManager.gnome.extraGSettingsOverrides = ''
[org.gnome.desktop.interface]
cursor-size=24
'';


}


13
hosts/nb/modules/nvim/chatgpt.nix
Normal file
@@ -0,0 +1,13 @@
self: super: {
vimPlugins = super.vimPlugins // {
chatgpt-nvim = super.vimUtils.buildVimPlugin {
pname = "chatgpt-nvim";
version = "1.0.0";
src = super.fetchgit {
url = "https://git.cloonar.com/Cloonar/chatgpt.vim.git";
rev = "9a9868c991d2c9d0d2f5bc4c065a839f79c8daa1";
sha256 = "sha256-ip+VFMJJWNaoSOyY2nLNVLEb3h37akUdk0du5u81dvc=";
};
};
};
}
0
hosts/nb/modules/nvim/config/chatgpt.lua
Normal file
@@ -1,6 +1,9 @@
{ pkgs, ... }:
{
environment.variables = { EDITOR = "vim"; };
nixpkgs.overlays = [
(import ./chatgpt.nix)
];

environment.systemPackages = with pkgs; [
nodePackages.typescript-language-server
@@ -11,6 +14,8 @@
gopls
lazygit
ripgrep
lua
luaPackages.lyaml
(neovim.override {
vimAlias = true;
configure = {
@@ -19,6 +24,7 @@
bigfile-nvim
bufferline-nvim
catppuccin-nvim
chatgpt-nvim
cmp-buffer
cmp-nvim-lsp
cmp-path
@@ -66,6 +72,7 @@
luaConfig = builtins.concatStringsSep "\n" (map luaRequire [
"init"
"keymappings"
"chatgpt"
"copilot"
"icons"
"lspconfig"
@@ -86,6 +93,7 @@
EOF
'';
};
extraLuaPackages = luaPackages: [ luaPackages.lyaml ];
}
)];
}

7
hosts/nb/modules/nvim/nvim.nix
Normal file
@@ -0,0 +1,7 @@
self: super: {
neovim = super.neovim.override {
luaPackages = super.luaPackages // {
lyaml = super.luaPackages.lyaml;
};
};
}
@@ -6,6 +6,7 @@

# font for window titles and bar
font pango:Source Sans Pro 14
seat * xcursor_theme Adwaita 24

# use win key
set $mod Mod4
@@ -311,7 +312,7 @@ exec mako --default-timeout=5000

# wallpaper
# output eDP-1 scale 2 scale_filter linear
output eDP-1 mode 1680x1050
output eDP-1 mode 2880x1920 scale 2
output eDP-1 bg #282a36 solid_color
output eDP-1 bg ~/.wallpaper.png center
output DP-4 bg #282a36 solid_color

Some files were not shown because too many files have changed in this diff.