many changes

This commit is contained in:
2025-01-26 10:55:38 +01:00
parent 12ef36af33
commit a2d482e16d
98 changed files with 419 additions and 27402 deletions

View File

@@ -38,6 +38,8 @@ interval:
# Enable Home Assistant API # Enable Home Assistant API
api: api:
logger:
ota: ota:
platform: esphome platform: esphome

View File

@@ -38,6 +38,8 @@ interval:
# Enable Home Assistant API # Enable Home Assistant API
api: api:
logger:
ota: ota:
platform: esphome platform: esphome

View File

@@ -38,6 +38,8 @@ interval:
# Enable Home Assistant API # Enable Home Assistant API
api: api:
logger:
ota: ota:
platform: esphome platform: esphome

View File

@@ -38,6 +38,8 @@ interval:
# Enable Home Assistant API # Enable Home Assistant API
api: api:
logger:
ota: ota:
platform: esphome platform: esphome

View File

@@ -38,6 +38,8 @@ interval:
# Enable Home Assistant API # Enable Home Assistant API
api: api:
logger:
ota: ota:
platform: esphome platform: esphome

View File

@@ -38,6 +38,8 @@ interval:
# Enable Home Assistant API # Enable Home Assistant API
api: api:
logger:
ota: ota:
platform: esphome platform: esphome

View File

@@ -23,9 +23,9 @@ let
cfg = { cfg = {
remote-control.control-enable = true; remote-control.control-enable = true;
server = { server = {
include = [ # include = [
"\"${adblockLocalZones}\"" # "\"${adblockLocalZones}\""
]; # ];
interface = [ "0.0.0.0" "::0" ]; interface = [ "0.0.0.0" "::0" ];
interface-automatic = "yes"; interface-automatic = "yes";
access-control = [ access-control = [
@@ -47,6 +47,8 @@ let
"\"fw.cloonar.com A ${config.networkPrefix}.97.1\"" "\"fw.cloonar.com A ${config.networkPrefix}.97.1\""
"\"fw A ${config.networkPrefix}.97.1\"" "\"fw A ${config.networkPrefix}.97.1\""
"\"www.7-zip.org A 49.12.202.237\""
"\"pc.cloonar.com IN A ${config.networkPrefix}.96.5\"" "\"pc.cloonar.com IN A ${config.networkPrefix}.96.5\""
"\"omada.cloonar.com IN A ${config.networkPrefix}.97.2\"" "\"omada.cloonar.com IN A ${config.networkPrefix}.97.2\""
"\"switch.cloonar.com IN A ${config.networkPrefix}.97.10\"" "\"switch.cloonar.com IN A ${config.networkPrefix}.97.10\""

View File

@@ -0,0 +1,44 @@
{ config, lib, pkgs, ... }:
{
imports = [
./hardware-configuration.nix
./sway/sway.nix
./nvim/default.nix
./utils/bento.nix
./utils/modules/sops.nix
./utils/modules/nur.nix
./utils/modules/autoupgrade.nix
./users
# Import our new steam-deck-mode module
./modules/steam-deck-mode.nix
];
networking.hostName = "gpd-win4";
time.timeZone = "Europe/Vienna";
nixpkgs.config.allowUnfree = true;
nixpkgs.config.allowBroken = true;
console.keyMap = "de";
services.openssh.enable = true;
security.polkit.enable = true;
networking.networkmanager.enable = true;
users.users.dominik = {
isNormalUser = true;
hashedPassword = ""; # Replace with real hash
extraGroups = [ "wheel" "video" "audio" "input" ];
};
powerManagement.cpuFreqGovernor = "powersave";
# In case you want a persistent /home or other directories:
# environment.persistence."/nix/persist" = {
# hideMounts = true;
# directories = [ "/home" ];
# };
# This system tries to unify the "Steam Deck Mode" and "Sway" approach
# with toggling via systemd user services.
system.stateVersion = "24.05";
}

View File

@@ -0,0 +1,50 @@
{ config, lib, pkgs, modulesPath, ... }:
{
imports = [
(modulesPath + "/installer/scan/not-detected.nix")
];
boot.loader.systemd-boot.enable = true;
boot.loader.efi.canTouchEfiVariables = true;
boot.initrd.kernelModules = [ "amdgpu" "kvm-amd" ];
hardware.cpu.amd.updateMicrocode = lib.mkDefault true;
fileSystems."/" =
{
device = "none";
fsType = "tmpfs";
options = [ "size=16G" "mode=755" ];
};
fileSystems."/nix" =
{
device = "/dev/disk/by-uuid/856e1ebe-832f-422d-8d91-d43a5d852abb";
fsType = "f2fs";
};
boot.initrd = {
luks.devices."enc" = {
crypttabExtraOpts = [ "fido2-device=auto" ];
device = "/dev/disk/by-uuid/08897ecb-23ce-4352-a1fc-fa442b9e0f72";
};
systemd.enable = true;
};
fileSystems."/boot" =
{
device = "/dev/disk/by-uuid/1521-B173";
fsType = "vfat";
options = [ "fmask=0022" "dmask=0022" ];
};
hardware.graphics = {
enable = true;
extraPackages = with pkgs; [
vaapiVdpau
libvdpau-va-gl
libva
libva-utils
];
};
}

1
hosts/gpd-win4/modules/nvim Symbolic link
View File

@@ -0,0 +1 @@
../../nb/modules/nvim

View File

@@ -0,0 +1,129 @@
{ config, lib, pkgs, ... }:
let
cfgUser = "dominik"; # Adjust to your username
in {
#### 1) Provide two scripts:
#### - `steam-deck-mode.sh`: Runs Steam Big Picture with Gamescope (Wayland).
#### - `sway-session.sh`: Starts Sway.
environment.etc."steam-deck-mode.sh".text = ''
#!/usr/bin/env bash
# This script launches Steam in Big Picture mode under Gamescope (Wayland).
# Once Steam (or Gamescope) exits, the systemd user service stops.
# The ExecStopPost hook in the user service will then start Sway automatically.
# For safety, kill any existing Steam instance
pgrep steam && steam -shutdown || true
sleep 1
# Use Gamescope in fullscreen mode, exit on Steam exit, run Steam in Gamepad UI
exec gamescope -W 1280 -H 800 -f -e -- steam -gamepadui
'';
environment.etc."sway-session.sh".text = ''
#!/usr/bin/env bash
# This script starts a Sway session. When Sway exits, the user service stops,
# which triggers ExecStopPost to start Steam Big Picture again.
exec sway
'';
#### Make these scripts executable via a simple systemd service:
systemd.services."make-scripts-executable" = {
description = "Make steam-deck-mode.sh and sway-session.sh executable";
wantedBy = [ "multi-user.target" ];
serviceConfig.ExecStart = [
"${pkgs.coreutils}/bin/chmod +x /etc/steam-deck-mode.sh"
"${pkgs.coreutils}/bin/chmod +x /etc/sway-session.sh"
];
};
#### 2) Create two systemd *user* services:
#### - steam-deck-mode: On stop, automatically start sway
#### - sway: On stop, automatically start steam-deck-mode
systemd.user.services."steam-deck-mode" = {
description = "Steam Deck Mode (Wayland Gamescope + Steam Big Picture)";
wantedBy = [ "default.target" ]; # So we can enable it for the user
serviceConfig = {
Type = "simple";
ExecStart = "/etc/steam-deck-mode.sh";
# On exit, automatically trigger Sway
ExecStopPost = "${pkgs.systemd}/bin/systemctl --user start sway";
Restart = "no"; # If Steam crashes, you can change to 'on-failure' if desired
};
};
systemd.user.services."sway" = {
description = "Sway WM Session";
wantedBy = [ ]; # We won't start this on login by default, but from steam or a script
serviceConfig = {
Type = "simple";
ExecStart = "/etc/sway-session.sh";
# On exit, automatically trigger Steam Deck Mode
ExecStopPost = "${pkgs.systemd}/bin/systemctl --user start steam-deck-mode";
Restart = "no";
};
};
#### 3) Provide a script & desktop entry to let you switch from Sway to Game Mode easily
#### (i.e., stop the 'sway' service, which triggers Steam).
environment.etc."switch-to-game-mode.sh".text = ''
#!/usr/bin/env bash
# This script stops Sway, causing the user service to exit
# The ExecStopPost of that service will start steam-deck-mode automatically.
${pkgs.systemd}/bin/systemctl --user stop sway
'';
systemd.services."make-switch-to-game-mode-executable" = {
description = "Make switch-to-game-mode.sh executable";
wantedBy = [ "multi-user.target" ];
serviceConfig.ExecStart = [
"${pkgs.coreutils}/bin/chmod +x /etc/switch-to-game-mode.sh"
];
};
environment.etc."xdg/applications/switch-to-game-mode.desktop".text = ''
[Desktop Entry]
Name=Switch to Game Mode
Comment=Stop Sway and start Steam Big Picture (Gamescope)
Exec=/etc/switch-to-game-mode.sh
Terminal=false
Type=Application
Categories=Game;
'';
#### 4) If you want to start directly in Steam Deck Mode on boot (no display manager),
#### enable auto-login on TTY and run the user service for "dominik".
#### For example (uncomment if you want an immediate console login):
# services.getty.autologinUser = cfgUser;
# systemd.user.services."steam-deck-mode".wantedBy = [ "default.target" ]; # already set
# You'd do 'systemctl --user enable steam-deck-mode' as that user to start it on login.
#### 5) Additional recommended gaming packages if not set elsewhere:
environment.systemPackages = with pkgs; [
steam
gamemode
mangohud
vulkan-tools
vulkan-loader
vulkan-headers
# ...
];
#### 6) Enable 32-bit support for Steam
hardware.opengl.enable = true;
hardware.opengl.driSupport32Bit = true;
hardware.graphics.enable = true;
hardware.graphics.enable32Bit = true;
hardware.graphics.extraPackages = [
pkgs.amdvlk
pkgs.driversi686Linux.amdvlk
];
#### 7) Optionally handle udev rules for Steam/Controllers if needed
environment.etc."udev/rules.d/99-steamdeck-controller.rules".text = ''
SUBSYSTEM=="usb", ATTRS{idVendor}=="28de", MODE="0666"
KERNEL=="uinput", MODE="0660", GROUP="input", OPTIONS+="static_node=uinput"
'';
}

View File

@@ -0,0 +1,80 @@
{ config, lib, pkgs, ... }:
let
# For GPD Win4s AMD APU, we assume AMD Vulkan drivers:
amdPackages = [
pkgs.amdvlk
pkgs.driversi686Linux.amdvlk
];
in
{
options.services.steamDeckMode = {
enable = lib.mkOption {
type = lib.types.bool;
default = false;
description = "Launch Steam in a Deck-like session upon login (auto-logins and starts Steam in gamepad UI mode).";
};
};
config = {
# Enable Steam and Gamescope
programs.gamescope = {
enable = true;
capSysNice = true;
};
programs.steam = {
enable = true;
# The gamescopeSession creates a special session for Steam on Wayland
gamescopeSession.enable = true;
};
# Add some helpful gaming utilities
environment.systemPackages = with pkgs; [
mangohud
steam-tui
steamcmd
vulkan-tools
vulkan-headers
vulkan-loader
wlroots
libdecor
gamemode
];
# Enable 32-bit support for libraries (often required by Steam)
hardware.opengl.enable = true;
hardware.opengl.driSupport32Bit = true;
# Additional AMD drivers if needed
hardware.graphics.enable = true;
hardware.graphics.enable32Bit = true;
hardware.graphics.extraPackages = amdPackages;
# Example udev rules for Steam Deck controllers, optional
environment.etc."udev/rules.d/99-steamdeck-controller.rules".text = ''
# Valve Controller devices
SUBSYSTEM=="usb", ATTRS{idVendor}=="28de", MODE="0666"
KERNEL=="uinput", MODE="0660", GROUP="input", OPTIONS+="static_node=uinput"
'';
# Provide a “Steam Deck Mode” session in the display manager
services.xserver.displayManager.session = {
"steam-deck-mode" = {
name = "Steam Deck Mode";
start = ''
#!/usr/bin/env bash
# On X11, you could also remove or adjust the following env variable if you prefer Wayland
export XDG_SESSION_TYPE=x11
# Fullscreen + close gamescope on exit, then run Steam in gamepad UI
exec gamescope -f -e -- steam -gamepadui
'';
};
};
}
// lib.mkIf config.services.steamDeckMode.enable {
# Auto-login to the user of your choice and start in Steam Deck Mode
services.xserver.displayManager.autoLogin.enable = true;
services.xserver.displayManager.autoLogin.user = "dominik"; # or your preferred user
services.xserver.autoRestartXServer = true;
services.xserver.displayManager.defaultSession = "steam-deck-mode";
};

1
hosts/gpd-win4/modules/sway Symbolic link
View File

@@ -0,0 +1 @@
../../nb/modules/sway

1
hosts/gpd-win4/users Symbolic link
View File

@@ -0,0 +1 @@
../nb/users

1
hosts/gpd-win4/utils Symbolic link
View File

@@ -0,0 +1 @@
../../utils

View File

@@ -31,7 +31,7 @@ in {
./cachix.nix ./cachix.nix
./users ./users
./modules/steam.nix # ./modules/steam.nix
./hardware-configuration.nix ./hardware-configuration.nix
@@ -41,6 +41,8 @@ in {
(import ./utils/overlays/packages.nix) (import ./utils/overlays/packages.nix)
]; ];
services.gvfs.enable = true;
fonts.packages = with pkgs; [ fonts.packages = with pkgs; [
git git
git-lfs git-lfs
@@ -52,6 +54,7 @@ in {
zsh-completions zsh-completions
zsh-syntax-highlighting zsh-syntax-highlighting
zsh-history-substring-search zsh-history-substring-search
creality-print
]; ];
programs.zsh = { programs.zsh = {
@@ -172,7 +175,6 @@ in {
environment.systemPackages = with pkgs; [ environment.systemPackages = with pkgs; [
bento bento
creality-print
docker-compose docker-compose
drone-cli drone-cli
git-filter-repo git-filter-repo
@@ -184,7 +186,7 @@ in {
wineWowPackages.stable wineWowPackages.stable
wineWowPackages.fonts wineWowPackages.fonts
winetricks winetricks
ykfde # ykfde
]; ];
environment.variables = { environment.variables = {

View File

@@ -5,8 +5,8 @@ self: super: {
version = "1.0.0"; version = "1.0.0";
src = super.fetchgit { src = super.fetchgit {
url = "https://git.cloonar.com/Cloonar/chatgpt.vim.git"; url = "https://git.cloonar.com/Cloonar/chatgpt.vim.git";
rev = "162ab2d82054897ac0d371d7047811abcd510ab5"; rev = "59540981edeebd7faf9894e2ba40cbe4fb02f31c";
sha256 = "sha256-0BvVCGXO4GAUumv36+/9/S8pGMKCl/V3rxEKeiKW5xo="; sha256 = "sha256-uBfdR8ezwrcPJeCs+hAnz0w7nE9N8rfqST/SuGlcoTs=";
}; };
}; };
}; };

View File

@@ -42,7 +42,7 @@ vim.opt.titlestring = "%<%F%=%l/%L - nvim" -- what the title of the window will
vim.opt.undodir = vim.fn.stdpath "cache" .. "/undo" vim.opt.undodir = vim.fn.stdpath "cache" .. "/undo"
vim.opt.undofile = true -- enable persistent undo vim.opt.undofile = true -- enable persistent undo
vim.opt.updatetime = 300 -- faster completion vim.opt.updatetime = 300 -- faster completion
vim.opt.writebackup = false -- if a file is being edited by another program (or was written to file while editing with another program) it is not allowed to be edited vim.opt.writebackup = false -- if a file is being edited by another program it is not allowed to be edited
vim.opt.expandtab = true -- convert tabs to spaces vim.opt.expandtab = true -- convert tabs to spaces
vim.opt.shiftwidth = 2 -- the number of spaces inserted for each indentation vim.opt.shiftwidth = 2 -- the number of spaces inserted for each indentation
vim.opt.tabstop = 2 -- insert 2 spaces for a tab vim.opt.tabstop = 2 -- insert 2 spaces for a tab
@@ -50,9 +50,38 @@ vim.opt.cursorline = true -- highlight the current line
vim.opt.number = true -- set numbered lines vim.opt.number = true -- set numbered lines
vim.opt.relativenumber = false -- set relative numbered lines vim.opt.relativenumber = false -- set relative numbered lines
vim.opt.numberwidth = 4 -- set number column width to 2 {default 4} vim.opt.numberwidth = 4 -- set number column width to 2 {default 4}
vim.opt.signcolumn = "yes" -- always show the sign column otherwise it would shift the text each time vim.opt.signcolumn = "yes" -- always show the sign column otherwise text shifts each time
vim.opt.wrap = false -- display lines as one long line vim.opt.wrap = false -- display lines as one long line
vim.opt.spell = false vim.opt.spell = false
vim.opt.spelllang = "en" vim.opt.spelllang = "en"
vim.opt.scrolloff = 8 -- is one of my fav vim.opt.scrolloff = 8 -- keep 8 lines above/below the cursor
vim.opt.sidescrolloff = 8 vim.opt.sidescrolloff = 8
-- Automatically disable heavy features for very large files
local largefile_group = vim.api.nvim_create_augroup("LargeFile", { clear = true })
vim.api.nvim_create_autocmd("BufReadPre", {
group = largefile_group,
pattern = "*",
callback = function(args)
local max_filesize = 1 * 1024 * 1024 -- 1 MB in bytes
local file = vim.fn.expand("<afile>")
if vim.fn.getfsize(file) > max_filesize then
-- Turn off syntax highlighting
vim.cmd("syntax off")
-- Disable Treesitter's highlight for this buffer
pcall(vim.cmd, "TSBufDisable highlight")
-- Optionally disable LSP for this buffer
for _, client in pairs(vim.lsp.get_active_clients()) do
if client ~= nil and client.attached_buffers[args.buf] then
client.detach(args.buf)
end
end
-- You can also disable or reduce other settings if needed, e.g.:
vim.opt.foldmethod = "manual"
vim.opt.wrap = false
vim.opt.hlsearch = false
end
end,
})

View File

@@ -0,0 +1,17 @@
{ config, pkgs, ... }:
{
environment.systemPackages = with pkgs; [
bitwarden
bitwarden-cli
];
environment.shellAliases = {
bw-epicenter = "BITWARDENCLI_APPDATA_DIR=~/.config/bitwarden-cli-epicenter ${pkgs.bitwarden-cli}/bin/bw";
bw-cloonar = "BITWARDENCLI_APPDATA_DIR=~/.config/bitwarden-cli-cloonar ${pkgs.bitwarden-cli}/bin/bw";
};
environment.shellInit = ''
mkdir -p ~/.config/bitwarden-cli-epicenter ~/.config/bitwarden-cli-cloonar
'';
}

View File

@@ -28,9 +28,6 @@ let
apache-ds-pin = import (builtins.fetchTarball { apache-ds-pin = import (builtins.fetchTarball {
url = "https://github.com/NixOS/nixpkgs/archive/9aec01027f7ea2bca07bb51d5ed83e78088871c1.tar.gz"; url = "https://github.com/NixOS/nixpkgs/archive/9aec01027f7ea2bca07bb51d5ed83e78088871c1.tar.gz";
}) {}; }) {};
# ddev-pin = import (builtins.fetchTarball {
# url = "https://github.com/NixOS/nixpkgs/archive/34a626458d686f1b58139620a8b2793e9e123bba.tar.gz";
# }) {};
in { in {
imports = [ imports = [
./social.nix ./social.nix
@@ -38,6 +35,7 @@ in {
# ./parsec.nix # ./parsec.nix
# ./rustdesk.nix # ./rustdesk.nix
./thunderbird.nix ./thunderbird.nix
./bitwarden.nix
]; ];
environment.variables.XCURSOR_SIZE = "24"; environment.variables.XCURSOR_SIZE = "24";
@@ -78,11 +76,11 @@ in {
alsa-utils alsa-utils
audacity audacity
apache-ds-pin.apache-directory-studio apache-ds-pin.apache-directory-studio
bitwarden
bitwarden-cli
rofi-rbw-wayland rofi-rbw-wayland
# cryptomator # TODO: remove at 25.05
unstable.cryptomator
fontforge fontforge
hypnotix
code-cursor code-cursor

View File

@@ -1,5 +1,8 @@
/home/dominik/projects/cloonar/chatgpt.vim /home/dominik/projects/cloonar/chatgpt.vim
/home/dominik/projects/cloonar/gitea.nvim /home/dominik/projects/cloonar/gitea.nvim
/home/dominik/projects/cloonar/glazewm
/home/dominik/projects/cloonar/phishguard
/home/dominik/projects/cloonar/phishguard-frontend
/home/dominik/projects/cloonar/typo3-basic /home/dominik/projects/cloonar/typo3-basic
/home/dominik/projects/cloonar/renovate-config /home/dominik/projects/cloonar/renovate-config
/home/dominik/projects/cloonar/bento /home/dominik/projects/cloonar/bento
@@ -27,5 +30,7 @@
/home/dominik/projects/epicenter.works/spenden.akvorrat.at /home/dominik/projects/epicenter.works/spenden.akvorrat.at
/home/dominik/projects/epicenter.works/dearmep-website /home/dominik/projects/epicenter.works/dearmep-website
/home/dominik/projects/epicenter.works/padexporter /home/dominik/projects/epicenter.works/padexporter
/home/dominik/projects/epicenter.works/ansible-pull
/home/dominik/projects/cloonar/lena-schilling-website /home/dominik/projects/cloonar/lena-schilling-website
/home/dominik/projects/cloonar/imperfect-perfect.com
/home/dominik/projects/cloonar/yaapi /home/dominik/projects/cloonar/yaapi

View File

@@ -317,6 +317,12 @@ in
# https://git-scm.com/book/en/v2/Git-Tools-Rerere # https://git-scm.com/book/en/v2/Git-Tools-Rerere
rerere.enabled = true; rerere.enabled = true;
}; };
extraConfig = {
"url.gitea@git.cloonar.com:" = {
insteadOf = "https://git.cloonar.com/";
};
};
}; };
programs.thunderbird = { programs.thunderbird = {
@@ -542,6 +548,9 @@ in
git clone gitea@git.cloonar.com:Cloonar/chatgpt.vim.git ${persistHome}/cloonar/chatgpt.vim 2>/dev/null git clone gitea@git.cloonar.com:Cloonar/chatgpt.vim.git ${persistHome}/cloonar/chatgpt.vim 2>/dev/null
git clone gitea@git.cloonar.com:Cloonar/gitea.nvim.git ${persistHome}/cloonar/gitea.nvim 2>/dev/null git clone gitea@git.cloonar.com:Cloonar/gitea.nvim.git ${persistHome}/cloonar/gitea.nvim 2>/dev/null
git clone gitea@git.cloonar.com:myhidden.life/web.git ${persistHome}/projects/myhidden.life/myhidden.life-web 2>/dev/null git clone gitea@git.cloonar.com:myhidden.life/web.git ${persistHome}/projects/myhidden.life/myhidden.life-web 2>/dev/null
git clone git@github.com:dpolakovics/glazewm.git ${persistHome}/cloonar/glazewm 2>/dev/null
git clone gitea@git.cloonar.com:Cloonar/phishguard.git ${persistHome}/projects/cloonar/phishguard 2>/dev/null
git clone gitea@git.cloonar.com:Cloonar/phishguard-frontend.git ${persistHome}/projects/cloonar/phishguard-frontend 2>/dev/null
git clone gitea@git.cloonar.com:dominik.polakovics/typo3-basic.git ${persistHome}/cloonar/typo3-basic 2>/dev/null git clone gitea@git.cloonar.com:dominik.polakovics/typo3-basic.git ${persistHome}/cloonar/typo3-basic 2>/dev/null
git clone gitea@git.cloonar.com:renovate/renovate-config.git ${persistHome}/cloonar/renovate-config 2>/dev/null git clone gitea@git.cloonar.com:renovate/renovate-config.git ${persistHome}/cloonar/renovate-config 2>/dev/null
@@ -561,6 +570,7 @@ in
git clone gitea@git.cloonar.com:hilgenberg/website.git ${persistHome}/projects/cloonar/hilgenberg-website 2>/dev/null git clone gitea@git.cloonar.com:hilgenberg/website.git ${persistHome}/projects/cloonar/hilgenberg-website 2>/dev/null
git clone gitea@git.cloonar.com:Cloonar/korean-skin.care.git ${persistHome}/projects/cloonar/korean-skin.care 2>/dev/null git clone gitea@git.cloonar.com:Cloonar/korean-skin.care.git ${persistHome}/projects/cloonar/korean-skin.care 2>/dev/null
git clone gitea@git.cloonar.com:Cloonar/lena-schilling-website.git ${persistHome}/projects/cloonar/lena-schilling-website 2>/dev/null git clone gitea@git.cloonar.com:Cloonar/lena-schilling-website.git ${persistHome}/projects/cloonar/lena-schilling-website 2>/dev/null
git clone gitea@git.cloonar.com:Cloonar/imperfect-perfect.com.git ${persistHome}/projects/cloonar/imperfect-perfect.com 2>/dev/null
git clone gitea@git.cloonar.com:socialgrow.tech/sgt-api.git ${persistHome}/projects/socialgrow.tech/sgt-api 2>/dev/null git clone gitea@git.cloonar.com:socialgrow.tech/sgt-api.git ${persistHome}/projects/socialgrow.tech/sgt-api 2>/dev/null
@@ -574,6 +584,7 @@ in
git clone git@github.com:AKVorrat/spenden.akvorrat.at.git ${persistHome}/projects/epicenter.works/spenden.akvorrat.at 2>/dev/null git clone git@github.com:AKVorrat/spenden.akvorrat.at.git ${persistHome}/projects/epicenter.works/spenden.akvorrat.at 2>/dev/null
git clone git@github.com:AKVorrat/dearmep-website.git ${persistHome}/projects/epicenter.works/dearmep-website 2>/dev/null git clone git@github.com:AKVorrat/dearmep-website.git ${persistHome}/projects/epicenter.works/dearmep-website 2>/dev/null
git clone git@github.com:AKVorrat/padexporter.git ${persistHome}/projects/epicenter.works/padexporter 2>/dev/null git clone git@github.com:AKVorrat/padexporter.git ${persistHome}/projects/epicenter.works/padexporter 2>/dev/null
git clone git@github.com:AKVorrat/ansible-config.git ${persistHome}/projects/epicenter.works/ansible-pull 2>/dev/null
set -eu set -eu
''; '';

View File

@@ -7,4 +7,20 @@
]; ];
phpPackage = pkgs.php83; phpPackage = pkgs.php83;
}; };
services.nginx.virtualHosts."www.lena-schilling.at" = {
enableACME = true;
forceSSL = true;
acmeRoot = "/var/lib/acme/acme-challenge";
locations."/" = {
return = "301 https://lena-schilling.at$request_uri";
};
serverAliases = [
"lena-schilling.com"
"lena-schilling.eu"
"lenaschilling.at"
"lenaschilling.com"
"lenaschilling.eu"
];
};
} }

View File

@@ -1,36 +0,0 @@
{
lib,
pkgs,
...
}: let
create_users = host: {
users.users."${host.username}" = {
createHome = false;
home = "/home/chroot/" + host.username;
isNormalUser = false;
isSystemUser = true;
group = "sftp_users";
openssh.authorizedKeys.keys = [host.key];
shell = null;
};
};
users = [
{
username = "notebook";
key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN/2SAFm50kraB1fepAizox/QRXxB7WbqVbH+5OPalDT47VIJGNKOKhixQoqhABHxEoLxdf/C83wxlCVlPV9poLfDgVkA3Lyt5r3tSFQ6QjjOJAgchWamMsxxyGBedhKvhiEzcr/Lxytnoz3kjDG8fqQJwEpdqMmJoMUfyL2Rqp16u+FQ7d5aJtwO8EUqovhMaNO7rggjPpV/uMOg+tBxxmscliN7DLuP4EMTA/FwXVzcFNbOx3K9BdpMRAaSJt4SWcJO2cS2KHA5n/H+PQI7nz5KN3Yr/upJN5fROhi/SHvK39QOx12Pv7FCuWlc+oR68vLaoCKYhnkl3DnCfc7A7";
}
];
in {
imports = builtins.map create_users users;
users.groups = {sftp_users = {};};
services.openssh.extraConfig = ''
Match Group sftp_users
X11Forwarding no
AllowTcpForwarding no
ChrootDirectory %h
ForceCommand internal-sftp
'';
}

View File

@@ -1,116 +0,0 @@
{
pkgs,
config,
...
}: let
ldapConfig = {
vaultwarden_url = "https://bitwarden.cloonar.com";
vaultwarden_admin_token = "@ADMIN_TOKEN@";
ldap_host = "localhost";
ldap_bind_dn = "cn=vmail,dc=cloonar,dc=com";
ldap_bind_password = "@LDAP_PASSWORD@";
ldap_search_base_dn = "dc=cloonar,dc=com";
ldap_search_filter = "(&(objectClass=inetOrgPerson))";
ldap_sync_interval_seconds = 3600;
};
ldapConfigFile =
pkgs.runCommand "config.toml"
{
buildInputs = [pkgs.remarshal];
preferLocalBuild = true;
} ''
remarshal -if json -of toml \
< ${pkgs.writeText "config.json" (builtins.toJSON ldapConfig)} \
> $out
'';
in {
packageOverrides = pkgs: {
nur = import (builtins.fetchTarball "https://github.com/nix-community/NUR/archive/master.tar.gz") {
inherit pkgs;
};
};
environment.systemPackages = with pkgs; [
nur.repos.mic92.vaultwarden_ldap
];
services.vaultwarden = {
enable = true;
dbBackend = "mysql";
config = {
domain = "https://bitwarden.cloonar.com";
signupsAllowed = false;
rocketPort = 3011;
databaseUrl = "mysql://bitwarden:<${config.sops.secrets.bitwarden-db-password.path}@localhost/bitwarden";
enableDbWal = "false";
websocketEnabled = true;
smtpHost = "smtp.cloonar.com";
smtpFrom = "bitwarden@cloonar.com";
smtpUsername = "bitwarden@cloonar.com";
};
};
systemd.services.vaultwarden.serviceConfig = {
EnvironmentFile = [config.sops.secrets.bitwarden-smtp-password.path];
};
systemd.services.vaultwarden_ldap = {
wantedBy = ["multi-user.target"];
preStart = ''
sed \
-e "s=@LDAP_PASSWORD@=$(<${config.sops.secrets.bitwarden-ldap-password.path})=" \
-e "s=@ADMIN_TOKEN@=$(<${config.sops.secrets.bitwarden-admin-token.path})=" \
${ldapConfigFile} \
> /run/vaultwarden_ldap/config.toml
'';
serviceConfig = {
Restart = "on-failure";
RestartSec = "2s";
ExecStart = "${config.nur.repos.mic92.vaultwarden_ldap}/bin/vaultwarden_ldap";
Environment = "CONFIG_PATH=/run/vaultwarden_ldap/config.toml";
RuntimeDirectory = ["vaultwarden_ldap"];
User = "vaultwarden_ldap";
};
};
services.nginx = {
virtualHosts."bitwarden.cloonar.com" = {
forceSSL = true;
enableACME = true;
acmeRoot = null;
extraConfig = ''
client_max_body_size 128M;
'';
locations."/" = {
proxyPass = "http://localhost:3011";
proxyWebsockets = true;
};
locations."/notifications/hub" = {
proxyPass = "http://localhost:3012";
proxyWebsockets = true;
};
locations."/notifications/hub/negotiate" = {
proxyPass = "http://localhost:3011";
proxyWebsockets = true;
};
};
};
sops.secrets = {
bitwarden-admin-token.owner = "vaultwarden_ldap";
bitwarden-ldap-password.owner = "vaultwarden_ldap";
bitwarden-db-password.owner = "vaultwarden";
bitwarden-smtp-password.owner = "vaultwarden";
};
users.users.vaultwarden_ldap = {
isSystemUser = true;
group = "vaultwarden_ldap";
};
users.groups.vaultwarden_ldap = {};
}

View File

@@ -1,38 +0,0 @@
let
# NixOS 22.11 as of 2023-01-12
nixpkgs = builtins.getFlake "github:nixos/nixpkgs/54644f409ab471e87014bb305eac8c50190bcf48";
sys = nixpkgs.lib.nixosSystem {
system = "x86_64-linux";
modules = [
({ config, pkgs, lib, modulesPath, ... }: {
imports = [
(modulesPath + "/installer/netboot/netboot-minimal.nix")
];
config = {
## Some useful options for setting up a new system
# services.getty.autologinUser = lib.mkForce "root";
# users.users.root.openssh.authorizedKeys.keys = [ ... ];
# console.keyMap = "de";
# hardware.video.hidpi.enable = true;
system.stateVersion = config.system.nixos.release;
};
})
];
};
run-pixiecore = let
hostPkgs = if sys.pkgs.system == builtins.currentSystem
then sys.pkgs
else nixpkgs.legacyPackages.${builtins.currentSystem};
build = sys.config.system.build;
in hostPkgs.writers.writeBash "run-pixiecore" ''
exec ${hostPkgs.pixiecore}/bin/pixiecore \
boot ${build.kernel}/bzImage ${build.netbootRamdisk}/initrd \
--cmdline "init=${build.toplevel}/init loglevel=4" \
--debug --dhcp-no-bind \
--port 64172 --status-port 64172 "$@"
'';
in
run-pixiecore

View File

@@ -1,55 +0,0 @@
{ config, lib, pkgs, ... }:
with lib;
let
uuid = "";
cfg = config.services.clevis;
in
{
options.services.clevis = {
uuid = mkOption {
type = types.str;
default = "";
description = lib.mdDoc ''
UUID of device to decrypt with clevis.
'';
};
};
config = {
environment.systemPackages = with pkgs; [
clevis
];
boot.initrd.extraUtilsCommands = ''
# clevis dependencies
copy_bin_and_libs ${pkgs.curl}/bin/curl
copy_bin_and_libs ${pkgs.bash}/bin/bash
copy_bin_and_libs ${pkgs.jose}/bin/jose
# clevis scripts and binaries
for i in ${pkgs.clevis}/bin/* ${pkgs.clevis}/bin/.clevis-wrapped; do
copy_bin_and_libs "$i"
done
'';
boot.initrd.luks.devices."nixos-enc" = {
device = "/dev/disk/by-uuid/${cfg.uuid}";
preOpenCommands = with pkgs; ''
# what would be a sensible way of automating this? at the very least the versions should not be hard coded
ln -s ../.. /nix/store/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-${bash.name}
ln -s ../.. /nix/store/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-${clevis.name}
ln -s ../.. /nix/store/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-${coreutils.name}
# this runs in the background so that /crypt-ramfs/device gets set up, which implies crypt-askpass
# is ready to receive an input which it will write to /crypt-ramfs/passphrase.
# for some reason writing that file directly does not seem to work, which is why the pipe is used.
# the clevis_luks_unlock_device function is equivalent to the clevis-luks-pass command but avoid
# needing to pass the slot argument.
# using clevis-luks-unlock directly can successfully open the luks device but requires the name
# argument to be passed and will not be detected by the stage-1 luks root stuff.
bash -e -c 'while [ ! -f /crypt-ramfs/device ]; do sleep 1; done; . /bin/clevis-luks-common-functions; clevis_luks_unlock_device "$(cat /crypt-ramfs/device)" | cryptsetup-askpass' &
'';
};
};
}

View File

@@ -1,28 +0,0 @@
{ config, pkgs, ... }:
{
hardware.pulseaudio.enable = false;
services.xserver = {
enable = true;
libinput.enable = true;
displayManager.gdm.enable = true;
displayManager.defaultSession = "sway";
desktopManager.gnome = {
enable = true;
extraGSettingsOverrides = ''
[org.gnome.desktop.interface]
gtk-theme='Dracula'
'';
};
};
environment.systemPackages = with pkgs; [
dracula-theme
gnome.gnome-tweaks
gnome.dconf-editor
gnomeExtensions.vitals
gnomeExtensions.forge
];
}

View File

@@ -1,15 +0,0 @@
{
services.gogs = {
enable = true;
domain = "git.cloonar.com";
rootUrl = "http://git.cloonar.com/";
httpAddress = "git.cloonar.com";
httpPort = 3000;
extraConfig = ''
[server]
EXTERNAL_URL = http://git.cloonar.com/
[auth]
DISABLE_REGISTRATION = true
'';
};
}

View File

@@ -1,90 +0,0 @@
{ config, lib, pkgs, ... }:
# Howdy face recognition + Chicony IR emitter toggler (legacy pam_python
# based variant).
with lib;
let
  cfg = config.services.howdy;
  ircfg = config.services.ir-toggle;
  # Patch howdy's stock config.ini with the configured camera and thresholds.
  # `dark_threshold` is required for X1 Carbon 7th to work.
  configINI = pkgs.runCommand "config.ini" { } ''
    cat ${cfg.package}/lib/security/howdy/config.ini > $out
    substituteInPlace $out --replace 'device_path = none' 'device_path = ${cfg.device}'
    substituteInPlace $out --replace 'dark_threshold = 50' 'dark_threshold = ${
      toString cfg.dark-threshold
    }'
    substituteInPlace $out --replace 'certainty = 3.5' 'certainty = ${
      toString cfg.certainty
    }'
  '';
  # PAM stanza prepended to the selected services: face recognition is
  # "sufficient", so the normal password prompt remains the fallback.
  pam-rule = pkgs.lib.mkDefault (pkgs.lib.mkBefore
    "auth sufficient ${pkgs.pam_python}/lib/security/pam_python.so ${config.services.howdy.package}/lib/security/howdy/pam.py");
in {
  options = {
    services.ir-toggle = {
      enable = mkOption {
        type = types.bool;
        default = false;
        description = ''
          Whether to enable Chicony IR Emitter toggler.
        '';
      };
    };
    services.howdy = {
      enable = mkOption {
        type = types.bool;
        default = false;
        description = ''
          Whether to enable howdy and PAM module for face recognition.
        '';
      };
      package = mkOption {
        type = types.package;
        default = pkgs.howdy;
        defaultText = "pkgs.howdy";
        description = ''
          Howdy package to use.
        '';
      };
      device = mkOption {
        type = types.path;
        default = "/dev/video0";
        description = ''
          Device file connected to the IR sensor.
        '';
      };
      certainty = mkOption {
        # BUG FIX: the default (3.5) is a float, so the previous `types.int`
        # made every read of this option fail the module type check.
        type = types.float;
        default = 3.5;
        description = ''
          The certainty of the detected face belonging to the user of the account. On a scale from 1 to 10, values above 5 are not recommended.
        '';
      };
      dark-threshold = mkOption {
        type = types.int;
        default = 50;
        description = ''
          Because of flashing IR emitters, some frames can be completely unlit. Skip the frame if the lowest 1/8 of the histogram is above this percentage of the total. The lower this setting is, the more dark frames are ignored.
        '';
      };
    };
  };
  config = mkIf cfg.enable {
    # NOTE(review): services.ir-toggle.enable (ircfg) is declared above but
    # never consulted here; the ir_toggle tooling is installed whenever howdy
    # is enabled.
    environment.systemPackages = [ cfg.package pkgs.ir_toggle ];
    environment.etc."howdy/config.ini".source = configINI;
    security.pam.services = {
      sudo.text = pam-rule; # Sudo
      login.text = pam-rule; # User login
      polkit-1.text = pam-rule; # PolKit
      i3lock.text = pam-rule; # i3lock
    };
    # Turn the IR emitter back on after resume from suspend.
    powerManagement.resumeCommands =
      "${pkgs.ir_toggle}/bin/chicony-ir-toggle on";
    services.udev.packages = [ pkgs.ir_toggle ];
  };
}

View File

@@ -1,45 +0,0 @@
# Howdy configuration attrset; used as the default for
# `services.howdy.settings` in the sibling module and rendered to INI.
{
  core = {
    detection_notice = false;
    timeout_notice = true;
    no_confirmation = false;
    suppress_unknown = false;
    abort_if_ssh = true;
    abort_if_lid_closed = true;
    disabled = false;
    use_cnn = false;
    workaround = "off";
  };
  # Camera capture parameters for the IR sensor.
  video = {
    certainty = 3.5;
    timeout = 4;
    device_path = "/dev/video2";
    warn_no_device = true;
    max_height = 320;
    frame_width = -1;
    frame_height = -1;
    # Skip frames whose histogram indicates they are almost completely dark
    # (flashing IR emitters produce unlit frames).
    dark_threshold = 60;
    recording_plugin = "opencv";
    device_format = "v4l2";
    force_mjpeg = false;
    exposure = -1;
    rotate = 0;
  };
  # Never keep images of authentication attempts.
  snapshots = {
    save_failed = false;
    save_successful = false;
  };
  rubberstamps = {
    enabled = false;
    stamp_rules = "nod 5s failsafe min_distance=12";
  };
  debug = {
    end_report = false;
    verbose_stamps = false;
    gtk_stdout = false;
  };
}

View File

@@ -1,123 +0,0 @@
{ config, lib, pkgs, ... }:
# Howdy face recognition (pam_howdy variant) plus the linux-enable-ir-emitter
# service that powers the IR camera illuminator.
with lib;
let
  # PAM stanza prepended to the selected services: pam_howdy is "sufficient",
  # so password authentication remains the fallback.
  pam-rule = pkgs.lib.mkDefault (pkgs.lib.mkBefore
    ''
      auth sufficient pam_unix.so try_first_pass nullok
      auth sufficient ${config.services.howdy.package}/lib/security/pam_howdy.so
    '');
  # sudo gets its own stanza without the leading pam_unix line.
  pam-sudo-rule = pkgs.lib.mkDefault (pkgs.lib.mkBefore
    ''
      auth sufficient ${config.services.howdy.package}/lib/security/pam_howdy.so
    '');
  cfg = config.services.howdy;
  irCfg = config.services.linux-enable-ir-emitter;
  settingsType = pkgs.formats.ini { };
in {
  options = {
    services.howdy = {
      enable = mkOption {
        type = types.bool;
        default = false;
        description = ''
          Whether to enable howdy and PAM module for face recognition.
        '';
      };
      package = mkOption {
        type = types.package;
        default = pkgs.howdy;
        defaultText = "pkgs.howdy";
        description = ''
          Howdy package to use.
        '';
      };
      settings = mkOption {
        inherit (settingsType) type;
        default = import ./config.nix;
        description = mdDoc ''
          Howdy configuration file. Refer to
          <https://github.com/boltgolt/howdy/blob/beta/howdy/src/config.ini>
          for options.
        '';
      };
    };
    services.linux-enable-ir-emitter = {
      enable = mkEnableOption (mdDoc "") // {
        description = mdDoc ''
          Whether to enable IR emitter hardware. Designed to be used with the
          Howdy facial authentication. After enabling the service, configure
          the emitter with `sudo linux-enable-ir-emitter configure`.
        '';
      };
      package = mkPackageOptionMD pkgs "linux-enable-ir-emitter" {} // {
        description = mdDoc ''
          Package to use for the Linux Enable IR Emitter service.
        '';
      };
      device = mkOption {
        type = types.str;
        default = "video2";
        description = mdDoc ''
          IR camera device to depend on. For example, for `/dev/video2`
          the value would be `video2`. Find this with the command
          {command}`realpath /dev/v4l/by-path/<generated-driver-name>`.
        '';
      };
    };
  };
  # FIX: honour services.linux-enable-ir-emitter.enable.  Previously all
  # IR-emitter resources were tied to cfg.enable and the declared enable
  # option was ignored.
  config = mkMerge [
    (mkIf cfg.enable {
      environment.systemPackages = [ cfg.package ];
      security.pam.services = {
        sudo.text = pam-sudo-rule; # Sudo
        login.text = pam-rule; # User login
        polkit-1.text = pam-rule; # PolKit
        swaylock.text = pam-rule; # screen locker
      };
      environment.etc."howdy/config.ini".source = settingsType.generate "howdy-config.ini" cfg.settings;
    })
    (mkIf irCfg.enable {
      environment.systemPackages = [ irCfg.package ];
      # Re-apply the emitter configuration at boot and after every form of
      # suspend/hibernate, once the camera device exists.
      systemd.services.linux-enable-ir-emitter = rec {
        description = "Enable the infrared emitter";
        script = "${getExe irCfg.package} run";
        wantedBy = [
          "multi-user.target"
          "suspend.target"
          "hybrid-sleep.target"
          "hibernate.target"
          "suspend-then-hibernate.target"
        ];
        after = wantedBy ++ [ "dev-${irCfg.device}.device" ];
      };
      # Persistent state directory, exposed at the /etc path the tool expects.
      systemd.tmpfiles.rules = [
        "d /var/lib/linux-enable-ir-emitter 0755 root root - -"
      ];
      environment.etc."linux-enable-ir-emitter".source = "/var/lib/linux-enable-ir-emitter";
    })
  ];
}

View File

@@ -1,22 +0,0 @@
{ config, pkgs, callPackage, ... }:
# i3 window manager on X11, without the xterm stub session.
{
  # Link /libexec from derivations into /run/current-system/sw so i3 helper
  # binaries are reachable.
  environment.pathsToLink = [ "/libexec" ];
  services.xserver = {
    enable = true;
    desktopManager.xterm.enable = false;
    windowManager.i3 = {
      enable = true;
      extraPackages = with pkgs; [
        dmenu # application launcher most people use
        i3status # gives you the default i3 status bar
        i3lock # default i3 screen locker
        i3blocks # if you are planning on using i3blocks over i3status
      ];
    };
  };
}

View File

@@ -1,21 +0,0 @@
{ config, ... }: {
  # InfluxDB with authentication and TLS from the host's ACME certificate.
  services.influxdb = {
    enable = true;
    extraConfig = {
      http = {
        auth-enabled = true;
        log-enabled = false;
        https-enabled = true;
        https-certificate = "/var/lib/acme/influxdb.cloonar.com/fullchain.pem";
        https-private-key = "/var/lib/acme/influxdb.cloonar.com/key.pem";
      };
    };
  };
  # InfluxDB HTTP API port.
  networking.firewall.allowedTCPPorts = [ 8086 ];
  # Let the influxdb user read the key and pick up renewed certificates.
  security.acme.certs."influxdb.cloonar.com" = {
    postRun = "systemctl restart influxdb.service";
    group = "influxdb";
  };
}

View File

@@ -1,78 +0,0 @@
{ pkgs, ... }:
# MariaDB server plus two root-only admin helpers for creating and dropping
# per-application databases.
let
  # `mysql-create-database <db> <host>`: create a database together with a
  # same-named user and a freshly generated random password (printed once).
  # FIX: dropped the embedded `#!/usr/bin/env bash` line — writeShellScriptBin
  # already emits the shebang, so the duplicate was a stray comment.
  mysqlCreateDatabase = pkgs.writeShellScriptBin "mysql-create-database" ''
    if [ $# -lt 2 ]
    then
      echo "Usage: $0 <database> <host>"
      exit 1
    fi
    if ! [ $EUID -eq 0 ]
    then
      echo "Must be root!" >&2
      exit 1
    fi
    DB="$1"
    HOST="$2"
    # 64 random alphanumeric characters; xargs trims the trailing whitespace.
    PASSWORD="$(tr -dc A-Za-z0-9 < /dev/urandom | head -c 64 | xargs)"
    cat <<EOF | mysql --host localhost --user root
    create database $DB;
    grant usage on $DB.* to '$DB'@'$HOST' identified by '$PASSWORD';
    grant all privileges on $DB.* to '$DB'@'$HOST';
    EOF
    echo
    echo "Password for user $DB is:"
    echo
    echo $PASSWORD
    echo
  '';
  # `mysql-delete-database <db>`: drop a database and its same-named user.
  # FIX: removed an unused PASSWORD=$(... /dev/urandom ...) line copied from
  # the create helper; deletion needs no password.
  mysqlDeleteDatabase = pkgs.writeShellScriptBin "mysql-delete-database" ''
    if [ $# -lt 1 ]
    then
      echo "Usage: $0 <database>"
      exit 1
    fi
    if ! [ $EUID -eq 0 ]
    then
      echo "Must be root!" >&2
      exit 1
    fi
    DB="$1"
    cat <<EOF | mysql --host localhost --user root
    drop database $DB;
    drop user '$DB';
    EOF
    echo
    echo "Dropped database $DB!"
    echo
  '';
in {
  environment.systemPackages = [
    mysqlCreateDatabase
    mysqlDeleteDatabase
  ];
  services.mysql = {
    enable = true;
    package = pkgs.mariadb;
    settings = {
      mysqld = {
        # Allow large dumps/imports.
        max_allowed_packet = "64M";
      };
    };
  };
  # Periodic dump of the `mysql` grants database.
  services.mysqlBackup.enable = true;
  services.mysqlBackup.databases = [ "mysql" ];
}

View File

@@ -1,13 +0,0 @@
{ pkgs, ... }:
# MariaDB server plus the packaged admin helper scripts.
{
  environment.systemPackages = [
    (pkgs.callPackage ./pkgs/mysql-scripts.nix { })
  ];
  services.mysql = {
    enable = true;
    package = pkgs.mariadb;
  };
}

View File

@@ -1,14 +0,0 @@
{ stdenv, lib, bash }:
# Package the helper shell scripts in ./scripts as executables in $out/bin.
stdenv.mkDerivation {
  name = "mysql-scripts";
  src = ./scripts;
  buildInputs = [ bash ];
  installPhase = ''
    install -D -t $out/bin *
  '';
}

View File

@@ -1,28 +0,0 @@
#!/usr/bin/env bash
# Create a MySQL database plus a same-named user reachable from any host
# ('%'), with a freshly generated random password printed once at the end.
# Usage: mysql-create-database <database>   (must be run as root)
if [ $# -lt 1 ]
then
echo "Usage: $0 <database>"
exit 1
fi
if ! [ $EUID -eq 0 ]
then
echo "Must be root!" >&2
exit 1
fi
DB="$1"
# 64 random alphanumeric characters; xargs trims trailing whitespace.
PASSWORD="$(tr -dc A-Za-z0-9 < /dev/urandom | head -c 64 | xargs)"
cat <<EOF | mysql --host localhost --user root
create database $DB;
grant usage on $DB.* to '$DB'@'%' identified by '$PASSWORD';
grant all privileges on $DB.* to '$DB'@'%';
EOF
echo
echo "Password for user $DB is:"
echo
echo $PASSWORD
echo

View File

@@ -1,25 +0,0 @@
#!/usr/bin/env bash
if [ $# -lt 1 ]
then
echo "Usage: $0 <database>"
exit 1
fi
if ! [ $EUID -eq 0 ]
then
echo "Must be root!" >&2
exit 1
fi
DB="$1"
PASSWORD="$(tr -dc A-Za-z0-9 < /dev/urandom | head -c 64 | xargs)"
cat <<EOF | mysql --host localhost --user root
drop database $DB;
drop user '$DB';
EOF
echo
echo "Dropped database $DB!"
echo

View File

@@ -1,135 +0,0 @@
{ pkgs, lib, config, ... }:
# TYPO3 site for cloonar.dev: nginx vhost + dedicated PHP-FPM pool with the
# upstream-recommended TYPO3 nginx protection rules.
let
  domain = "cloonar.dev";
  dataDir = "/var/www/${domain}";
in {
  # The pool user's home is the docroot under /var/www; PHP-FPM must see it.
  systemd.services."phpfpm-${domain}".serviceConfig.ProtectHome = lib.mkForce false;
  services.phpfpm.pools."${domain}" = {
    user = domain;
    settings = {
      "listen.owner" = config.services.nginx.user;
      "pm" = "dynamic";
      "pm.max_children" = 32;
      "pm.max_requests" = 500;
      "pm.start_servers" = 2;
      "pm.min_spare_servers" = 2;
      "pm.max_spare_servers" = 5;
      "php_admin_value[error_log]" = "stderr";
      "php_admin_flag[log_errors]" = true;
      "catch_workers_output" = true;
      "access.log" = "/var/log/$pool.access.log";
    };
    phpPackage = pkgs.php81;
    phpEnv."PATH" = lib.makeBinPath [ pkgs.php81 ];
  };
  # CONSISTENCY FIX: use "${domain}" for the vhost key instead of repeating
  # the literal host name (matches the sibling variants of this file).
  services.nginx.virtualHosts."${domain}" = {
    forceSSL = true;
    enableACME = true;
    acmeRoot = null;
    root = "${dataDir}";
    locations."/favicon.ico".extraConfig = ''
      log_not_found off;
      access_log off;
    '';
    # TYPO3 - Rule for versioned static files, configured through:
    # - $GLOBALS['TYPO3_CONF_VARS']['BE']['versionNumberInFilename']
    # - $GLOBALS['TYPO3_CONF_VARS']['FE']['versionNumberInFilename']
    extraConfig = ''
      if (!-e $request_filename) {
        rewrite ^/(.+)\.(\d+)\.(php|js|css|png|jpg|gif|gzip)$ /$1.$3 last;
      }
    '';
    # TYPO3 - Block access to composer files
    locations."~* composer\.(?:json|lock)".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to flexform files
    locations."~* flexform[^.]*\.xml".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to language files
    locations."~* locallang[^.]*\.(?:xml|xlf)$".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to static typoscript files
    locations."~* ext_conf_template\.txt|ext_typoscript_constants\.txt|ext_typoscript_setup\.txt".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to miscellaneous protected files
    locations."~* /.*\.(?:bak|co?nf|cfg|ya?ml|ts|typoscript|tsconfig|dist|fla|in[ci]|log|sh|sql|sqlite)$".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to recycler and temporary directories
    locations."~ _(?:recycler|temp)_/".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to configuration files stored in fileadmin
    locations."~ fileadmin/(?:templates)/.*\.(?:txt|ts|typoscript)$".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to libraries, source and temporary compiled data
    locations."~ ^(?:vendor|typo3_src|typo3temp/var)".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to protected extension directories
    locations."~ (?:typo3conf/ext|typo3/sysext|typo3/ext)/[^/]+/(?:Configuration|Resources/Private|Tests?|Documentation|docs?)/".extraConfig = ''
      deny all;
    '';
    locations."/".extraConfig = ''
      index index.php index.html;
      try_files $uri $uri/ /index.php$is_args$args;
    '';
    # TYPO3 Backend URLs
    locations."/typo3".extraConfig = ''
      rewrite ^ /typo3/;
    '';
    locations."/typo3/".extraConfig = ''
      try_files $uri /typo3/index.php$is_args$args;
    '';
    # Hand all PHP requests to the pool socket defined above.
    locations."~ [^/]\.php(/|$)".extraConfig = ''
      fastcgi_split_path_info ^(.+?\.php)(/.*)$;
      if (!-f $document_root$fastcgi_script_name) {
        return 404;
      }
      include ${pkgs.nginx}/conf/fastcgi_params;
      include ${pkgs.nginx}/conf/fastcgi.conf;
      fastcgi_buffer_size 32k;
      fastcgi_buffers 8 16k;
      fastcgi_connect_timeout 240s;
      fastcgi_read_timeout 240s;
      fastcgi_send_timeout 240s;
      fastcgi_pass unix:${config.services.phpfpm.pools."${domain}".socket};
      fastcgi_index index.php;
    '';
  };
  # Site user owning the docroot; nginx group so the webserver can read it.
  users.users."${domain}" = {
    isSystemUser = true;
    createHome = true;
    home = dataDir;
    homeMode = "770";
    #home = "/home/${domain}";
    group = "nginx";
  };
  users.groups.${domain} = {};
}

View File

@@ -1,55 +0,0 @@
{ config, pkgs, ... }:
# nginx with hardened defaults plus the cloonar.dev TYPO3 vhost.
{
  imports = [
    ./cloonar.dev.nix
  ];
  # Image tooling used by the hosted sites.
  environment.systemPackages = with pkgs; [
    imagemagick
    ghostscript
  ];
  # Docroots live under user homes; let nginx read them.
  systemd.services.nginx.serviceConfig.ProtectHome = "read-only";
  services.nginx = {
    enable = true;
    recommendedGzipSettings = true;
    recommendedOptimisation = true;
    recommendedProxySettings = true;
    recommendedTlsSettings = true;
    sslCiphers = "AES256+EECDH:AES256+EDH:!aNULL";
    commonHttpConfig = ''
      # Add HSTS header with preloading to HTTPS requests.
      # Adding this header to HTTP requests is discouraged
      map $scheme $hsts_header {
        https "max-age=31536000; includeSubdomains; preload";
      }
      add_header Strict-Transport-Security $hsts_header;
      # Enable CSP for your services.
      #add_header Content-Security-Policy "script-src 'self'; object-src 'none'; base-uri 'none';" always;
      # Minimize information leaked to other domains
      add_header 'Referrer-Policy' 'origin-when-cross-origin';
      # Disable embedding as a frame
      add_header X-Frame-Options DENY;
      # Prevent injection of code in other mime types (XSS Attacks)
      add_header X-Content-Type-Options nosniff;
      # Enable XSS protection of the browser.
      # May be unnecessary when CSP is configured properly (see above)
      add_header X-XSS-Protection "1; mode=block";
      # This might create errors
      proxy_cookie_path / "/; secure; HttpOnly; SameSite=strict";
    '';
  };
}

View File

@@ -1,48 +0,0 @@
{ config, ... }:
# nginx with hardened defaults plus the cloonar.dev TYPO3 vhost.
# NOTE(review): near-duplicate of the sibling nginx module without the
# imagemagick/ghostscript packages and ProtectHome override.
{
  imports = [
    ./cloonar.dev.nix
  ];
  services.nginx = {
    enable = true;
    recommendedGzipSettings = true;
    recommendedOptimisation = true;
    recommendedProxySettings = true;
    recommendedTlsSettings = true;
    sslCiphers = "AES256+EECDH:AES256+EDH:!aNULL";
    commonHttpConfig = ''
      # Add HSTS header with preloading to HTTPS requests.
      # Adding this header to HTTP requests is discouraged
      map $scheme $hsts_header {
        https "max-age=31536000; includeSubdomains; preload";
      }
      add_header Strict-Transport-Security $hsts_header;
      # Enable CSP for your services.
      #add_header Content-Security-Policy "script-src 'self'; object-src 'none'; base-uri 'none';" always;
      # Minimize information leaked to other domains
      add_header 'Referrer-Policy' 'origin-when-cross-origin';
      # Disable embedding as a frame
      add_header X-Frame-Options DENY;
      # Prevent injection of code in other mime types (XSS Attacks)
      add_header X-Content-Type-Options nosniff;
      # Enable XSS protection of the browser.
      # May be unnecessary when CSP is configured properly (see above)
      add_header X-XSS-Protection "1; mode=block";
      # This might create errors
      proxy_cookie_path / "/; secure; HttpOnly; SameSite=strict";
    '';
  };
}

View File

@@ -1,2 +0,0 @@
#(import <nixpkgs> {}).callPackage (builtins.fetchurl "https://raw.githubusercontent.com/delroth/infra.delroth.net/master/pkgs/parsec.nix") {}
#(import <nixpkgs> {}).callPackage (builtins.fetchurl "https://raw.githubusercontent.com/delroth/infra.delroth.net/38a040e4bbfef7ee13c4b0a75dc79c77ddfdc759/pkgs/parsec.nix") {}

View File

@@ -1,112 +0,0 @@
{ config, lib, pkgs, ... }:
# room-assistant (Bluetooth presence detection) running in Docker on a
# Raspberry Pi, reporting to Home Assistant over MQTT.
with lib;
let
  cfg = config.services.room-assistant;
in
{
  options = {
    services.room-assistant = {
      enable = mkEnableOption (lib.mdDoc "room-assistant");
      # FIX: `with types; uniq string` used the deprecated `types.string`
      # alias; `types.str` is the supported equivalent.
      name = mkOption {
        type = types.str;
        description = "
          Instance name reported to Home Assistant (global.instanceName).
        ";
        default = "room";
      };
      mqttHost = mkOption {
        type = types.str;
        description = "
          Host name of the MQTT broker to publish to.
        ";
        default = "";
      };
      mqttUser = mkOption {
        type = types.str;
        description = "
          MQTT user name.
        ";
        default = "espresense";
      };
      # NOTE(review): the password ends up world-independent in /etc with
      # mode 0440 and in the Nix store; consider a secrets mechanism.
      mqttPassword = mkOption {
        type = types.str;
        description = "
          MQTT password.
        ";
        default = "insecure-password";
      };
    };
  };
  config = mkIf cfg.enable {
    hardware = {
      bluetooth.enable = true;
      deviceTree.filter = "bcm2711-rpi-*.dtb";
    };
    # Attach the Pi's UART Bluetooth controller before bluetoothd starts.
    systemd.services = {
      btattach = {
        before = [ "bluetooth.service" ];
        after = [ "dev-ttyAMA0.device" ];
        wantedBy = [ "multi-user.target" ];
        serviceConfig = {
          ExecStart = "${pkgs.bluez}/bin/btattach -B /dev/ttyAMA0 -P bcm -S 3000000";
        };
      };
    };
    virtualisation.docker.enable = true;
    # Configuration mounted into the container as local.yml.
    environment.etc."room-assistant.yml" = {
      text = ''
        global:
          instanceName: ${cfg.name}
          integrations:
            - homeAssistant
            - bluetoothClassic
        homeAssistant:
          mqttUrl: 'mqtt://${cfg.mqttHost}'
          mqttOptions:
            username: ${cfg.mqttUser}
            password: ${cfg.mqttPassword}
        bluetoothClassic:
          addresses:
            - A8:5B:B7:98:84:F0
            - 00:24:E4:E6:FE:AD
      '';
      # The UNIX file mode bits
      mode = "0440";
    };
    systemd.services."room-assistant" = {
      description = "room-assistant";
      wantedBy = [ "multi-user.target" ];
      after = [ "docker.service" "docker.socket" ];
      requires = [ "docker.service" "docker.socket" ];
      script = ''
        exec ${pkgs.docker}/bin/docker run \
          --rm \
          --name=room-assistant \
          --network=host \
          -v /var/run/dbus:/var/run/dbus \
          -v /etc/room-assistant.yml:/room-assistant/config/local.yml \
          --cap-add=NET_ADMIN \
          mkerix/room-assistant:2.20.0
      '';
      preStop = "${pkgs.docker}/bin/docker stop room-assistant";
      reload = "${pkgs.docker}/bin/docker restart room-assistant";
      serviceConfig = {
        # Remove any stale container before starting and after stopping.
        ExecStartPre = "-${pkgs.docker}/bin/docker rm -f room-assistant";
        ExecStopPost = "-${pkgs.docker}/bin/docker rm -f room-assistant";
        TimeoutStartSec = 0;
        TimeoutStopSec = 120;
        Restart = "always";
      };
    };
  };
}

View File

@@ -1,22 +0,0 @@
{ pkgs, ... }:
# Roundcube webmail for the test mail stack.
let
  domain = "mail-test.cloonar.com";
in
{
  services.roundcube = {
    enable = true;
    hostName = "${domain}";
    # Raw PHP appended to Roundcube's config: STARTTLS IMAP/SMTP upstreams,
    # submitting with the logged-in user's own credentials (%u/%p).
    # NOTE(review): imap_host vs. smtp_server naming mixes Roundcube config
    # generations — verify against the deployed Roundcube version.
    extraConfig = ''
      $config['imap_host'] = 'tls://imap-test.cloonar.com';
      $config['smtp_server'] = "tls://mail-test.cloonar.com";
      $config['smtp_user'] = "%u";
      $config['smtp_pass'] = "%p";
    '';
  };
  services.nginx.virtualHosts."${domain}" = {
    forceSSL = true;
    enableACME = true;
    acmeRoot = null;
  };
}

View File

@@ -1,118 +0,0 @@
{ pkgs, lib, config, ... }:
# LTB self-service-password (LDAP password reset UI) behind nginx + PHP-FPM.
# The application tarball is downloaded at service start, not packaged.
let
  domain = "self-service.cloonar.com";
  php = pkgs.php82;
  version = "1.5.2";
  dataDir = "/var/www/${domain}";
in {
  environment.systemPackages = with pkgs; [
    smarty3
  ];
  # The docroot lives under the pool user's home; PHP-FPM must see it.
  systemd.services."phpfpm-${domain}".serviceConfig.ProtectHome = lib.mkForce false;
  # One-shot setup: fetch and unpack the release, then drop in the local
  # config override.
  # NOTE(review): the download is unpinned (no checksum) and re-runs on every
  # boot; the writeText name "nextcloud-config.php" is misleading — this is
  # the self-service-password config, not Nextcloud.
  systemd.services.selfservicepassword_setup = let
    overrideConfig = pkgs.writeText "nextcloud-config.php" ''
      <?php
      $ldap_url = "ldap://ldap-test.cloonar.com:389";
      $ldap_starttls = true;
      define("SMARTY", "Smarty.class.php");
      $use_tokens = false;
      $use_sms = false;
    '';
  in {
    wantedBy = [ "multi-user.target" ];
    before = [ "phpfpm-${domain}.service" ];
    script = ''
      mkdir -p ${dataDir}/public
      curl -L https://github.com/ltb-project/self-service-password/archive/refs/tags/v${version}.tar.gz > ${dataDir}/package.tar.gz
      /run/current-system/sw/bin/tar xf ${dataDir}/package.tar.gz -C ${dataDir}
      mv ${dataDir}/self-service-password-${version}/* ${dataDir}/public/
      rm -rf ${dataDir}/self-service-password-${version}
      cp ${overrideConfig} ${dataDir}/public/conf/config.inc.local.php
    '';
    path = [ pkgs.gzip pkgs.curl ];
    serviceConfig.Type = "oneshot";
    serviceConfig.User = domain;
  };
  services.phpfpm.pools."${domain}" = {
    user = domain;
    settings = {
      "listen.owner" = config.services.nginx.user;
      "pm" = "dynamic";
      "pm.max_children" = 32;
      "pm.max_requests" = 500;
      "pm.start_servers" = 2;
      "pm.min_spare_servers" = 2;
      "pm.max_spare_servers" = 5;
      "php_flag[display_errors]" = "on";
      "php_admin_value[error_log]" = "/var/log/${domain}.error.log";
      "php_admin_flag[log_errors]" = "on";
      # Smarty templates are resolved via the PHP include path.
      "php_value[include_path]" = ".:/usr/share/php:${pkgs.smarty3}";
      "catch_workers_output" = "yes";
      "access.log" = "/var/log/$pool.access.log";
    };
    phpPackage = php;
    phpEnv."PATH" = lib.makeBinPath [ php ];
  };
  services.nginx.virtualHosts."${domain}" = {
    forceSSL = true;
    enableACME = true;
    acmeRoot = null;
    root = "${dataDir}/public/htdocs";
    locations."/favicon.ico".extraConfig = ''
      log_not_found off;
      access_log off;
    '';
    # extraConfig = ''
    #   if (!-e $request_filename) {
    #     rewrite ^/(.+)\.(\d+)\.(php|js|css|png|jpg|gif|gzip)$ /$1.$3 last;
    #   }
    # '';
    locations."/".extraConfig = ''
      index index.php index.html;
      try_files $uri $uri/ /index.php$is_args$args;
    '';
    # Hand PHP requests to the pool socket defined above.
    locations."~ [^/]\.php(/|$)".extraConfig = ''
      fastcgi_split_path_info ^(.+?\.php)(/.*)$;
      if (!-f $document_root$fastcgi_script_name) {
        return 404;
      }
      include ${pkgs.nginx}/conf/fastcgi_params;
      include ${pkgs.nginx}/conf/fastcgi.conf;
      fastcgi_buffer_size 32k;
      fastcgi_buffers 8 16k;
      fastcgi_connect_timeout 240s;
      fastcgi_read_timeout 240s;
      fastcgi_send_timeout 240s;
      fastcgi_pass unix:${config.services.phpfpm.pools."${domain}".socket};
      fastcgi_index index.php;
    '';
    # locations."~ /\.".extraConfig = ''
    #   log_not_found off;
    #   deny all;
    # '';
    #
    # locations."~ /scripts".extraConfig = ''
    #   log_not_found off;
    #   deny all;
    # '';
  };
  # Site user owning the docroot; nginx group so the webserver can read it.
  users.users."${domain}" = {
    #isSystemUser = true;
    isNormalUser = true;
    createHome = true;
    home = dataDir;
    homeMode= "770";
    group = "nginx";
  };
  users.groups.${domain} = {};
}

View File

@@ -1,38 +0,0 @@
{ config, pkgs, ... }:
# Snapcast client: plays the synchronized audio stream from the mopidy host
# through PulseAudio, auto-logging in a dedicated user so the user service
# starts at boot.
{
  # security.rtkit.enable = true;
  # services.pipewire = {
  #   enable = true;
  #   alsa.enable = true;
  #   alsa.support32Bit = true;
  #   pulse.enable = true;
  #
  # };
  sound.enable = true;
  hardware.pulseaudio.enable = true;
  hardware.pulseaudio.support32Bit = true;
  # Auto-login on the console so the systemd user session (and thus the
  # snapclient user service below) comes up unattended.
  services.getty.autologinUser = "snapclient";
  users.groups.snapclient = {};
  users.users.snapclient = {
    isNormalUser = true;
    group = "snapclient";
    extraGroups = [ "audio" "pipewire" ];
  };
  systemd.user.services.snapclient = {
    wantedBy = [
      "default.target"
    ];
    after = [
      "network.target"
    ];
    serviceConfig = {
      # User = "snapclient";
      # Group = "snapclient";
      ExecStart = "${pkgs.snapcast}/bin/snapclient -h mopidy.cloonar.com -p 1704 --player pulse";
      Restart = "on-failure";
    };
  };
}

View File

@@ -1,81 +0,0 @@
{ config, pkgs, ... }:
# Tang server (NBDE / clevis network-bound disk encryption key service),
# wired up manually via systemd socket activation on port 8000.
let
  user = "tang";
  group = "tang";
in {
  environment.systemPackages = with pkgs; [
    jose
    tang
  ];
  # Regenerate the advertisement cache whenever the key database changes.
  systemd.paths.tangd-update = {
    pathConfig = {
      PathChanged = "/var/db/tang";
      MakeDirectory = true;
      DirectoryMode = "0700";
    };
  };
  systemd.services.tangd-update = {
    description = "Tang update";
    path = [ pkgs.jose ];
    serviceConfig = {
      Type = "oneshot";
      StandardError = "journal";
      ExecStart = "${pkgs.tang}/libexec/tangd-update /var/db/tang /var/cache/tang";
    };
  };
  # Generate the initial signing/exchange keys if none exist.
  systemd.services.tangd-keygen = {
    description = "Tang keygen";
    documentation = [ "man:tang(8)" ];
    path = [ pkgs.jose ];
    serviceConfig = {
      Type = "oneshot";
      StandardError = "journal";
      ExecStart = "${pkgs.tang}/libexec/tangd-keygen /var/db/tang";
    };
  };
  # Per-connection instance: tangd speaks HTTP over the accepted socket.
  systemd.services."tangd@" = {
    description = "Tang Server";
    documentation = [ "man:tang(8)" ];
    path = [ pkgs.jose ];
    serviceConfig = {
      StandardInput = "socket";
      StandardOutput = "socket";
      StandardError = "journal";
      ExecStart = "${pkgs.tang}/libexec/tangd /var/cache/tang";
    };
  };
  # Accept=true spawns one tangd@ instance per connection.
  systemd.sockets.tangd = {
    description = "Tang Server socket";
    documentation = [ "man:tang(8)" ];
    requires = [
      "tangd-keygen.service"
      "tangd-update.service"
      "tangd-update.path"
    ];
    after = [
      "tangd-keygen.service"
      "tangd-update.service"
    ];
    wantedBy = [ "multi-user.target" ];
    socketConfig = {
      ListenStream = 8000;
      Accept = true;
    };
  };
  # users.groups.tang = {};
  # users.users.tang = {
  #   isSystemUser = true;
  #   group = "tang";
  #   home = "/var/db/tang";
  #   createHome = true;
  #   description = "Tang system user";
  # };
}

View File

@@ -1,13 +0,0 @@
{ config, pkgs, ... }:
# Tuxedo Control Center via the third-party tuxedo-nixos module.
# NOTE(review): fetchTarball of `master` is unpinned (no rev/sha256), so
# evaluation is impure and not reproducible — consider pinning a revision.
let
  tuxedo = import (builtins.fetchTarball "https://github.com/blitz/tuxedo-nixos/archive/master.tar.gz");
in {
  # ...
  imports = [
    tuxedo.module
  ];
  hardware.tuxedo-control-center.enable = true;
}

View File

@@ -1,19 +0,0 @@
{ pkgs, ... }:
# OpenVPN client for epicenter.works, pushing VPN DNS servers into
# systemd-resolved via the update-systemd-resolved hook scripts.
{
  services.resolved.enable = true;
  services.openvpn.servers = {
    epicenterWorks = {
      # `script-security 2` is required so OpenVPN may run the external
      # up/down hook scripts.
      config = ''
        config /etc/nixos/modules/vpn/epicenter.works/vpn.conf
        script-security 2
        up ${pkgs.update-systemd-resolved}/libexec/openvpn/update-systemd-resolved
        up-restart
        down ${pkgs.update-systemd-resolved}/libexec/openvpn/update-systemd-resolved
        down-pre
      '';
    };
  };
}

View File

@@ -1,40 +0,0 @@
{ pkgs, ... }:
# WireGuard client tunnel (wg0) into the epicenter.works networks, with VPN
# DNS injected through openresolv while the tunnel is up.
{
  networking.firewall = {
    allowedUDPPorts = [ 51820 ]; # Clients and peers can use the same port, see listenport
  };
  # Enable WireGuard
  networking.wireguard.interfaces = {
    # "wg0" is the network interface name. You can name the interface arbitrarily.
    wg0 = {
      # Determines the IP address and subnet of the client's end of the tunnel interface.
      ips = [ "10.50.60.6/24" ];
      listenPort = 51820; # to match firewall allowedUDPPorts (without this wg uses random port numbers)
      privateKeyFile = "/run/secrets/wg_private_key";
      # Register the VPN DNS server/search domain for wg0 only.
      postSetup = ''printf "search epicenter.works\nnameserver 10.25.0.10" | ${pkgs.openresolv}/bin/resolvconf -a wg0 -m 0'';
      postShutdown = "${pkgs.openresolv}/bin/resolvconf -d wg0";
      peers = [
        # For a client configuration, one peer entry for the server will suffice.
        {
          # Public key of the server (not a file path).
          publicKey = "T7jPGSapSudtKyWwi2nu+2hjjse96I4U3lccRHZWd2s=";
          presharedKeyFile = "/run/secrets/wg_preshared_key";
          allowedIPs = [ "10.50.60.0/24" "10.25.0.0/24" ];
          # Set this to the server IP and port.
          endpoint = "5.9.131.17:51821"; # ToDo: route to endpoint not automatically configured https://wiki.archlinux.org/index.php/WireGuard#Loop_routing https://discourse.nixos.org/t/solved-minimal-firewall-setup-for-wireguard-client/7577
          # Send keepalives every 25 seconds. Important to keep NAT tables alive.
          persistentKeepalive = 25;
        }
      ];
    };
  };
}

View File

@@ -1,14 +0,0 @@
# OpenVPN client profile for vpn.epicenter.works; credentials and CA are
# provided at runtime from /run/secrets.
dev tun
persist-tun
persist-key
cipher AES-128-GCM
auth RSA-SHA256
client
resolv-retry infinite
remote vpn.epicenter.works 1195 udp
# lport 0: use a random local source port.
lport 0
# Pin the expected server certificate subject to prevent MITM by another
# certificate signed by the same CA.
verify-x509-name "C=AT, ST=Vienna, L=Vienna, O=epicenter_works, emailAddress=team@epicenter.works, CN=epicenter.works VPN Server" subject
remote-cert-tls server
ca /run/secrets/epicenter_vpn_ca
cert /run/secrets/epicenter_vpn_cert
key /run/secrets/epicenter_vpn_key

View File

@@ -1,135 +0,0 @@
{ pkgs, lib, config, ... }:
# TYPO3 site for cloonar.dev: nginx vhost + dedicated PHP-FPM pool with the
# upstream-recommended TYPO3 nginx protection rules.
let
  domain = "cloonar.dev";
  dataDir = "/var/www/${domain}";
in {
  # The pool user's home is the docroot under /var/www; PHP-FPM must see it.
  systemd.services."phpfpm-${domain}".serviceConfig.ProtectHome = lib.mkForce false;
  services.phpfpm.pools."${domain}" = {
    user = domain;
    settings = {
      "listen.owner" = config.services.nginx.user;
      "pm" = "dynamic";
      "pm.max_children" = 32;
      "pm.max_requests" = 500;
      "pm.start_servers" = 2;
      "pm.min_spare_servers" = 2;
      "pm.max_spare_servers" = 5;
      "php_admin_value[error_log]" = "stderr";
      "php_admin_flag[log_errors]" = true;
      "catch_workers_output" = true;
      "access.log" = "/var/log/$pool.access.log";
    };
    phpPackage = pkgs.php81;
    phpEnv."PATH" = lib.makeBinPath [ pkgs.php81 ];
  };
  services.nginx.virtualHosts."${domain}" = {
    forceSSL = true;
    enableACME = true;
    acmeRoot = null;
    root = "${dataDir}";
    locations."/favicon.ico".extraConfig = ''
      log_not_found off;
      access_log off;
    '';
    # TYPO3 - Rule for versioned static files, configured through:
    # - $GLOBALS['TYPO3_CONF_VARS']['BE']['versionNumberInFilename']
    # - $GLOBALS['TYPO3_CONF_VARS']['FE']['versionNumberInFilename']
    extraConfig = ''
      if (!-e $request_filename) {
        rewrite ^/(.+)\.(\d+)\.(php|js|css|png|jpg|gif|gzip)$ /$1.$3 last;
      }
    '';
    # TYPO3 - Block access to composer files
    locations."~* composer\.(?:json|lock)".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to flexform files
    locations."~* flexform[^.]*\.xml".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to language files
    locations."~* locallang[^.]*\.(?:xml|xlf)$".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to static typoscript files
    locations."~* ext_conf_template\.txt|ext_typoscript_constants\.txt|ext_typoscript_setup\.txt".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to miscellaneous protected files
    locations."~* /.*\.(?:bak|co?nf|cfg|ya?ml|ts|typoscript|tsconfig|dist|fla|in[ci]|log|sh|sql|sqlite)$".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to recycler and temporary directories
    locations."~ _(?:recycler|temp)_/".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to configuration files stored in fileadmin
    locations."~ fileadmin/(?:templates)/.*\.(?:txt|ts|typoscript)$".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to libraries, source and temporary compiled data
    locations."~ ^(?:vendor|typo3_src|typo3temp/var)".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to protected extension directories
    locations."~ (?:typo3conf/ext|typo3/sysext|typo3/ext)/[^/]+/(?:Configuration|Resources/Private|Tests?|Documentation|docs?)/".extraConfig = ''
      deny all;
    '';
    locations."/".extraConfig = ''
      index index.php index.html;
      try_files $uri $uri/ /index.php$is_args$args;
    '';
    # TYPO3 Backend URLs
    locations."/typo3".extraConfig = ''
      rewrite ^ /typo3/;
    '';
    locations."/typo3/".extraConfig = ''
      try_files $uri /typo3/index.php$is_args$args;
    '';
    # Hand all PHP requests to the pool socket defined above.
    locations."~ [^/]\.php(/|$)".extraConfig = ''
      fastcgi_split_path_info ^(.+?\.php)(/.*)$;
      if (!-f $document_root$fastcgi_script_name) {
        return 404;
      }
      include ${pkgs.nginx}/conf/fastcgi_params;
      include ${pkgs.nginx}/conf/fastcgi.conf;
      fastcgi_buffer_size 32k;
      fastcgi_buffers 8 16k;
      fastcgi_connect_timeout 240s;
      fastcgi_read_timeout 240s;
      fastcgi_send_timeout 240s;
      fastcgi_pass unix:${config.services.phpfpm.pools."${domain}".socket};
      fastcgi_index index.php;
    '';
  };
  # Site user owning the docroot; nginx group so the webserver can read it.
  users.users."${domain}" = {
    isSystemUser = true;
    createHome = true;
    home = dataDir;
    homeMode= "770";
    #home = "/home/${domain}";
    group = "nginx";
  };
  users.groups.${domain} = {};
}

View File

@@ -1,56 +0,0 @@
{ config, pkgs, ... }:
# nginx with hardened defaults plus the cloonar.dev and diabetes-austria
# TYPO3 vhosts.
{
  imports = [
    ./cloonar.dev.nix
    ./diabetes-austria.cloonar.dev.nix
  ];
  # Image tooling used by the hosted sites.
  environment.systemPackages = with pkgs; [
    imagemagick
    ghostscript
  ];
  # Docroots live under user homes; let nginx read them.
  systemd.services.nginx.serviceConfig.ProtectHome = "read-only";
  services.nginx = {
    enable = true;
    recommendedGzipSettings = true;
    recommendedOptimisation = true;
    recommendedProxySettings = true;
    recommendedTlsSettings = true;
    sslCiphers = "AES256+EECDH:AES256+EDH:!aNULL";
    commonHttpConfig = ''
      # Add HSTS header with preloading to HTTPS requests.
      # Adding this header to HTTP requests is discouraged
      map $scheme $hsts_header {
        https "max-age=31536000; includeSubdomains; preload";
      }
      add_header Strict-Transport-Security $hsts_header;
      # Enable CSP for your services.
      #add_header Content-Security-Policy "script-src 'self'; object-src 'none'; base-uri 'none';" always;
      # Minimize information leaked to other domains
      add_header 'Referrer-Policy' 'origin-when-cross-origin';
      # Disable embedding as a frame
      add_header X-Frame-Options DENY;
      # Prevent injection of code in other mime types (XSS Attacks)
      add_header X-Content-Type-Options nosniff;
      # Enable XSS protection of the browser.
      # May be unnecessary when CSP is configured properly (see above)
      add_header X-XSS-Protection "1; mode=block";
      # This might create errors
      proxy_cookie_path / "/; secure; HttpOnly; SameSite=strict";
    '';
  };
}

View File

@@ -1,135 +0,0 @@
{ pkgs, lib, config, ... }:
# TYPO3 site for diabetes-austria.cloonar.dev: nginx vhost + dedicated
# PHP-FPM pool with the upstream-recommended TYPO3 nginx protection rules.
let
  domain = "diabetes-austria.cloonar.dev";
  dataDir = "/var/www/${domain}";
in {
  # The pool user's home is the docroot under /var/www; PHP-FPM must see it.
  systemd.services."phpfpm-${domain}".serviceConfig.ProtectHome = lib.mkForce false;
  services.phpfpm.pools."${domain}" = {
    user = domain;
    settings = {
      "listen.owner" = config.services.nginx.user;
      "pm" = "dynamic";
      "pm.max_children" = 32;
      "pm.max_requests" = 500;
      "pm.start_servers" = 2;
      "pm.min_spare_servers" = 2;
      "pm.max_spare_servers" = 5;
      "php_admin_value[error_log]" = "stderr";
      "php_admin_flag[log_errors]" = true;
      "catch_workers_output" = true;
      "access.log" = "/var/log/$pool.access.log";
    };
    phpPackage = pkgs.php81;
    phpEnv."PATH" = lib.makeBinPath [ pkgs.php81 ];
  };
  services.nginx.virtualHosts."${domain}" = {
    forceSSL = true;
    enableACME = true;
    acmeRoot = null;
    root = "${dataDir}";
    locations."/favicon.ico".extraConfig = ''
      log_not_found off;
      access_log off;
    '';
    # TYPO3 - Rule for versioned static files, configured through:
    # - $GLOBALS['TYPO3_CONF_VARS']['BE']['versionNumberInFilename']
    # - $GLOBALS['TYPO3_CONF_VARS']['FE']['versionNumberInFilename']
    extraConfig = ''
      if (!-e $request_filename) {
        rewrite ^/(.+)\.(\d+)\.(php|js|css|png|jpg|gif|gzip)$ /$1.$3 last;
      }
    '';
    # TYPO3 - Block access to composer files
    locations."~* composer\.(?:json|lock)".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to flexform files
    locations."~* flexform[^.]*\.xml".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to language files
    locations."~* locallang[^.]*\.(?:xml|xlf)$".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to static typoscript files
    locations."~* ext_conf_template\.txt|ext_typoscript_constants\.txt|ext_typoscript_setup\.txt".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to miscellaneous protected files
    locations."~* /.*\.(?:bak|co?nf|cfg|ya?ml|ts|typoscript|tsconfig|dist|fla|in[ci]|log|sh|sql|sqlite)$".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to recycler and temporary directories
    locations."~ _(?:recycler|temp)_/".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to configuration files stored in fileadmin
    locations."~ fileadmin/(?:templates)/.*\.(?:txt|ts|typoscript)$".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to libraries, source and temporary compiled data
    locations."~ ^(?:vendor|typo3_src|typo3temp/var)".extraConfig = ''
      deny all;
    '';
    # TYPO3 - Block access to protected extension directories
    locations."~ (?:typo3conf/ext|typo3/sysext|typo3/ext)/[^/]+/(?:Configuration|Resources/Private|Tests?|Documentation|docs?)/".extraConfig = ''
      deny all;
    '';
    locations."/".extraConfig = ''
      index index.php index.html;
      try_files $uri $uri/ /index.php$is_args$args;
    '';
    # TYPO3 Backend URLs
    locations."/typo3".extraConfig = ''
      rewrite ^ /typo3/;
    '';
    locations."/typo3/".extraConfig = ''
      try_files $uri /typo3/index.php$is_args$args;
    '';
    # Hand all PHP requests to the pool socket defined above.
    locations."~ [^/]\.php(/|$)".extraConfig = ''
      fastcgi_split_path_info ^(.+?\.php)(/.*)$;
      if (!-f $document_root$fastcgi_script_name) {
        return 404;
      }
      include ${pkgs.nginx}/conf/fastcgi_params;
      include ${pkgs.nginx}/conf/fastcgi.conf;
      fastcgi_buffer_size 32k;
      fastcgi_buffers 8 16k;
      fastcgi_connect_timeout 240s;
      fastcgi_read_timeout 240s;
      fastcgi_send_timeout 240s;
      fastcgi_pass unix:${config.services.phpfpm.pools."${domain}".socket};
      fastcgi_index index.php;
    '';
  };
  # Site user owning the docroot; nginx group so the webserver can read it.
  users.users."${domain}" = {
    isSystemUser = true;
    createHome = true;
    home = dataDir;
    homeMode= "770";
    #home = "/home/${domain}";
    group = "nginx";
  };
  users.groups.${domain} = {};
}

View File

@@ -1,9 +1,6 @@
self: super: { self: super: {
bento = (super.callPackage ../pkgs/bento { }); bento = (super.callPackage ../pkgs/bento { });
ykfde = (super.callPackage ../pkgs/ykfde { }); ykfde = (super.callPackage ../pkgs/ykfde { });
sysbox = (super.callPackage ../pkgs/sysbox.nix { });
omada = (super.callPackage ../pkgs/omada.nix { });
creality-print = (super.callPackage ../pkgs/creality-print.nix { }); creality-print = (super.callPackage ../pkgs/creality-print.nix { });
openaudible = (super.callPackage ../pkgs/openaudible.nix { }); openaudible = (super.callPackage ../pkgs/openaudible.nix { });
wow-addon-manager = (super.callPackage ../pkgs/wow-addon-manager { });
} }

View File

@@ -1,48 +0,0 @@
diff --git a/Cargo.lock b/Cargo.lock
index fb17c7e..ef157e5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2901,10 +2901,10 @@ dependencies = [
[[package]]
name = "magnum-opus"
version = "0.4.0"
-source = "git+https://github.com/open-trade/magnum-opus#3c3d0b86ae95c84930bebffe4bcb03b3bd83342b"
+source = "git+https://github.com/TheRadioGuy/magnum-opus#171e1d021004626f7444d1e39b98f50bc3cb2604"
dependencies = [
- "bindgen",
- "target_build_utils",
+ "libc",
+ "opusic-sys",
]
[[package]]
@@ -3463,6 +3463,16 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
+[[package]]
+name = "opusic-sys"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5eace752ce07a037241dba8f02c654799f051e431b27028056bcb480e83b54f5"
+dependencies = [
+ "cmake",
+ "libc",
+]
+
[[package]]
name = "os_str_bytes"
version = "6.0.0"
diff --git a/Cargo.toml b/Cargo.toml
index 1b715bd..960e8da 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -47,7 +47,7 @@ libc = "0.2"
parity-tokio-ipc = { git = "https://github.com/open-trade/parity-tokio-ipc" }
flexi_logger = { version = "0.22", features = ["async", "use_chrono_for_offset"] }
runas = "0.2"
-magnum-opus = { git = "https://github.com/open-trade/magnum-opus" }
+magnum-opus = { git = "https://github.com/TheRadioGuy/magnum-opus" }
dasp = { version = "0.11", features = ["signal", "interpolate-linear", "interpolate"], optional = true }
rubato = { version = "0.12", optional = true }
samplerate = { version = "0.2", optional = true }

View File

@@ -1,17 +0,0 @@
self: super:
{
rustdesk = super.rustdesk.overrideAttrs (old: rec {
pname = "rustdesk";
version = "nightly";
src = super.fetchFromGitHub {
owner = "rustdesk";
repo = "rustdesk";
rev = "52ce3dd2c299e262a54af0b5350fa60f66215e60";
sha256 = "01mfzgj3aw0k7sb0d2291inxmicrw09rrgfjl1pn50m2y29v0r40";
};
cargoPatches = [
./rustdesk-cargo.patch
];
});
}

View File

@@ -1,13 +0,0 @@
diff --git a/vite.config.ts b/vite.config.ts
index 659ba3e1..1f0afa5c 100644
--- a/vite.config.ts
+++ b/vite.config.ts
@@ -37,7 +37,7 @@ export default defineConfig(({ mode }) => {
base: "./",
build: {
sourcemap,
- outDir: "../internal/server/public_html",
+ outDir: "dist",
emptyOutDir: true,
assetsDir: "static",
rollupOptions: {

View File

@@ -1,78 +0,0 @@
#! /usr/bin/env nix-shell
#! nix-shell -I nixpkgs=./. -i bash -p coreutils gnused curl nix jq nodePackages.npm
set -euo pipefail
DRV_DIR="$(dirname "${BASH_SOURCE[0]}")"
DRV_DIR=$(realpath "$DRV_DIR")
NIXPKGS_ROOT="$DRV_DIR/../../.."
NIXPKGS_ROOT=$(realpath "$NIXPKGS_ROOT")
instantiateClean() {
nix-instantiate --eval --strict -E "with import ./. {}; $1" | cut -d\" -f2
}
fetchNewSha() {
set +eo pipefail
nix-build -A "$1" 2>&1 >/dev/null | grep "got:" | cut -d':' -f2 | sed 's| ||g'
set -eo pipefail
}
replace() {
sed -i "s@$1@$2@g" "$3"
}
grab_version() {
instantiateClean "authelia.version"
}
# provide a github token so you don't get rate limited
# if you use gh cli you can use:
# `export GITHUB_TOKEN="$(cat ~/.config/gh/config.yml | yq '.hosts."github.com".oauth_token' -r)"`
# or just set your token by hand:
# `read -s -p "Enter your token: " GITHUB_TOKEN; export GITHUB_TOKEN`
# (we use read so it doesn't show in our shell history and in secret mode so the token you paste isn't visible)
if [ -z "${GITHUB_TOKEN:-}" ]; then
echo "no GITHUB_TOKEN provided - you could meet API request limiting" >&2
fi
OLD_VERSION=$(instantiateClean "authelia.version")
LATEST_TAG=$(curl ${GITHUB_TOKEN:+" -u \":$GITHUB_TOKEN\""} --silent https://api.github.com/repos/authelia/authelia/releases/latest | jq -r '.tag_name')
NEW_VERSION=$(echo ${LATEST_TAG} | sed 's/^v//')
TMP_HASH="sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
OLD_SRC_HASH="$(instantiateClean authelia.src.outputHash)"
echo "Old src hash $OLD_SRC_HASH"
replace "$OLD_SRC_HASH" "$TMP_HASH" "$DRV_DIR/sources.nix"
NEW_SRC_HASH="$(fetchNewSha authelia.src)"
echo "New src hash $NEW_SRC_HASH"
replace "$TMP_HASH" "$NEW_SRC_HASH" "$DRV_DIR/sources.nix"
# after updating src the next focus is the web dependencies
# build package-lock.json since authelia uses pnpm
WEB_DIR=$(mktemp -d)
clean_up() {
rm -rf "$WEB_DIR"
}
trap clean_up EXIT
# OLD_PWD=$PWD
# cd $WEB_DIR
# OUT=$(nix-build -E "with import $NIXPKGS_ROOT {}; authelia.src" --no-out-link)
# cp -r $OUT/web/package.json .
# npm install --package-lock-only --legacy-peer-deps --ignore-scripts
# mv package-lock.json "$DRV_DIR/"
# cd $OLD_PWD
OLD_NPM_DEPS_HASH="$(instantiateClean authelia.web.npmDepsHash)"
echo "Old npm deps hash $OLD_NPM_DEPS_HASH"
replace "$OLD_NPM_DEPS_HASH" "$TMP_HASH" "$DRV_DIR/sources.nix"
NEW_NPM_DEPS_HASH="$(fetchNewSha authelia.web)"
echo "New npm deps hash $NEW_NPM_DEPS_HASH"
replace "$TMP_HASH" "$NEW_NPM_DEPS_HASH" "$DRV_DIR/sources.nix"
clean_up
OLD_GO_VENDOR_HASH="$(instantiateClean authelia.vendorHash)"
echo "Old go vendor hash $OLD_GO_VENDOR_HASH"
replace "$OLD_GO_VENDOR_HASH" "$TMP_HASH" "$DRV_DIR/sources.nix"
NEW_GO_VENDOR_HASH="$(fetchNewSha authelia.go-modules)"
echo "New go vendor hash $NEW_GO_VENDOR_HASH"
replace "$TMP_HASH" "$NEW_GO_VENDOR_HASH" "$DRV_DIR/sources.nix"

View File

@@ -1,77 +0,0 @@
{ lib, fetchFromGitHub, buildGoModule, installShellFiles, callPackage, nixosTests }:
let
inherit (import ./sources.nix { inherit fetchFromGitHub; }) pname version src vendorHash;
web = callPackage ./web.nix { };
in
buildGoModule rec {
inherit pname version src vendorHash;
nativeBuildInputs = [ installShellFiles ];
postPatch = ''
cp -r ${web}/share/authelia-web/* internal/server/public_html
'';
subPackages = [ "cmd/authelia" ];
ldflags =
let
p = "github.com/authelia/authelia/v${lib.versions.major version}/internal/utils";
in
[
"-s"
"-w"
"-X ${p}.BuildTag=v${version}"
"-X '${p}.BuildState=tagged clean'"
"-X ${p}.BuildBranch=v${version}"
"-X ${p}.BuildExtra=nixpkgs"
];
# several tests with networking and several that want chromium
doCheck = false;
postInstall = ''
mkdir -p $out/etc/authelia
cp config.template.yml $out/etc/authelia
installShellCompletion --cmd authelia \
--bash <($out/bin/authelia completion bash) \
--fish <($out/bin/authelia completion fish) \
--zsh <($out/bin/authelia completion zsh)
'';
doInstallCheck = true;
installCheckPhase = ''
runHook preInstallCheck
$out/bin/authelia --help
$out/bin/authelia --version | grep "v${version}"
$out/bin/authelia build-info | grep 'v${version}\|nixpkgs'
runHook postInstallCheck
'';
passthru = {
# if overriding replace the postPatch to put your web UI output in internal/server/public_html
inherit web;
updateScript = ./update.sh;
tests = { inherit (nixosTests) authelia; };
};
meta = with lib; {
homepage = "https://www.authelia.com/";
changelog = "https://github.com/authelia/authelia/releases/tag/v${version}";
description = "A Single Sign-On Multi-Factor portal for web apps";
longDescription = ''
Authelia is an open-source authentication and authorization server
providing two-factor authentication and single sign-on (SSO) for your
applications via a web portal. It acts as a companion for reverse proxies
like nginx, Traefik, caddy or HAProxy to let them know whether requests
should either be allowed or redirected to Authelia's portal for
authentication.
'';
license = licenses.asl20;
maintainers = with maintainers; [ jk raitobezarius dit7ya ];
};
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,14 +0,0 @@
{ fetchFromGitHub }:
rec {
pname = "authelia";
version = "4.37.5";
src = fetchFromGitHub {
owner = "authelia";
repo = "authelia";
rev = "0987da8524bd2e0c7cba5a00884beae67e0a6eb6";
hash = "sha256-xsdBnyPHFIimhp2rcudWqvVR36WN4vBXbxRmvgqMcDw=";
};
vendorHash = "sha256-mzGE/T/2TT4+7uc2axTqG3aeLMnt1r9Ya7Zj2jIkw/w=";
npmDepsHash = "sha256-MGs6UAxT5QZd8S3AO75mxuCb6U0UdRkGEjenOVj+Oqs=";
}

View File

@@ -1,85 +0,0 @@
#! /usr/bin/env nix-shell
#! nix-shell -I nixpkgs=./. -i bash -p coreutils gnused curl nix jq nodePackages.npm
set -euo pipefail
DRV_DIR="$(dirname "${BASH_SOURCE[0]}")"
DRV_DIR=$(realpath "$DRV_DIR")
NIXPKGS_ROOT="$DRV_DIR/../../.."
NIXPKGS_ROOT=$(realpath "$NIXPKGS_ROOT")
instantiateClean() {
nix-instantiate --eval --strict -E "with import ./. {}; $1" | cut -d\" -f2
}
fetchNewSha() {
set +eo pipefail
nix-build -A "$1" 2>&1 >/dev/null | grep "got:" | cut -d':' -f2 | sed 's| ||g'
set -eo pipefail
}
replace() {
sed -i "s@$1@$2@g" "$3"
}
grab_version() {
instantiateClean "authelia.version"
}
# provide a github token so you don't get rate limited
# if you use gh cli you can use:
# `export GITHUB_TOKEN="$(cat ~/.config/gh/config.yml | yq '.hosts."github.com".oauth_token' -r)"`
# or just set your token by hand:
# `read -s -p "Enter your token: " GITHUB_TOKEN; export GITHUB_TOKEN`
# (we use read so it doesn't show in our shell history and in secret mode so the token you paste isn't visible)
if [ -z "${GITHUB_TOKEN:-}" ]; then
echo "no GITHUB_TOKEN provided - you could meet API request limiting" >&2
fi
OLD_VERSION=$(instantiateClean "authelia.version")
LATEST_TAG=$(curl ${GITHUB_TOKEN:+" -u \":$GITHUB_TOKEN\""} --silent https://api.github.com/repos/authelia/authelia/releases/latest | jq -r '.tag_name')
NEW_VERSION=$(echo ${LATEST_TAG} | sed 's/^v//')
if [[ "$OLD_VERSION" == "$NEW_VERSION" ]]; then
echo "already up to date"
exit
fi
TMP_HASH="sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
echo "New version $NEW_VERSION"
replace "$OLD_VERSION" "$NEW_VERSION" "$DRV_DIR/sources.nix"
OLD_SRC_HASH="$(instantiateClean authelia.src.outputHash)"
echo "Old src hash $OLD_SRC_HASH"
replace "$OLD_SRC_HASH" "$TMP_HASH" "$DRV_DIR/sources.nix"
NEW_SRC_HASH="$(fetchNewSha authelia.src)"
echo "New src hash $NEW_SRC_HASH"
replace "$TMP_HASH" "$NEW_SRC_HASH" "$DRV_DIR/sources.nix"
# after updating src the next focus is the web dependencies
# build package-lock.json since authelia uses pnpm
WEB_DIR=$(mktemp -d)
clean_up() {
rm -rf "$WEB_DIR"
}
trap clean_up EXIT
# OLD_PWD=$PWD
# cd $WEB_DIR
# OUT=$(nix-build -E "with import $NIXPKGS_ROOT {}; authelia.src" --no-out-link)
# cp -r $OUT/web/package.json .
# npm install --package-lock-only --legacy-peer-deps --ignore-scripts
# mv package-lock.json "$DRV_DIR/"
# cd $OLD_PWD
OLD_NPM_DEPS_HASH="$(instantiateClean authelia.web.npmDepsHash)"
echo "Old npm deps hash $OLD_NPM_DEPS_HASH"
replace "$OLD_NPM_DEPS_HASH" "$TMP_HASH" "$DRV_DIR/sources.nix"
NEW_NPM_DEPS_HASH="$(fetchNewSha authelia.web)"
echo "New npm deps hash $NEW_NPM_DEPS_HASH"
replace "$TMP_HASH" "$NEW_NPM_DEPS_HASH" "$DRV_DIR/sources.nix"
clean_up
OLD_GO_VENDOR_HASH="$(instantiateClean authelia.vendorHash)"
echo "Old go vendor hash $OLD_GO_VENDOR_HASH"
replace "$OLD_GO_VENDOR_HASH" "$TMP_HASH" "$DRV_DIR/sources.nix"
NEW_GO_VENDOR_HASH="$(fetchNewSha authelia.go-modules)"
echo "New go vendor hash $NEW_GO_VENDOR_HASH"
replace "$TMP_HASH" "$NEW_GO_VENDOR_HASH" "$DRV_DIR/sources.nix"

View File

@@ -1,30 +0,0 @@
{ buildNpmPackage, fetchFromGitHub }:
let
inherit (import ./sources.nix { inherit fetchFromGitHub; }) pname version src npmDepsHash;
in
buildNpmPackage {
pname = "${pname}-web";
inherit src version npmDepsHash;
sourceRoot = "source/web";
patches = [
./change-web-out-dir.patch
];
postPatch = ''
cp ${./package-lock.json} ./package-lock.json
'';
npmFlags = [ "--legacy-peer-deps" ];
installPhase = ''
runHook preInstall
mkdir -p $out/share
mv dist $out/share/authelia-web
runHook postInstall
'';
}

View File

@@ -1,13 +1,13 @@
{ appimageTools, fetchurl }: { appimageTools, fetchurl }:
let let
pname = "creality-print"; pname = "creality-print";
version = "4.3.7.6627"; version = "5.1.7";
src = fetchurl { src = fetchurl {
url = "https://file2-cdn.creality.com/file/05a4538e0c7222ce547eb8d58ef0251e/Creality_Print-v4.3.7.6627-x86_64-Release.AppImage"; url = "https://github.com/CrealityOfficial/CrealityPrint/releases/download/v5.1.7/Creality_Print-v5.1.7.10514-x86_64-Release.AppImage";
# nix-prefetch-url --type sha256 --name Creality_Print-v4.3.7.6627-x86_64-Release.AppImage https://file2-cdn.creality.com/file/05a4538e0c7222ce547eb8d58ef0251e/Creality_Print-v4.3.7.6627-x86_64-Release.AppImage # nix-prefetch-url --type sha256 --name Creality_Print-v4.3.7.6627-x86_64-Release.AppImage https://file2-cdn.creality.com/file/05a4538e0c7222ce547eb8d58ef0251e/Creality_Print-v4.3.7.6627-x86_64-Release.AppImage
# nix-hash --type sha256 --to-sri # nix-hash --type sha256 --to-sri
sha256 = "sha256-WUsL7UbxSY94H4F1Ww8vLsfRyeg2/DZ+V4B6eH3M6+M="; sha256 = "sha256-IrVBlNbYs/Lmb9y8Yb/Xfpz+Rsx56nmK+4GkuMHh9zc=";
}; };
in in
appimageTools.wrapType2 { appimageTools.wrapType2 {

View File

@@ -1,3 +0,0 @@
final: prev: rec {
playactor = final.callPackage ./playactor.nix {};
}

View File

@@ -1,135 +0,0 @@
{ stdenv
, lib
, bzip2
, fetchFromGitHub
, fetchurl
, fmt
, gettext
, inih
, installShellFiles
, libevdev
, meson
, ninja
, pam
, pkg-config
, python3
}:
let
data = let
baseurl = "https://github.com/davisking/dlib-models/raw/daf943f7819a3dda8aec4276754ef918dc26491f";
in {
"dlib_face_recognition_resnet_model_v1.dat" = fetchurl {
url = "${baseurl}/dlib_face_recognition_resnet_model_v1.dat.bz2";
sha256 = "0fjm265l1fz5zdzx5n5yphl0v0vfajyw50ffamc4cd74848gdcdb";
};
"mmod_human_face_detector.dat" = fetchurl {
url = "${baseurl}/mmod_human_face_detector.dat.bz2";
sha256 = "117wv582nsn585am2n9mg5q830qnn8skjr1yxgaiihcjy109x7nv";
};
"shape_predictor_5_face_landmarks.dat" = fetchurl {
url = "${baseurl}/shape_predictor_5_face_landmarks.dat.bz2";
sha256 = "0wm4bbwnja7ik7r28pv00qrl3i1h6811zkgnjfvzv7jwpyz7ny3f";
};
};
py = python3.withPackages (p: [
p.face_recognition
(p.opencv4.override { enableGtk3 = true; })
]);
in
stdenv.mkDerivation {
pname = "howdy";
version = "unstable-2023-02-28";
src = fetchFromGitHub {
owner = "boltgolt";
repo = "howdy";
rev = "e881cc25935c7d39a074e9701a06b1fce96cc185";
hash = "sha256-BHS1J0SUNbCeAnTXrOQCtBJTaSYa5jtYYtTgfycv7VM=";
};
patches = [
# Change directory with configuration from `/etc` to `/var/lib`, since the service is expected to modify it.
./howdy.patch
];
postPatch =
let
howdypath = "${placeholder "out"}/lib/security/howdy";
in
''
substituteInPlace howdy/src/cli/add.py --replace "@pkgdatadir@" "${howdypath}"
substituteInPlace howdy/src/cli/config.py --replace '/bin/nano' 'nano'
substituteInPlace howdy/src/cli/test.py --replace "@pkgdatadir@" "${howdypath}"
substituteInPlace howdy/src/pam/main.cc \
--replace "python3" "${py}/bin/python" \
--replace "/lib/security/howdy/compare.py" "${howdypath}/compare.py"
substituteInPlace howdy/src/compare.py \
--replace "/lib/security/howdy" "${howdypath}" \
--replace "@pkgdatadir@" "${howdypath}"
'';
nativeBuildInputs = [
bzip2
installShellFiles
meson
ninja
pkg-config
];
buildInputs = [
fmt
gettext
inih
libevdev
pam
py
];
# build howdy_pam
preConfigure = ''
cd howdy/src/pam
# works around hardcoded install_dir: '/lib/security'.
# See https://github.com/boltgolt/howdy/blob/30728a6d3634479c24ffd4e094c34a30bbb43058/howdy/src/pam/meson.build#L22
export DESTDIR=$out
'';
postInstall =
let
libDir = "$out/lib/security/howdy";
inherit (lib) mapAttrsToList concatStrings;
in
''
# done with howdy_pam, go back to source root
cd ../../../..
mkdir -p $out/share/licenses/howdy
install -Dm644 LICENSE $out/share/licenses/howdy/LICENSE
rm -rf howdy/src/pam
mkdir -p ${libDir}
cp -r howdy/src/* ${libDir}
rm -rf ${libDir}/pam-config ${libDir}/dlib-data/*
${concatStrings (mapAttrsToList (n: v: ''
bzip2 -dc ${v} > ${libDir}/dlib-data/${n}
'') data)}
mkdir -p $out/bin
ln -s ${libDir}/cli.py $out/bin/howdy
mkdir -p "$out/share/bash-completion/completions"
installShellCompletion --bash howdy/src/autocomplete/howdy
'';
meta = {
description = "Windows Hello style facial authentication for Linux";
homepage = "https://github.com/boltgolt/howdy";
license = lib.licenses.mit;
platforms = lib.platforms.linux;
maintainers = with lib.maintainers; [ fufexan ];
};
}

View File

@@ -1,78 +0,0 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.howdy;
howdy = pkgs.callPackage ./howdy.nix { };
pam_python = pkgs.callPackage ./pam-python.nix { };
# `dark_threshold` is required for X1 Carbon 7th to work
configINI = pkgs.runCommand "config.ini" { } ''
cat ${cfg.package}/lib/security/howdy/config.ini > $out
substituteInPlace $out --replace 'device_path = none' 'device_path = ${cfg.device}'
substituteInPlace $out --replace 'dark_threshold = 50' 'dark_threshold = ${
toString cfg.dark-threshold
}'
substituteInPlace $out --replace 'certainty = 3.5' 'certainty = ${
toString cfg.certainty
}'
'';
pam-rule = pkgs.lib.mkDefault (pkgs.lib.mkBefore
"auth sufficient ${pam_python}/lib/security/pam_python.so ${config.services.howdy.package}/lib/security/howdy/pam.py");
in {
options = {
services.howdy = {
enable = mkOption {
type = types.bool;
default = false;
description = ''
Whether to enable howdy and PAM module for face recognition.
'';
};
package = mkOption {
type = types.package;
default = howdy;
defaultText = "howdy";
description = ''
Howdy package to use.
'';
};
device = mkOption {
type = types.path;
default = "/dev/video0";
description = ''
Device file connected to the IR sensor.
'';
};
certainty = mkOption {
type = types.int;
default = 4;
description = ''
The certainty of the detected face belonging to the user of the account. On a scale from 1 to 10, values above 5 are not recommended.
'';
};
dark-threshold = mkOption {
type = types.int;
default = 50;
description = ''
Because of flashing IR emitters, some frames can be completely unlit. Skip the frame if the lowest 1/8 of the histogram is above this percentage of the total. The lower this setting is, the more dark frames are ignored.
'';
};
};
};
config = mkIf cfg.enable {
environment.systemPackages = [ cfg.package ];
environment.etc."howdy/config.ini".source = configINI;
security.pam.services = {
sudo.text = pam-rule; # Sudo
login.text = pam-rule; # User login
polkit-1.text = pam-rule; # PolKit
};
};
}

View File

@@ -1,135 +0,0 @@
{ stdenv
, lib
, bzip2
, fetchFromGitHub
, fetchurl
, fmt
, gettext
, inih
, installShellFiles
, libevdev
, meson
, ninja
, pam
, pkg-config
, python3
}:
let
data = let
baseurl = "https://github.com/davisking/dlib-models/raw/daf943f7819a3dda8aec4276754ef918dc26491f";
in {
"dlib_face_recognition_resnet_model_v1.dat" = fetchurl {
url = "${baseurl}/dlib_face_recognition_resnet_model_v1.dat.bz2";
sha256 = "0fjm265l1fz5zdzx5n5yphl0v0vfajyw50ffamc4cd74848gdcdb";
};
"mmod_human_face_detector.dat" = fetchurl {
url = "${baseurl}/mmod_human_face_detector.dat.bz2";
sha256 = "117wv582nsn585am2n9mg5q830qnn8skjr1yxgaiihcjy109x7nv";
};
"shape_predictor_5_face_landmarks.dat" = fetchurl {
url = "${baseurl}/shape_predictor_5_face_landmarks.dat.bz2";
sha256 = "0wm4bbwnja7ik7r28pv00qrl3i1h6811zkgnjfvzv7jwpyz7ny3f";
};
};
py = python3.withPackages (p: [
p.face_recognition
(p.opencv4.override { enableGtk3 = true; })
]);
in
stdenv.mkDerivation {
pname = "howdy";
version = "unstable-2023-02-28";
src = fetchFromGitHub {
owner = "boltgolt";
repo = "howdy";
rev = "e881cc25935c7d39a074e9701a06b1fce96cc185";
hash = "sha256-BHS1J0SUNbCeAnTXrOQCtBJTaSYa5jtYYtTgfycv7VM=";
};
patches = [
# Change directory with configuration from `/etc` to `/var/lib`, since the service is expected to modify it.
./howdy.patch
];
postPatch =
let
howdypath = "${placeholder "out"}/lib/security/howdy";
in
''
substituteInPlace howdy/src/cli/add.py --replace "@pkgdatadir@" "${howdypath}"
substituteInPlace howdy/src/cli/config.py --replace '/bin/nano' 'nano'
substituteInPlace howdy/src/cli/test.py --replace "@pkgdatadir@" "${howdypath}"
substituteInPlace howdy/src/pam/main.cc \
--replace "python3" "${py}/bin/python" \
--replace "/lib/security/howdy/compare.py" "${howdypath}/compare.py"
substituteInPlace howdy/src/compare.py \
--replace "/lib/security/howdy" "${howdypath}" \
--replace "@pkgdatadir@" "${howdypath}"
'';
nativeBuildInputs = [
bzip2
installShellFiles
meson
ninja
pkg-config
];
buildInputs = [
fmt
gettext
inih
libevdev
pam
py
];
# build howdy_pam
preConfigure = ''
cd howdy/src/pam
# works around hardcoded install_dir: '/lib/security'.
# See https://github.com/boltgolt/howdy/blob/30728a6d3634479c24ffd4e094c34a30bbb43058/howdy/src/pam/meson.build#L22
export DESTDIR=$out
'';
postInstall =
let
libDir = "$out/lib/security/howdy";
inherit (lib) mapAttrsToList concatStrings;
in
''
# done with howdy_pam, go back to source root
cd ../../../..
mkdir -p $out/share/licenses/howdy
install -Dm644 LICENSE $out/share/licenses/howdy/LICENSE
rm -rf howdy/src/pam
mkdir -p ${libDir}
cp -r howdy/src/* ${libDir}
rm -rf ${libDir}/pam-config ${libDir}/dlib-data/*
${concatStrings (mapAttrsToList (n: v: ''
bzip2 -dc ${v} > ${libDir}/dlib-data/${n}
'') data)}
mkdir -p $out/bin
ln -s ${libDir}/cli.py $out/bin/howdy
mkdir -p "$out/share/bash-completion/completions"
installShellCompletion --bash howdy/src/autocomplete/howdy
'';
meta = {
description = "Windows Hello style facial authentication for Linux";
homepage = "https://github.com/boltgolt/howdy";
license = lib.licenses.mit;
platforms = lib.platforms.linux;
maintainers = with lib.maintainers; [ fufexan ];
};
}

View File

@@ -1,155 +0,0 @@
diff --git a/howdy/src/cli/add.py b/howdy/src/cli/add.py
index 8951e31..4f793d7 100644
--- a/howdy/src/cli/add.py
+++ b/howdy/src/cli/add.py
@@ -30,9 +30,9 @@ import cv2
config_path = "/etc/howdy"
# Test if at lest 1 of the data files is there and abort if it's not
-if not os.path.isfile(config_path + "/dlib-data/shape_predictor_5_face_landmarks.dat"):
+if not os.path.isfile("@pkgdatadir@/dlib-data/shape_predictor_5_face_landmarks.dat"):
print(_("Data files have not been downloaded, please run the following commands:"))
- print("\n\tcd " + config_path + "/dlib-data")
+ print("\n\tcd " + "@pkgdatadir@/dlib-data")
print("\tsudo ./install.sh\n")
sys.exit(1)
@@ -42,23 +42,23 @@ config.read(config_path + "/config.ini")
use_cnn = config.getboolean("core", "use_cnn", fallback=False)
if use_cnn:
- face_detector = dlib.cnn_face_detection_model_v1(config_path + "/dlib-data/mmod_human_face_detector.dat")
+ face_detector = dlib.cnn_face_detection_model_v1("@pkgdatadir@/dlib-data/mmod_human_face_detector.dat")
else:
face_detector = dlib.get_frontal_face_detector()
-pose_predictor = dlib.shape_predictor(config_path + "/dlib-data/shape_predictor_5_face_landmarks.dat")
-face_encoder = dlib.face_recognition_model_v1(config_path + "/dlib-data/dlib_face_recognition_resnet_model_v1.dat")
+pose_predictor = dlib.shape_predictor("@pkgdatadir@/dlib-data/shape_predictor_5_face_landmarks.dat")
+face_encoder = dlib.face_recognition_model_v1("@pkgdatadir@/dlib-data/dlib_face_recognition_resnet_model_v1.dat")
user = builtins.howdy_user
# The permanent file to store the encoded model in
-enc_file = config_path + "/models/" + user + ".dat"
+enc_file = "/var/lib/howdy/models/" + user + ".dat"
# Known encodings
encodings = []
# Make the ./models folder if it doesn't already exist
-if not os.path.exists(config_path + "/models"):
+if not os.path.exists("/var/lib/howdy/models"):
print(_("No face model folder found, creating one"))
- os.makedirs(config_path + "/models")
+ os.makedirs("/var/lib/howdy/models")
# To try read a premade encodings file if it exists
try:
diff --git a/howdy/src/cli/clear.py b/howdy/src/cli/clear.py
index 6fa5f3e..fc7676c 100644
--- a/howdy/src/cli/clear.py
+++ b/howdy/src/cli/clear.py
@@ -8,7 +8,7 @@ import builtins
from i18n import _
# Get the full path to this file
-path = "/etc/howdy/models"
+path = "/var/lib/howdy/models"
# Get the passed user
user = builtins.howdy_user
diff --git a/howdy/src/cli/list.py b/howdy/src/cli/list.py
index 3532e9f..b9e2a31 100644
--- a/howdy/src/cli/list.py
+++ b/howdy/src/cli/list.py
@@ -10,7 +10,7 @@ import builtins
from i18n import _
# Get the absolute path and the username
-path = "/etc/howdy"
+path = "/var/lib/howdy"
user = builtins.howdy_user
# Check if the models file has been created yet
diff --git a/howdy/src/cli/remove.py b/howdy/src/cli/remove.py
index 6321e0b..7c13d79 100644
--- a/howdy/src/cli/remove.py
+++ b/howdy/src/cli/remove.py
@@ -9,7 +9,7 @@ import builtins
from i18n import _
# Get the absolute path and the username
-path = "/etc/howdy"
+path = "/var/lib/howdy"
user = builtins.howdy_user
# Check if enough arguments have been passed
diff --git a/howdy/src/cli/test.py b/howdy/src/cli/test.py
index d54929a..fa45500 100644
--- a/howdy/src/cli/test.py
+++ b/howdy/src/cli/test.py
@@ -59,20 +59,20 @@ use_cnn = config.getboolean('core', 'use_cnn', fallback=False)
if use_cnn:
face_detector = dlib.cnn_face_detection_model_v1(
- path + "/dlib-data/mmod_human_face_detector.dat"
+ "@pkgdatadir@/dlib-data/mmod_human_face_detector.dat"
)
else:
face_detector = dlib.get_frontal_face_detector()
-pose_predictor = dlib.shape_predictor(path + "/dlib-data/shape_predictor_5_face_landmarks.dat")
-face_encoder = dlib.face_recognition_model_v1(path + "/dlib-data/dlib_face_recognition_resnet_model_v1.dat")
+pose_predictor = dlib.shape_predictor("@pkgdatadir@/dlib-data/shape_predictor_5_face_landmarks.dat")
+face_encoder = dlib.face_recognition_model_v1("@pkgdatadir@/dlib-data/dlib_face_recognition_resnet_model_v1.dat")
encodings = []
models = None
try:
user = builtins.howdy_user
- models = json.load(open(path + "/models/" + user + ".dat"))
+ models = json.load(open("/var/lib/howdy/models/" + user + ".dat"))
for model in models:
encodings += model["data"]
diff --git a/howdy/src/compare.py b/howdy/src/compare.py
index be19464..86a8d8f 100644
--- a/howdy/src/compare.py
+++ b/howdy/src/compare.py
@@ -48,22 +48,22 @@ def init_detector(lock):
global face_detector, pose_predictor, face_encoder
# Test if at lest 1 of the data files is there and abort if it's not
- if not os.path.isfile(PATH + "/dlib-data/shape_predictor_5_face_landmarks.dat"):
+ if not os.path.isfile("@pkgdatadir@/dlib-data/shape_predictor_5_face_landmarks.dat"):
print(_("Data files have not been downloaded, please run the following commands:"))
- print("\n\tcd " + PATH + "/dlib-data")
+ print("\n\tcd " + "@pkgdatadir@/dlib-data")
print("\tsudo ./install.sh\n")
lock.release()
exit(1)
# Use the CNN detector if enabled
if use_cnn:
- face_detector = dlib.cnn_face_detection_model_v1(PATH + "/dlib-data/mmod_human_face_detector.dat")
+ face_detector = dlib.cnn_face_detection_model_v1("@pkgdatadir@/dlib-data/mmod_human_face_detector.dat")
else:
face_detector = dlib.get_frontal_face_detector()
# Start the others regardless
- pose_predictor = dlib.shape_predictor(PATH + "/dlib-data/shape_predictor_5_face_landmarks.dat")
- face_encoder = dlib.face_recognition_model_v1(PATH + "/dlib-data/dlib_face_recognition_resnet_model_v1.dat")
+ pose_predictor = dlib.shape_predictor("@pkgdatadir@/dlib-data/shape_predictor_5_face_landmarks.dat")
+ face_encoder = dlib.face_recognition_model_v1("@pkgdatadir@/dlib-data/dlib_face_recognition_resnet_model_v1.dat")
# Note the time it took to initialize detectors
timings["ll"] = time.time() - timings["ll"]
@@ -129,7 +129,7 @@ face_encoder = None
# Try to load the face model from the models folder
try:
- models = json.load(open(PATH + "/models/" + user + ".dat"))
+ models = json.load(open("/var/lib/howdy/models/" + user + ".dat"))
for model in models:
encodings += model["data"]

View File

@@ -1,28 +0,0 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.ir-toggle;
ir_toggle = pkgs.callPackage ./ir-toggle.nix { };
in {
options = {
services.ir-toggle = {
enable = mkOption {
type = types.bool;
default = false;
description = ''
Whether to enable Chicony IR Emitter toggler.
'';
};
};
};
config = mkIf cfg.enable {
# Udev rules to start it on boot.
environment.systemPackages = [ ir_toggle ];
# Re-toggle the IR emitter after the sleep so that it could work perfectly
powerManagement.resumeCommands =
"${ir_toggle}/bin/chicony-ir-toggle on";
services.udev.packages = [ ir_toggle ];
};
}

View File

@@ -1,17 +0,0 @@
{ stdenv, fetchFromGitHub, cmake }:
stdenv.mkDerivation rec {
name = "chicony-ir-toggle";
src = fetchFromGitHub {
owner = "PetePriority";
repo = name;
rev = "5758112ae7f502035d48f24123347ba37cdbdb34";
sha256 = "1ihxkvhjbryhw5xjnw5a36f5w8nn4lnf07dzmzi6jzrn5ax131hw";
};
nativeBuildInputs = [ cmake ];
preInstall = ''
substituteInPlace ../CMakeLists.txt --replace /lib $out/lib
'';
}

View File

@@ -1,17 +0,0 @@
{ stdenv, python2, python2Packages, fetchurl, pam }:
let outPath = placeholder "out";
in stdenv.mkDerivation rec {
pname = "pam-python";
version = "1.0.7";
src = fetchurl {
url =
"https://downloads.sourceforge.net/project/pam-python/pam-python-1.0.7-1/pam-python-1.0.7.tar.gz";
sha256 = "01vylk8vmzsvxf0iwn2nizwkhdzk0vpyqh5m1rybh0sv6pz75kln";
};
buildInputs = [ python2 python2Packages.sphinx pam ];
preBuild = ''
patchShebangs .
substituteInPlace src/Makefile --replace '-Werror' '-O -Werror=cpp'
'';
makeFlags = [ "PREFIX=${outPath}" "LIBDIR=${outPath}/lib/security" ];
}

View File

@@ -1,47 +0,0 @@
{ lib, stdenv, fetchurl, makeDesktopItem, ant, jdk8 }:
stdenv.mkDerivation rec {
pname = "jxplorer";
version = "3.3.1.2";
src = fetchurl {
url = "https://github.com/pegacat/${pname}/releases/download/v${version}/${pname}-${version}-project.tar.bz2";
sha256 = "/lWkavH51OqNFSLpgT+4WcQcfW3WvnnOkB03jB7bE/s=";
};
jxplorerItem = makeDesktopItem {
name = "JXplorer";
exec = "jxplorer";
comment = "A Java Ldap Browser";
desktopName = "JXplorer";
genericName = "Java Ldap Browser";
icon = "jxplorer";
};
configurePhase = ''
cat >"${pname}" << EOF
#!/bin/sh
cd "$out/opt/jxplorer"
export JAVA_HOME="${jdk8}"
sh jxplorer.sh "\$@"
EOF
chmod +x "${pname}"
'';
installPhase = ''
install -d "$out/opt/jxplorer" "$out/bin" "$out/share/pixmaps" "$out/share/applications"
cp -r ./. "$out/opt/jxplorer"
install -Dm755 "${pname}" "$out/bin/${pname}"
cp -r "${jxplorerItem}/." "$out"
install -Dm644 images/JX128.png "$out/share/pixmaps/${pname}.png"
'';
meta = with lib; {
description = "A Java Ldap Browser";
homepage = "https://sourceforge.net/projects/jxplorer/";
license = "CA Open Source Licence Version 1.0";
maintainers = with maintainers; [ benwbooth ];
platforms = platforms.linux;
};
}

View File

@@ -1,51 +0,0 @@
{ stdenv
, lib
, makeWrapper
, fetchFromGitHub
, fetchurl
, meson
, ninja
, pkg-config
, python3
, opencv
, usbutils
}:
stdenv.mkDerivation rec {
pname = "linux-enable-ir-emitter";
version = "4.5.0";
src = fetchFromGitHub {
owner = "EmixamPP";
repo = pname;
rev = version;
hash = "sha256-Dv1ukn2TkXfBk1vc+6Uq7tw8WwCAfIcKl13BoOifz+Q=";
};
patches = [
# Prevent `linux-enable-ir-emitter configure` from trying to enable systemd service, NixOS manages those declaratively.
./remove-boot-set.patch
];
nativeBuildInputs = [
makeWrapper
meson
ninja
pkg-config
];
buildInputs = [
python3
opencv
];
postInstall = ''
wrapProgram $out/bin/${pname} --prefix PATH : ${lib.makeBinPath [usbutils]}
'';
meta = {
description = "Provides support for infrared cameras that are not directly enabled out-of-the box";
homepage = "https://github.com/EmixamPP/linux-enable-ir-emitter";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ fufexan ];
platforms = lib.platforms.linux;
};
}

View File

@@ -1,11 +0,0 @@
diff --git a/sources/command/configure.py b/sources/command/configure.py
index 2cd20d2..84ffb73 100644
--- a/sources/command/configure.py
+++ b/sources/command/configure.py
@@ -25,6 +25,5 @@ def configure(device: str, emitters: int, neg_answer_limit: int) -> NoReturn:
logging.info("Do not hesitate to visit the GitHub ! https://github.com/EmixamPP/linux-enable-ir-emitter/wiki")
else:
logging.info("The driver has been successfully generated.")
- boot("enable")
exit(exit_code)

View File

@@ -1,59 +0,0 @@
{ stdenv
, lib
, fetchurl
, dpkg
, libredirect
, makeWrapper
, busybox
, jsvc
, mongodb
, jre8_headless
}:
stdenv.mkDerivation rec {
pname = "omada-sdn";
version = "5.13.22";
src = fetchurl {
url = "https://static.tp-link.com/upload/software/2023/202312/20231201/Omada_SDN_Controller_v${version}_Linux_x64.deb";
sha256 = "sha256-rXtkY1QsiOm4k86ILBjkEV8tPlngUeJG1Lp9R197cbM=";
};
nativeBuildInputs = [ dpkg makeWrapper ];
unpackPhase = ''
runHook preUnpack
dpkg -x $src ./src
runHook postUnpack
'';
installPhase = ''
runHook preInstall
mkdir -p "$out"
cp -r src/* "$out"
mv "$out/usr/"* "$out/"
rmdir "$out/usr"
for f in "$out/lib/systemd/system/"*.service; do
substituteInPlace "$f" \
--replace "/usr/" "$out/"
done
for p in "$out/bin/"*; do
wrapProgram "$p" \
--set NIX_REDIRECTS "/usr/share=$out/share:/usr/bin=$out/bin" \
--prefix PATH : "${lib.makeBinPath [ busybox jsvc mongodb jre8_headless ]}"
done
runHook postInstall
'';
meta = with lib; {
description = "Omada SDN Controller Software";
platforms = with platforms; [ "x86_64-linux" ];
};
}

View File

@@ -1,17 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-14_x"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

View File

@@ -1,689 +0,0 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
else {
packageObj.devDependencies = {};
}
replaceDependencies(packageObj.optionalDependencies);
replaceDependencies(packageObj.peerDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "reconstructpackagelock.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 2,
requires: true,
packages: {
"": {
name: packageObj.name,
version: packageObj.version,
license: packageObj.license,
bin: packageObj.bin,
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
}
},
dependencies: {}
};
function augmentPackageJSON(filePath, packages, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
packages[filePath] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
};
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, packages, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, packages, dependencies);
}
});
} else {
augmentPackageJSON(filePath, packages, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
# Script that links bins defined in package.json to the node_modules bin directory
# NPM does not do this for top-level packages itself anymore as of v7
linkBinsScript = writeTextFile {
name = "linkbins.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
if(packageObj.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
if(typeof packageObj.bin == "object") {
Object.keys(packageObj.bin).forEach(function(exe) {
if(fs.existsSync(packageObj.bin[exe])) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin[exe]),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
else {
if(fs.existsSync(packageObj.bin)) {
console.log("linking bin '" + packageObj.bin + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin),
path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
);
}
else {
console.log("skipping non-existent bin '" + packageObj.bin + "'");
}
}
}
else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.directories.bin, exe),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
runHook postRebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
fi
# Link executables defined in package.json
node ${linkBinsScript}
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Fixup all executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
if isScript "$file"
then
sed -i 's/\r$//' "$file" # convert crlf to lf
fi
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
} // extraArgs);
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

View File

@@ -1,719 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{nodeEnv, fetchurl, fetchgit, nix-gitignore, stdenv, lib, globalBuildInputs ? []}:
let
sources = {
"@sindresorhus/is-4.6.0" = {
name = "_at_sindresorhus_slash_is";
packageName = "@sindresorhus/is";
version = "4.6.0";
src = fetchurl {
url = "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz";
sha512 = "t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==";
};
};
"@szmarczak/http-timer-4.0.6" = {
name = "_at_szmarczak_slash_http-timer";
packageName = "@szmarczak/http-timer";
version = "4.0.6";
src = fetchurl {
url = "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz";
sha512 = "4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==";
};
};
"@types/cacheable-request-6.0.3" = {
name = "_at_types_slash_cacheable-request";
packageName = "@types/cacheable-request";
version = "6.0.3";
src = fetchurl {
url = "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz";
sha512 = "IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==";
};
};
"@types/http-cache-semantics-4.0.1" = {
name = "_at_types_slash_http-cache-semantics";
packageName = "@types/http-cache-semantics";
version = "4.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz";
sha512 = "SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==";
};
};
"@types/keyv-3.1.4" = {
name = "_at_types_slash_keyv";
packageName = "@types/keyv";
version = "3.1.4";
src = fetchurl {
url = "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.4.tgz";
sha512 = "BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==";
};
};
"@types/node-13.13.52" = {
name = "_at_types_slash_node";
packageName = "@types/node";
version = "13.13.52";
src = fetchurl {
url = "https://registry.npmjs.org/@types/node/-/node-13.13.52.tgz";
sha512 = "s3nugnZumCC//n4moGGe6tkNMyYEdaDBitVjwPxXmR5lnMG5dHePinH2EdxkG3Rh1ghFHHixAG4NJhpJW1rthQ==";
};
};
"@types/node-18.16.1" = {
name = "_at_types_slash_node";
packageName = "@types/node";
version = "18.16.1";
src = fetchurl {
url = "https://registry.npmjs.org/@types/node/-/node-18.16.1.tgz";
sha512 = "DZxSZWXxFfOlx7k7Rv4LAyiMroaxa3Ly/7OOzZO8cBNho0YzAi4qlbrx8W27JGqG57IgR/6J7r+nOJWw6kcvZA==";
};
};
"@types/responselike-1.0.0" = {
name = "_at_types_slash_responselike";
packageName = "@types/responselike";
version = "1.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.0.tgz";
sha512 = "85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA==";
};
};
"ansi-regex-3.0.1" = {
name = "ansi-regex";
packageName = "ansi-regex";
version = "3.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz";
sha512 = "+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==";
};
};
"ansi-styles-3.2.1" = {
name = "ansi-styles";
packageName = "ansi-styles";
version = "3.2.1";
src = fetchurl {
url = "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz";
sha512 = "VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==";
};
};
"ansi-styles-4.3.0" = {
name = "ansi-styles";
packageName = "ansi-styles";
version = "4.3.0";
src = fetchurl {
url = "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz";
sha512 = "zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==";
};
};
"asn1-0.2.6" = {
name = "asn1";
packageName = "asn1";
version = "0.2.6";
src = fetchurl {
url = "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz";
sha512 = "ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==";
};
};
"at-least-node-1.0.0" = {
name = "at-least-node";
packageName = "at-least-node";
version = "1.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz";
sha512 = "+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==";
};
};
"cacheable-lookup-5.0.4" = {
name = "cacheable-lookup";
packageName = "cacheable-lookup";
version = "5.0.4";
src = fetchurl {
url = "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz";
sha512 = "2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==";
};
};
"cacheable-request-7.0.2" = {
name = "cacheable-request";
packageName = "cacheable-request";
version = "7.0.2";
src = fetchurl {
url = "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.2.tgz";
sha512 = "pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew==";
};
};
"chalk-2.4.2" = {
name = "chalk";
packageName = "chalk";
version = "2.4.2";
src = fetchurl {
url = "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz";
sha512 = "Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==";
};
};
"chalk-4.1.2" = {
name = "chalk";
packageName = "chalk";
version = "4.1.2";
src = fetchurl {
url = "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz";
sha512 = "oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==";
};
};
"clime-0.5.14" = {
name = "clime";
packageName = "clime";
version = "0.5.14";
src = fetchurl {
url = "https://registry.npmjs.org/clime/-/clime-0.5.14.tgz";
sha512 = "+q7UDQ+EcruHtZRtd2QVs+t/jf9MpuOyhHbuHcYWvjX8jdly6AU7z3/7MiI0Kj0hzueIEX9JM7vPnkzBrG082Q==";
};
};
"clone-response-1.0.3" = {
name = "clone-response";
packageName = "clone-response";
version = "1.0.3";
src = fetchurl {
url = "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz";
sha512 = "ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==";
};
};
"color-convert-1.9.3" = {
name = "color-convert";
packageName = "color-convert";
version = "1.9.3";
src = fetchurl {
url = "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz";
sha512 = "QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==";
};
};
"color-convert-2.0.1" = {
name = "color-convert";
packageName = "color-convert";
version = "2.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz";
sha512 = "RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==";
};
};
"color-name-1.1.3" = {
name = "color-name";
packageName = "color-name";
version = "1.1.3";
src = fetchurl {
url = "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz";
sha512 = "72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==";
};
};
"color-name-1.1.4" = {
name = "color-name";
packageName = "color-name";
version = "1.1.4";
src = fetchurl {
url = "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz";
sha512 = "dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==";
};
};
"debug-4.3.4" = {
name = "debug";
packageName = "debug";
version = "4.3.4";
src = fetchurl {
url = "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz";
sha512 = "PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==";
};
};
"decompress-response-6.0.0" = {
name = "decompress-response";
packageName = "decompress-response";
version = "6.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz";
sha512 = "aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==";
};
};
"defer-to-connect-2.0.1" = {
name = "defer-to-connect";
packageName = "defer-to-connect";
version = "2.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz";
sha512 = "4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==";
};
};
"define-lazy-prop-2.0.0" = {
name = "define-lazy-prop";
packageName = "define-lazy-prop";
version = "2.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz";
sha512 = "Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==";
};
};
"end-of-stream-1.4.4" = {
name = "end-of-stream";
packageName = "end-of-stream";
version = "1.4.4";
src = fetchurl {
url = "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz";
sha512 = "+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==";
};
};
"escape-string-regexp-1.0.5" = {
name = "escape-string-regexp";
packageName = "escape-string-regexp";
version = "1.0.5";
src = fetchurl {
url = "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz";
sha512 = "vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==";
};
};
"extendable-error-0.1.7" = {
name = "extendable-error";
packageName = "extendable-error";
version = "0.1.7";
src = fetchurl {
url = "https://registry.npmjs.org/extendable-error/-/extendable-error-0.1.7.tgz";
sha512 = "UOiS2in6/Q0FK0R0q6UY9vYpQ21mr/Qn1KOnte7vsACuNJf514WvCCUHSRCPcgjPT2bAhNIJdlE6bVap1GKmeg==";
};
};
"fs-extra-9.1.0" = {
name = "fs-extra";
packageName = "fs-extra";
version = "9.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz";
sha512 = "hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==";
};
};
"get-stream-5.2.0" = {
name = "get-stream";
packageName = "get-stream";
version = "5.2.0";
src = fetchurl {
url = "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz";
sha512 = "nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==";
};
};
"got-11.8.6" = {
name = "got";
packageName = "got";
version = "11.8.6";
src = fetchurl {
url = "https://registry.npmjs.org/got/-/got-11.8.6.tgz";
sha512 = "6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==";
};
};
"graceful-fs-4.2.11" = {
name = "graceful-fs";
packageName = "graceful-fs";
version = "4.2.11";
src = fetchurl {
url = "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz";
sha512 = "RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==";
};
};
"has-flag-3.0.0" = {
name = "has-flag";
packageName = "has-flag";
version = "3.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz";
sha512 = "sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==";
};
};
"has-flag-4.0.0" = {
name = "has-flag";
packageName = "has-flag";
version = "4.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz";
sha512 = "EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==";
};
};
"http-cache-semantics-4.1.1" = {
name = "http-cache-semantics";
packageName = "http-cache-semantics";
version = "4.1.1";
src = fetchurl {
url = "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz";
sha512 = "er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==";
};
};
"http2-wrapper-1.0.3" = {
name = "http2-wrapper";
packageName = "http2-wrapper";
version = "1.0.3";
src = fetchurl {
url = "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-1.0.3.tgz";
sha512 = "V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==";
};
};
"hyphenate-0.2.5" = {
name = "hyphenate";
packageName = "hyphenate";
version = "0.2.5";
src = fetchurl {
url = "https://registry.npmjs.org/hyphenate/-/hyphenate-0.2.5.tgz";
sha512 = "mSY0+dVLWFq7iIUgiID3EWo5S8rLnZa595mcuWiN8di91n6eL+WS8HKmcpiZZIX7iElri0a/2hOYpwzldsY4gQ==";
};
};
"is-docker-2.2.1" = {
name = "is-docker";
packageName = "is-docker";
version = "2.2.1";
src = fetchurl {
url = "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz";
sha512 = "F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==";
};
};
"is-wsl-2.2.0" = {
name = "is-wsl";
packageName = "is-wsl";
version = "2.2.0";
src = fetchurl {
url = "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz";
sha512 = "fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==";
};
};
"ix-4.6.1" = {
name = "ix";
packageName = "ix";
version = "4.6.1";
src = fetchurl {
url = "https://registry.npmjs.org/ix/-/ix-4.6.1.tgz";
sha512 = "W4aSy2cJxEgPgtr7aNOPNp/gobmWxoNUrMqH4Wowc80FFX71kqtnGMsJnIPiVN9c5tlVbOUNzjhhKcuYxsL1qQ==";
};
};
"json-buffer-3.0.1" = {
name = "json-buffer";
packageName = "json-buffer";
version = "3.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz";
sha512 = "4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==";
};
};
"jsonfile-6.1.0" = {
name = "jsonfile";
packageName = "jsonfile";
version = "6.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz";
sha512 = "5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==";
};
};
"keyv-4.5.2" = {
name = "keyv";
packageName = "keyv";
version = "4.5.2";
src = fetchurl {
url = "https://registry.npmjs.org/keyv/-/keyv-4.5.2.tgz";
sha512 = "5MHbFaKn8cNSmVW7BYnijeAVlE4cYA/SVkifVgrh7yotnfhKmjuXpDKjrABLnT0SfHWV21P8ow07OGfRrNDg8g==";
};
};
"lowercase-keys-2.0.0" = {
name = "lowercase-keys";
packageName = "lowercase-keys";
version = "2.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz";
sha512 = "tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==";
};
};
"mimic-response-1.0.1" = {
name = "mimic-response";
packageName = "mimic-response";
version = "1.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz";
sha512 = "j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==";
};
};
"mimic-response-3.1.0" = {
name = "mimic-response";
packageName = "mimic-response";
version = "3.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz";
sha512 = "z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==";
};
};
"ms-2.1.2" = {
name = "ms";
packageName = "ms";
version = "2.1.2";
src = fetchurl {
url = "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz";
sha512 = "sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==";
};
};
"node-rsa-1.1.1" = {
name = "node-rsa";
packageName = "node-rsa";
version = "1.1.1";
src = fetchurl {
url = "https://registry.npmjs.org/node-rsa/-/node-rsa-1.1.1.tgz";
sha512 = "Jd4cvbJMryN21r5HgxQOpMEqv+ooke/korixNNK3mGqfGJmy0M77WDDzo/05969+OkMy3XW1UuZsSmW9KQm7Fw==";
};
};
"normalize-url-6.1.0" = {
name = "normalize-url";
packageName = "normalize-url";
version = "6.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz";
sha512 = "DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==";
};
};
"once-1.4.0" = {
name = "once";
packageName = "once";
version = "1.4.0";
src = fetchurl {
url = "https://registry.npmjs.org/once/-/once-1.4.0.tgz";
sha512 = "lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==";
};
};
"open-8.4.2" = {
name = "open";
packageName = "open";
version = "8.4.2";
src = fetchurl {
url = "https://registry.npmjs.org/open/-/open-8.4.2.tgz";
sha512 = "7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==";
};
};
"p-cancelable-2.1.1" = {
name = "p-cancelable";
packageName = "p-cancelable";
version = "2.1.1";
src = fetchurl {
url = "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz";
sha512 = "BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==";
};
};
"parse-messy-time-2.1.0" = {
name = "parse-messy-time";
packageName = "parse-messy-time";
version = "2.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/parse-messy-time/-/parse-messy-time-2.1.0.tgz";
sha512 = "zErlfRLhoiJpCYLdEwTXU9bBwc750TK70TzjUA5h0vfLGw3595tj4Dl2LMoUTrEmFWZwtaIcJ/FjGIVv7nx/HA==";
};
};
"pump-3.0.0" = {
name = "pump";
packageName = "pump";
version = "3.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz";
sha512 = "LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==";
};
};
"quick-lru-5.1.1" = {
name = "quick-lru";
packageName = "quick-lru";
version = "5.1.1";
src = fetchurl {
url = "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz";
sha512 = "WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==";
};
};
"reflect-metadata-0.1.13" = {
name = "reflect-metadata";
packageName = "reflect-metadata";
version = "0.1.13";
src = fetchurl {
url = "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.1.13.tgz";
sha512 = "Ts1Y/anZELhSsjMcU605fU9RE4Oi3p5ORujwbIKXfWa+0Zxs510Qrmrce5/Jowq3cHSZSJqBjypxmHarc+vEWg==";
};
};
"resolve-alpn-1.2.1" = {
name = "resolve-alpn";
packageName = "resolve-alpn";
version = "1.2.1";
src = fetchurl {
url = "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz";
sha512 = "0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==";
};
};
"responselike-2.0.1" = {
name = "responselike";
packageName = "responselike";
version = "2.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/responselike/-/responselike-2.0.1.tgz";
sha512 = "4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==";
};
};
"safer-buffer-2.1.2" = {
name = "safer-buffer";
packageName = "safer-buffer";
version = "2.1.2";
src = fetchurl {
url = "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz";
sha512 = "YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==";
};
};
"strip-ansi-4.0.0" = {
name = "strip-ansi";
packageName = "strip-ansi";
version = "4.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz";
sha512 = "4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==";
};
};
"supports-color-5.5.0" = {
name = "supports-color";
packageName = "supports-color";
version = "5.5.0";
src = fetchurl {
url = "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz";
sha512 = "QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==";
};
};
"supports-color-7.2.0" = {
name = "supports-color";
packageName = "supports-color";
version = "7.2.0";
src = fetchurl {
url = "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz";
sha512 = "qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==";
};
};
"tslib-2.5.0" = {
name = "tslib";
packageName = "tslib";
version = "2.5.0";
src = fetchurl {
url = "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz";
sha512 = "336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==";
};
};
"universalify-2.0.0" = {
name = "universalify";
packageName = "universalify";
version = "2.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz";
sha512 = "hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==";
};
};
"villa-0.2.11" = {
name = "villa";
packageName = "villa";
version = "0.2.11";
src = fetchurl {
url = "https://registry.npmjs.org/villa/-/villa-0.2.11.tgz";
sha512 = "ZDQx/lhi8F2FdHWvjv5Ig+VbxPvNROHM11C+RSogiUF69amUhCLoGOYPzj/YhCv8pEUbH3ybasqNPjU4498elQ==";
};
};
"wrappy-1.0.2" = {
name = "wrappy";
packageName = "wrappy";
version = "1.0.2";
src = fetchurl {
url = "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz";
sha512 = "l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==";
};
};
};
in
{
playactor = nodeEnv.buildNodePackage {
name = "playactor";
packageName = "playactor";
version = "0.4.1";
src = fetchurl {
url = "https://registry.npmjs.org/playactor/-/playactor-0.4.1.tgz";
sha512 = "3uNLv5SWeg3dKTSQaP/SOnhUbaGOaENzZd9fJ8RbqKdNmF3TMvAU2s+jd3gH5nErAOxO9KcdLZ9YRMoXomsUSQ==";
};
dependencies = [
sources."@sindresorhus/is-4.6.0"
sources."@szmarczak/http-timer-4.0.6"
sources."@types/cacheable-request-6.0.3"
sources."@types/http-cache-semantics-4.0.1"
sources."@types/keyv-3.1.4"
sources."@types/node-18.16.1"
sources."@types/responselike-1.0.0"
sources."ansi-regex-3.0.1"
sources."ansi-styles-4.3.0"
sources."asn1-0.2.6"
sources."at-least-node-1.0.0"
sources."cacheable-lookup-5.0.4"
sources."cacheable-request-7.0.2"
sources."chalk-4.1.2"
(sources."clime-0.5.14" // {
dependencies = [
sources."ansi-styles-3.2.1"
sources."chalk-2.4.2"
sources."color-convert-1.9.3"
sources."color-name-1.1.3"
sources."has-flag-3.0.0"
sources."supports-color-5.5.0"
];
})
sources."clone-response-1.0.3"
sources."color-convert-2.0.1"
sources."color-name-1.1.4"
sources."debug-4.3.4"
(sources."decompress-response-6.0.0" // {
dependencies = [
sources."mimic-response-3.1.0"
];
})
sources."defer-to-connect-2.0.1"
sources."define-lazy-prop-2.0.0"
sources."end-of-stream-1.4.4"
sources."escape-string-regexp-1.0.5"
sources."extendable-error-0.1.7"
sources."fs-extra-9.1.0"
sources."get-stream-5.2.0"
sources."got-11.8.6"
sources."graceful-fs-4.2.11"
sources."has-flag-4.0.0"
sources."http-cache-semantics-4.1.1"
sources."http2-wrapper-1.0.3"
sources."hyphenate-0.2.5"
sources."is-docker-2.2.1"
sources."is-wsl-2.2.0"
(sources."ix-4.6.1" // {
dependencies = [
sources."@types/node-13.13.52"
];
})
sources."json-buffer-3.0.1"
sources."jsonfile-6.1.0"
sources."keyv-4.5.2"
sources."lowercase-keys-2.0.0"
sources."mimic-response-1.0.1"
sources."ms-2.1.2"
sources."node-rsa-1.1.1"
sources."normalize-url-6.1.0"
sources."once-1.4.0"
sources."open-8.4.2"
sources."p-cancelable-2.1.1"
sources."parse-messy-time-2.1.0"
sources."pump-3.0.0"
sources."quick-lru-5.1.1"
sources."reflect-metadata-0.1.13"
sources."resolve-alpn-1.2.1"
sources."responselike-2.0.1"
sources."safer-buffer-2.1.2"
sources."strip-ansi-4.0.0"
sources."supports-color-7.2.0"
sources."tslib-2.5.0"
sources."universalify-2.0.0"
sources."villa-0.2.11"
sources."wrappy-1.0.2"
];
buildInputs = globalBuildInputs;
meta = {
description = "play around with your playstation devices";
homepage = "https://github.com/dhleong/playactor#readme";
license = "ISC";
};
production = true;
bypassCache = true;
reconstructLock = true;
};
}

View File

@@ -1,12 +0,0 @@
{ buildPythonPackage, fetchPypi }:
buildPythonPackage rec {
pname = "pyelectroluxconnect";
version = "0.3.12";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-g9UxkWuTIqJe0/CDk3kwU3dSmc+GXlfDMxdzu6CqyY0=";
};
doCheck = false;
}

View File

@@ -1,17 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-14_x"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

View File

@@ -1,686 +0,0 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
else {
packageObj.devDependencies = {};
}
replaceDependencies(packageObj.optionalDependencies);
replaceDependencies(packageObj.peerDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "reconstructpackagelock.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 2,
requires: true,
packages: {
"": {
name: packageObj.name,
version: packageObj.version,
license: packageObj.license,
bin: packageObj.bin,
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
}
},
dependencies: {}
};
function augmentPackageJSON(filePath, packages, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
packages[filePath] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
};
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, packages, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, packages, dependencies);
}
});
} else {
augmentPackageJSON(filePath, packages, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
# Script that links bins defined in package.json to the node_modules bin directory
# NPM does not do this for top-level packages itself anymore as of v7
linkBinsScript = writeTextFile {
name = "linkbins.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
if(packageObj.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
if(typeof packageObj.bin == "object") {
Object.keys(packageObj.bin).forEach(function(exe) {
if(fs.existsSync(packageObj.bin[exe])) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin[exe]),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
else {
if(fs.existsSync(packageObj.bin)) {
console.log("linking bin '" + packageObj.bin + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin),
path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
);
}
else {
console.log("skipping non-existent bin '" + packageObj.bin + "'");
}
}
}
else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.directories.bin, exe),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
runHook postRebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
fi
# Link executables defined in package.json
node ${linkBinsScript}
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Patch the shebang lines of all the executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
patchShebangs "$file"
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
} // extraArgs);
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

View File

@@ -1,5 +0,0 @@
[
{ "room-assistant" : "2.20.0" },
{ "@nuxt/opencollective" : "0.3.2" },
{ "canvas": "2.11.0" }
]

File diff suppressed because it is too large Load Diff

View File

@@ -1,33 +0,0 @@
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem}:
# Overlays the node2nix-generated package set with the native build tools and
# libraries that room-assistant's gyp-compiled modules (bluetooth, canvas) need.
let
nodePackages = import ./default.nix {
inherit pkgs system;
};
in
nodePackages // {
"room-assistant-2.20.0" = nodePackages."room-assistant-2.20.0".override {
buildInputs = [
pkgs.pkgconfig
nodePackages."@nuxt/opencollective-0.3.2"
# nodePackages."canvas-2.11.0"
pkgs.bluez
pkgs.libudev-zero
pkgs.nodePackages.node-pre-gyp
pkgs.nodePackages.node-gyp
# canvas stuff
pkgs.cairo
pkgs.pango
pkgs.libpng
pkgs.libuuid
pkgs.libGL
# pkgs.libjpeg_original
pkgs.libjpeg
pkgs.giflib
# pkgs.librsvg
pkgs.netsurf.libsvgtiny
];
};
}

View File

@@ -1,95 +0,0 @@
{ lib, stdenv, fetchurl }:
let
# Builds a given self-service-password release from its GitHub tag tarball.
generic = {
version, sha256,
eol ? false, extraVulnerabilities ? []
}: let
major = lib.versions.major version;
in stdenv.mkDerivation rec {
pname = "selfServicePassword";
inherit version;
src = fetchurl {
url = "https://github.com/ltb-project/self-service-password/archive/refs/tags/v${version}.tar.gz";
inherit sha256;
};
# Plain PHP application: install is a straight copy of the source tree.
installPhase = ''
runHook preInstall
mkdir -p $out/
cp -R . $out/
runHook postInstall
'';
meta = with lib; {
description = "PHP application that allows users to change their password in an LDAP directory.";
homepage = "https://github.com/ltb-project/self-service-password";
license = licenses.agpl3Plus;
platforms = with platforms; unix;
};
};
in {
selfServicePassword = generic {
version = "1.5.2";
sha256 = "dcef404e6b715f16bda71381647af38052a67deef4d387312856596ef131e030";
};
# NOTE(review): everything below is NixOS *module* configuration pasted into a
# package expression. It references names (`cfg`, `config`, `pkgs`, `datadir`,
# `phpPackage`, `concatMapStringsSep`, `mapAttrs`, `mkDefault`) that are not in
# scope in this file, so this attrset cannot evaluate as written — confirm
# whether it should be split into a package and a separate NixOS module.
systemd.services = {
# When upgrading the Nextcloud package, Nextcloud can report errors such as
# "The files of the app [all apps in /var/lib/nextcloud/apps] were not replaced correctly"
# Restarting phpfpm on Nextcloud package update fixes these issues (but this is a workaround).
phpfpm-selfservicepassword.restartTriggers = [ selfServicePassword ];
selfservicepassword-setup = let
c = cfg.config;
writePhpArrary = a: "[${concatMapStringsSep "," (val: ''"${toString val}"'') a}]";
# Local configuration overlay symlinked into the app's data dir at boot.
overrideConfig = pkgs.writeText "config.inc.local.php" ''
<?php
$lang = "en";
$allowed_lang = array("en", "de");
$show_menu = true;
$header_name_preset_login = "Auth-User";
'';
in {
wantedBy = [ "multi-user.target" ];
before = [ "phpfpm-selfservicepassword.service" ];
script = ''
ln -sf ${overrideConfig} ${datadir}/config/override.config.php
'';
serviceConfig.Type = "oneshot";
serviceConfig.User = "selfservicepassword";
};
};
services.phpfpm = {
pools.selfServicePassword = {
user = "selfservicepassword";
group = "selfservicepassword";
phpPackage = phpPackage;
phpEnv = {
PATH = "/run/wrappers/bin:/nix/var/nix/profiles/default/bin:/run/current-system/sw/bin:/usr/bin:/bin";
};
settings = mapAttrs (name: mkDefault) {
"listen.owner" = config.services.nginx.user;
"listen.group" = config.services.nginx.group;
};
extraConfig = cfg.poolConfig;
};
};
users.users.selfservicepassword = {
home = "${cfg.home}";
group = "selfservicepassword";
isSystemUser = true;
};
users.groups.selfservicepassword.members = [ "selfservicepassword" config.services.nginx.user ];
services.nginx.enable = mkDefault true;
services.nginx.virtualHosts.${cfg.hostName} = {
root = cfg.package;
locations = {};
};
}

View File

@@ -1,28 +0,0 @@
{ stdenv
, makeWrapper
, rspamd
,
}:
stdenv.mkDerivation {
name = "sieve-rspamd-filter";
nativeBuildInputs = [ makeWrapper ];
src = ./src;
installPhase = ''
for sieve in $src/*.sieve; do
install -D "$sieve" "$out/share/sieve-rspamd-filter/$(basename $sieve)"
done
mkdir $out/bin
cat > $out/bin/learn-spam.sh <<'EOF'
#!/bin/sh
exec ${rspamd}/bin/rspamc -h /run/rspamd.sock learn_spam
EOF
cat > $out/bin/learn-ham.sh <<'EOF'
#!/bin/sh
exec ${rspamd}/bin/rspamc -h /run/rspamd.sock learn_ham
EOF
chmod +x $out/bin/*.sh
'';
}

View File

@@ -1,5 +0,0 @@
# File any message the spam filter tagged with "X-Spam: Yes" into the
# Spam mailbox instead of the inbox.
require ["fileinto"];
if header :is "X-Spam" "Yes" {
fileinto "Spam";
}

View File

@@ -1,15 +0,0 @@
# imapsieve script: when a message is moved out of Spam, report it as ham.
require ["vnd.dovecot.pipe", "copy", "imapsieve", "environment", "variables"];
# Capture the destination mailbox name.
if environment :matches "imap.mailbox" "*" {
set "mailbox" "${1}";
}
# Moving a message to Trash is deletion, not a ham report — do nothing.
if string "${mailbox}" "Trash" {
stop;
}
# Capture the IMAP login so training is done per user.
if environment :matches "imap.user" "*" {
set "username" "${1}";
}
# Feed a copy of the message to the ham learner helper.
pipe :copy "learn-ham.sh" [ "${username}" ];

View File

@@ -1,7 +0,0 @@
# imapsieve script: when a message is moved into Spam, report it as spam.
require ["vnd.dovecot.pipe", "copy", "imapsieve", "environment", "variables"];
# Capture the IMAP login so training is done per user.
if environment :matches "imap.user" "*" {
set "username" "${1}";
}
# Feed a copy of the message to the spam learner helper.
pipe :copy "learn-spam.sh" [ "${username}" ];

View File

@@ -1,65 +0,0 @@
{ stdenv
, lib
, fetchurl
, dpkg
, libredirect
, makeWrapper
, gzip
, fuse
, lsb-release
, rsync
, iptables
, jq
, kmod
}:
# Repackages the upstream sysbox-ce Debian package for NixOS: unpacks the
# .deb, flattens /usr into $out, points the shipped systemd units at the
# store path, and wraps the binaries with their runtime tool PATH.
stdenv.mkDerivation rec {
pname = "sysbox";
version = "0.6.2-0";
src = fetchurl {
# NOTE(review): the "v0.6.2" path segment is hard-coded; it must be kept in
# sync with `version` on updates.
url = "https://downloads.nestybox.com/sysbox/releases/v0.6.2/sysbox-ce_${version}.linux_amd64.deb";
sha256 = "sha256-/Sh/LztaBytiw3j54e7uqizK0iu0jLOB0w2MhVxRtAE=";
};
nativeBuildInputs = [ dpkg makeWrapper ];
# Custom unpack: the source is a .deb, not a tarball.
unpackPhase = ''
runHook preUnpack
dpkg -x $src ./src
runHook postUnpack
'';
# Relocate /usr into the store path, patch unit files to the new prefix, and
# wrap each binary (NIX_REDIRECTS plus fuse/rsync/iptables/... on PATH).
installPhase = ''
runHook preInstall
mkdir -p "$out"
cp -r src/* "$out"
mv "$out/usr/"* "$out/"
rmdir "$out/usr"
for f in "$out/lib/systemd/system/"*.service; do
substituteInPlace "$f" \
--replace "/usr/" "$out/"
done
for p in "$out/bin/"*; do
wrapProgram "$p" \
--set NIX_REDIRECTS "/usr/share=$out/share:/usr/bin=$out/bin" \
--prefix PATH : "${lib.makeBinPath [ fuse rsync iptables lsb-release jq kmod ]}"
done
runHook postInstall
'';
meta = with lib; {
description = "Improves container isolation";
homepage = "https://github.com/nestybox/sysbox";
license = licenses.asl20;
platforms = with platforms; [ "x86_64-linux" ];
mainProgram = "sysbox-runc";
};
}

View File

@@ -1,205 +0,0 @@
# Update instructions:
#
# To update `thunderbird-bin`'s `release_sources.nix`, run from the nixpkgs root:
#
# nix-shell maintainers/scripts/update.nix --argstr package pkgs.thunderbird-bin-unwrapped
#
# Repackages the official Thunderbird Linux binary tarball: patches the ELF
# interpreter/rpath, disables the built-in updater via a policies.json, and
# wraps the binary with the GTK environment.
{ lib, stdenv, fetchurl, config, wrapGAppsHook
, alsa-lib
, atk
, cairo
, curl
, cups
, dbus-glib
, dbus
, fontconfig
, freetype
, gdk-pixbuf
, glib
, glibc
, gtk2
, gtk3
, libkrb5
, libX11
, libXScrnSaver
, libxcb
, libXcomposite
, libXcursor
, libXdamage
, libXext
, libXfixes
, libXi
, libXinerama
, libXrender
, libXrandr
, libXt
, libXtst
, libcanberra
, libnotify
, adwaita-icon-theme
, libGLU, libGL
, nspr
, nss_latest
, pango
, pipewire
, pciutils
, heimdal
, libpulseaudio
, systemd
, writeScript
, writeText
, xidel
, coreutils
, gnused
, gnugrep
, gnupg
, ffmpeg
, runtimeShell
, mesa # thunderbird wants gbm for drm+dmabuf
, systemLocale ? config.i18n.defaultLocale or "en_US"
, generated
}:
let
  mozillaPlatforms = {
    i686-linux = "linux-i686";
    x86_64-linux = "linux-x86_64";
  };
  # Updates are delivered through the store, never by the app itself.
  policies = { DisableAppUpdate = true; } // config.thunderbird.policies or { };
  policiesJson = writeText "thunderbird-policies.json" (builtins.toJSON { inherit policies; });
  mozLocale =
    if systemLocale == "ca_ES@valencia"
    then "ca-valencia"
    else lib.replaceStrings ["_"] ["-"] systemLocale;
  version = "112.0b7";
in
# FIX: removed `with import <nixpkgs> {};` — it re-imported an impure nixpkgs
# from NIX_PATH even though every dependency is already a function argument,
# breaking pure evaluation and hiding which inputs are actually used.
stdenv.mkDerivation {
  pname = "thunderbird-bin";
  inherit version;

  src = fetchurl {
    # FIXME(review): no hash is pinned, so this only evaluates with impure
    # fetching — add the sha256 of the release tarball.
    url = "https://download-installer.cdn.mozilla.net/pub/thunderbird/releases/${version}/linux-x86_64/en-US/thunderbird-${version}.tar.bz2";
  };

  # Runtime library search path baked into the binaries via patchelf below.
  libPath = lib.makeLibraryPath
    [ stdenv.cc.cc
      alsa-lib
      atk
      cairo
      curl
      cups
      dbus-glib
      dbus
      fontconfig
      freetype
      gdk-pixbuf
      glib
      glibc
      gtk2
      gtk3
      libkrb5
      mesa
      libX11
      libXScrnSaver
      libXcomposite
      libXcursor
      libxcb
      libXdamage
      libXext
      libXfixes
      libXi
      libXinerama
      libXrender
      libXrandr
      libXt
      libXtst
      libcanberra
      libnotify
      libGLU libGL
      nspr
      nss_latest
      pango
      pipewire
      pciutils
      heimdal
      libpulseaudio
      systemd
      ffmpeg
    ] + ":" + lib.makeSearchPathOutput "lib" "lib64" [
      stdenv.cc.cc
    ];

  inherit gtk3;

  nativeBuildInputs = [ wrapGAppsHook ];
  buildInputs = [ gtk3 adwaita-icon-theme ];

  # "strip" after "patchelf" may break binaries.
  # See: https://github.com/NixOS/patchelf/issues/10
  dontStrip = true;
  dontPatchELF = true;

  patchPhase = ''
    # Don't download updates from Mozilla directly
    echo 'pref("app.update.auto", "false");' >> defaults/pref/channel-prefs.js
  '';

  # See "Note on GPG support" in `../thunderbird/default.nix` for explanations
  # on adding `gnupg` and `gpgme` into PATH/LD_LIBRARY_PATH.
  installPhase =
    ''
      mkdir -p "$prefix/usr/lib/thunderbird-bin-${version}"
      cp -r * "$prefix/usr/lib/thunderbird-bin-${version}"

      mkdir -p "$out/bin"
      ln -s "$prefix/usr/lib/thunderbird-bin-${version}/thunderbird" "$out/bin/"

      for executable in \
        thunderbird thunderbird-bin plugin-container \
        updater crashreporter webapprt-stub
      do
        if [ -e "$out/usr/lib/thunderbird-bin-${version}/$executable" ]; then
          patchelf --interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" \
            "$out/usr/lib/thunderbird-bin-${version}/$executable"
        fi
      done

      find . -executable -type f -exec \
        patchelf --set-rpath "$libPath" \
          "$out/usr/lib/thunderbird-bin-${version}/{}" \;

      # wrapThunderbird expects "$out/lib" instead of "$out/usr/lib"
      ln -s "$out/usr/lib" "$out/lib"

      gappsWrapperArgs+=(--argv0 "$out/bin/.thunderbird-wrapped")

      # See: https://github.com/mozilla/policy-templates/blob/master/README.md
      mkdir -p "$out/lib/thunderbird-bin-${version}/distribution";
      ln -s ${policiesJson} "$out/lib/thunderbird-bin-${version}/distribution/policies.json";
    '';

  passthru.updateScript = import ./../../browsers/firefox-bin/update.nix {
    inherit writeScript xidel coreutils gnused gnugrep curl gnupg runtimeShell;
    pname = "thunderbird-bin";
    baseName = "thunderbird";
    channel = "release";
    basePath = "pkgs/applications/networking/mailreaders/thunderbird-bin";
    baseUrl = "http://archive.mozilla.org/pub/thunderbird/releases/";
  };

  meta = with lib; {
    changelog = "https://www.thunderbird.net/en-US/thunderbird/${version}/releasenotes/";
    description = "Mozilla Thunderbird, a full-featured email client (binary package)";
    homepage = "http://www.mozilla.org/thunderbird/";
    sourceProvenance = with sourceTypes; [ binaryNativeCode ];
    license = licenses.mpl20;
    maintainers = with lib.maintainers; [ lovesegfault ];
    platforms = builtins.attrNames mozillaPlatforms;
    hydraPlatforms = [ ];
  };
}

View File

@@ -1,14 +0,0 @@
{ pkgs, lib, stdenv, makeWrapper }:
# Packages the wow-addon-manager script together with its two sample addon
# list files (retail and classic).
stdenv.mkDerivation {
  name = "wow-addon-manager";
  src = ./src;
  nativeBuildInputs = [ makeWrapper ];
  # FIX: run the standard pre/post install hooks, and give install(1) explicit
  # destination file names — `install -D src dir/` relies on newer coreutils
  # treating a trailing slash as a directory.
  installPhase = ''
    runHook preInstall
    install -Dm555 wow-addon-manager $out/bin/wow-addon-manager
    install -Dm444 addons.list.sample $out/share/addons.list.sample
    install -Dm444 addons.classic.list.sample $out/share/addons.classic.list.sample
    runHook postInstall
  '';
}

View File

@@ -1,36 +0,0 @@
https://github.com/RagedUnicorn/wow-vanilla-gearmenu.git
https://wowinterface.com/downloads/info15636-tullaRange.html
https://www.curseforge.com/wow/addons/bagsync/download
https://www.curseforge.com/wow/addons/quest_completist/download
http://www.curseforge.com/wow/addons/mmz/download
http://www.curseforge.com/wow/addons/clique/download
http://www.curseforge.com/wow/addons/grail/download
http://www.curseforge.com/wow/addons/tomtom/download
https://github.com/TekNoLogic/VendorBait.git
https://github.com/ColbyWanShinobi/gsReloadUI.git
https://github.com/ColbyWanShinobi/gsNoGryphons.git
https://github.com/ColbyWanShinobi/gsQuestSounds.git
https://www.curseforge.com/wow/addons/tradeskill-master/download
https://www.curseforge.com/wow/addons/monkey-speed/download
https://www.curseforge.com/wow/addons/cursortrail/download
http://wowinterface.com/downloads/info12995-NPCScan.html
https://www.curseforge.com/wow/addons/advancedinterfaceoptions/download
https://www.curseforge.com/wow/addons/weaponswingtimer/download
https://www.curseforge.com/wow/addons/azeroth-auto-pilot-classic/download
https://www.curseforge.com/wow/addons/details/download
https://www.curseforge.com/wow/addons/big-wigs/download
https://www.curseforge.com/wow/addons/bartender4/download
https://www.curseforge.com/wow/addons/little-wigs/download
https://www.curseforge.com/wow/addons/omni-cc/download
https://www.curseforge.com/wow/addons/questie/download
https://www.curseforge.com/wow/addons/atlaslootclassic/download
https://www.curseforge.com/wow/addons/mapster/download
https://www.curseforge.com/wow/addons/vendor-price/download
https://www.curseforge.com/wow/addons/leatrix-plus-classic/download
https://www.curseforge.com/wow/addons/inventorian/download
https://www.curseforge.com/wow/addons/bagnon/download
https://www.wowinterface.com/downloads/info25006-ClassicAuraDurations.html
https://www.wowinterface.com/downloads/info24958-AuctionatorClassicquickfix.html
https://www.wowinterface.com/downloads/info25036-DruidBarClassic.html
https://www.curseforge.com/wow/addons/outfitter/download
https://wowinterface.com/downloads/info24944-WeakAuras2Classic.html

View File

@@ -1,17 +0,0 @@
https://wowinterface.com/downloads/info15636-tullaRange.html
https://addon.theunderminejournal.com/TheUndermineJournal.zip
https://github.com/TekNoLogic/VendorBait.git
https://github.com/ColbyWanShinobi/gsReloadUI.git
https://github.com/ColbyWanShinobi/gsNoGryphons.git
https://github.com/ColbyWanShinobi/gsQuestSounds.git
http://wowinterface.com/downloads/info12995-NPCScan.html
https://wowinterface.com/downloads/info5108-Clique.html
https://wowinterface.com/downloads/info20804-Grail.html
https://wowinterface.com/downloads/info7032-TomTom.html
https://wowinterface.com/downloads/info24910-WeakAuras2.html
https://wowinterface.com/downloads/info7296-Pawn.html
https://wowinterface.com/downloads/info20805-Wholly.html
https://wowinterface.com/downloads/info24802-ChampionCommander.html
https://www.wowinterface.com/downloads/info25313-Dejunk.html
https://www.wowinterface.com/downloads/info10089-ItemID.html
https://www.wowinterface.com/downloads/info24508-ImprovedNameplates.html

View File

@@ -1,264 +0,0 @@
#!/bin/bash
set -e
function getAddonProvider {
  # Extract the hosting provider ("<name>.com") from an addon URL so the
  # caller can pick the matching download strategy.
  local provider
  provider="$(grep -E -o '\w+\.com' <<<"$1")"
  echo $provider
}
function printList {
  # Print every configured addon as "<index> - <url>" and terminate the
  # script (reads the global ADDONS array filled in by the main section).
  local idx
  for idx in "${!ADDONS[@]}"
  do
    echo "$idx - ${ADDONS[$idx]}"
  done
  exit
}
function parseFileName {
  # Print the bare file name of a URL/path: everything after the last slash.
  local tail="${1##*/}"
  echo "$tail"
}
function parseCurseFileNameFromListURL {
  # From a ".../addons/<slug>/download" list URL, extract the addon slug and
  # print the zip file name the download is expected to produce.
  local slug
  slug="$(grep -E -o 'addons/.+/download' <<<"$1" | cut -f2 -d'/')"
  echo "${slug}.zip"
}
function parseDirName {
  # Strip the 4-character extension (e.g. ".zip") off a file name, leaving a
  # trailing space, and print the result.
  # FIX: the computed value was assigned to a local but never printed, so
  # callers capturing $(parseDirName ...) always received an empty string.
  local DIRNAME="$(echo "$1" | sed -E 's/.{4}$/ /g')"
  echo "$DIRNAME"
}
# NOTE(review): unfinished stub — only prints a placeholder. The commented-out
# code suggests it was meant to list the top-level directory inside an addon
# zip; it is not referenced by the download helpers as written.
function parseAddonDirName {
echo "parse!"
#Get the name of the addon directory from the zip file
#local ADDONDIR="$(unzip -l /tmp/$ZFILE | grep -E -o ' \w+\/' | sort | uniq | grep -E -o '\w+')"
#echo "Searching Addon archive and found directory named: ${GREEN}$ADDONDIR${CRESET}"
}
function dlCurseAddon {
  # Download/refresh an addon from curseforge.com into $ADDONPATH.
  # $1 is the ".../addons/<slug>/download" list URL; the real file URL is
  # scraped from the "If your download..." fallback link on that page.
  echo "Updating Addon from curseforge.com..."
  local DOMAIN="https://www.curseforge.com"
  local CURSELINK="$(wget --random-wait -q $1 -O - | grep -i "If your download" | grep -E -o 'href=\".+\"' | cut -f2 -d'"')"
  echo "CurseLink: ${GREEN}$CURSELINK${CRESET}"
  local DLURL="${DOMAIN}${CURSELINK}"
  # FIX: removed a leftover debug override that forced every addon download to
  # Sage-2.11.zip, and re-enabled the guard against a failed page scrape.
  if [ "$CURSELINK" != '' ]
  then
    echo "Download URL: ${GREEN}$DLURL${CRESET}"
    # FIX: derive the zip name from the list URL ($1), which actually contains
    # the "addons/<slug>/download" pattern the parser looks for.
    local ZFILE=$(parseCurseFileNameFromListURL "$1")
    echo "Zip File: ${GREEN}$ZFILE${CRESET}"
    local ZDIRNAME=$(parseDirName $ZFILE)
    # Work in a clean temp directory.
    rm -rf /tmp/CoS/tmpAddon
    mkdir -p /tmp/CoS/tmpAddon
    echo "Downloading file: ${GREEN}$DLURL${CRESET}"
    cd /tmp/CoS
    wget --random-wait -N -O ${ZFILE} "$DLURL"
    ZDIRNAME=tmpCurseDl
    echo "Unzipping file: ${GREEN}/tmp/$ZFILE${CRESET} to ${GREEN}/tmp/$ZDIRNAME${CRESET}"
    unzip -o "/tmp/CoS/$ZFILE" -d /tmp/CoS/tmpAddon
    # Sync the unpacked addon folders into the game's AddOns directory.
    rsync -hvrPt /tmp/CoS/tmpAddon/ "$ADDONPATH"
  else
    echo "Download failed for: $1"
  fi
}
function dlIndy {
# Fallback downloader: fetch a zip directly from an arbitrary URL and sync
# its contents into the global $ADDONPATH.
echo "Updating Independent Addon..."
#Get the URL to download the file
local DLURL=$1
echo "Download URL: ${GREEN}$DLURL${CRESET}"
#Get the name of the file itself
local ZFILE=$(parseFileName "$DLURL")
echo "Zip File: ${GREEN}$ZFILE${CRESET}"
#Get the name of just the zip file
# NOTE(review): ZDIRNAME is unconditionally overwritten with "tmpCurseDl"
# below, so this assignment has no effect.
local ZDIRNAME=$(parseDirName $ZFILE)
#Remove the temp dir if it exists
rm -rf /tmp/CoS/tmpAddon
#Re-create the dir
mkdir -p /tmp/CoS/tmpAddon
#Download the file
echo "Downloading file: ${GREEN}$DLURL${CRESET}"
cd /tmp/CoS
wget --random-wait -N $DLURL
#Unzip the file to a temp directory
ZDIRNAME=tmpCurseDl
echo "Unzipping file: ${GREEN}/tmp/$ZFILE${CRESET} to ${GREEN}/tmp/$ZDIRNAME${CRESET}"
unzip -o "/tmp/CoS/$ZFILE" -d /tmp/CoS/tmpAddon
#Copy only new files into the Addon directory
rsync -hvrPt /tmp/CoS/tmpAddon/ "$ADDONPATH"
}
function dlGitAddon {
# Install or update an addon that is distributed as a git repository,
# cloning/pulling it directly inside the global $ADDONPATH.
echo "Updating Addon using git..."
#Get the URL to download the file
local DLURL=${1}
echo "Download URL: ${GREEN}${DLURL}${CRESET}"
# Repository name = last path component without the trailing ".git".
local GDIRNAME=$(echo ${DLURL} | grep -E -o '[-[:alnum:]]+.git' | cut -f1 -d.)
if [ -d "${ADDONPATH}/${GDIRNAME}" ]
then
#Is this a healthy git folder?
if [ -d "${ADDONPATH}/${GDIRNAME}/.git" ]
then
echo "pull from healthy git directory (${ADDONPATH}/${GDIRNAME}) for : ${GREEN}$GDIRNAME${CRESET}"
git -C "${ADDONPATH}/${GDIRNAME}" pull ${DLURL}
else
# Directory exists but is not a git checkout: replace it with a fresh clone.
echo "Removing git directory (${ADDONPATH}/${GDIRNAME}) for : ${GREEN}${GDIRNAME}${CRESET}"
rm -rfv "${ADDONPATH}/${GDIRNAME}"
echo "Cloning from git repository for : ${GREEN}${GDIRNAME}${CRESET}"
git -C "${ADDONPATH}" clone ${DLURL}
fi
else
# NOTE(review): this branch removes a directory the enclosing test just
# established does not exist — the rm is a no-op kept for symmetry.
echo "Removing git directory (${ADDONPATH}/${GDIRNAME}) for : ${GREEN}${GDIRNAME}${CRESET}"
rm -rfv "${ADDONPATH}/${GDIRNAME}"
echo "Cloning from git repository for : ${GREEN}${GDIRNAME}${CRESET}"
git -C "${ADDONPATH}" clone ${DLURL}
fi
# If the checkout has no top-level <repo>.toc, the repo nests the actual
# addon folders one level down: copy each subdirectory up into AddOns.
if [ ! -f "${ADDONPATH}/${GDIRNAME}/${GDIRNAME}.toc" ]
then
for dir in "${ADDONPATH}/${GDIRNAME}"/*/
do
dir=${dir%*/}
cp -r "${dir}" "${ADDONPATH}"
done
fi
}
function dlWowIAddon {
# Download/refresh an addon from wowinterface.com into the global $ADDONPATH.
# The info page is scraped for the numeric file id of the actual download.
echo "Updating Addon from wowinterface.com..."
#Get the URL to download the file
local DLURL="https://www.wowinterface.com/downloads/getfile.php?id=$(wget --random-wait -q $1 -O - | grep landing | grep -E -o 'fileid=[[:digit:]]+' | uniq | cut -f2 -d=)"
echo "Download URL: ${GREEN}$DLURL${CRESET}"
# The real file name is announced in the content-disposition header.
local ZFILE=$(curl -Is $DLURL | grep content-disposition | cut -f2 -d\")
echo "Zip File: ${GREEN}$ZFILE${CRESET}"
#Get the name of just the zip file
# NOTE(review): ZDIRNAME is unconditionally overwritten with "tmpCurseDl"
# below, so this assignment has no effect.
local ZDIRNAME=$(parseDirName $ZFILE)
#Remove the temp dir if it exists
rm -rf /tmp/CoS/tmpAddon
#Re-create the dir
mkdir -p /tmp/CoS/tmpAddon
#Download the file
echo "Downloading file: ${GREEN}$DLURL${CRESET}"
cd /tmp/CoS
wget --content-disposition --random-wait -N $DLURL
#Unzip the file to a temp directory
ZDIRNAME=tmpCurseDl
echo "Unzipping file: ${GREEN}/tmp/$ZFILE${CRESET} to ${GREEN}/tmp/$ZDIRNAME${CRESET}"
unzip -o "/tmp/CoS/$ZFILE" -d /tmp/CoS/tmpAddon
#Copy only new files into the Addon directory
rsync -hvrPt /tmp/CoS/tmpAddon/ "$ADDONPATH"
}
function dlAddon {
  # Dispatch one addon URL to the downloader matching its hosting provider;
  # anything unrecognized falls back to the generic zip downloader.
  echo "Finding Addon Provider for URL: ${GREEN}$1${CRESET}"
  PROVIDER=$(getAddonProvider $1)
  echo "Found Provider: ${GREEN}$PROVIDER${CRESET}"
  case "$PROVIDER" in
    "curseforge.com")
      dlCurseAddon $1
      ;;
    "wowinterface.com")
      dlWowIAddon $1
      ;;
    "github.com")
      dlGitAddon $1
      ;;
    *)
      dlIndy $1
      ;;
  esac
}
# ---- main -------------------------------------------------------------------
# Reads /etc/wow-addon-manager/<list>, fills the ADDONS array, then either
# prints the list, tests a single entry, or updates every addon.
REMEMBERPATH="$(pwd)"
# NOTE(review): SCRIPTDIR is computed but never used; kept for compatibility.
SCRIPTDIR="$(echo $0 | sed 's/\/cullingOfStratholme.sh//g')"
ADDONLIST=addon.list
ADDONPATH="/home/dominik/.local/share/Steam/steamapps/compatdata/3525601306/pfx/drive_c/Program Files (x86)/World of Warcraft/_retail_/Interface/AddOns"
if [ "$1" == "classic" ]
then
  echo "Install mods for classic..."
  ADDONLIST=addon.classic.list
  ADDONPATH="/home/dominik/.local/share/Steam/steamapps/compatdata/3525601306/pfx/drive_c/Program Files (x86)/World of Warcraft/_classic_era_/Interface/AddOns"
  echo ${ADDONLIST}
  echo ${ADDONPATH}
fi
mkdir -p "${ADDONPATH}"
ALFULL=/etc/wow-addon-manager/$ADDONLIST
#Check to see if the text file exists
if [ ! -f "$ALFULL" ]
then
  echo "Could not find file: $ADDONLIST"
  # FIX: bail out with a clear error instead of falling through to a
  # confusing redirection failure on the read loop below.
  exit 1
fi
declare -a ADDONS
ADDONCOUNT=0
# One addon URL per line; the || [ -n "$f" ] also picks up a final line
# that lacks a trailing newline.
while IFS= read -r f || [ -n "${f}" ]
do
  ADDONS[$ADDONCOUNT]=$f
  ADDONCOUNT=$(($ADDONCOUNT + 1))
done < "$ALFULL"
if [ "$1" == "list" ]
then
  printList
  cd ${REMEMBERPATH}
  exit
fi
if [ "$1" == "test" ]
then
  # NOTE(review): bash evaluates the word "test" arithmetically as an array
  # index (an unset variable -> 0), so this always tests the FIRST addon —
  # confirm that is the intended behaviour.
  ADDONURL=${ADDONS[$1]}
  dlAddon $ADDONURL
  cd ${REMEMBERPATH}
  exit
fi
for i in "${ADDONS[@]}";
do
  dlAddon $i
done
cd ${REMEMBERPATH}