Initial commit

This commit is contained in:
Peder Bergebakken Sundt 2022-10-17 22:40:48 +02:00
commit a6a0b16300
25 changed files with 10884 additions and 0 deletions

9
.editorconfig Normal file
View File

@ -0,0 +1,9 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
[*.nix]
indent_size = 2
indent_style = space

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
result

12
README.md Normal file
View File

@ -0,0 +1,12 @@
# Initial setup
```
nixos-generate-config
```
# TODO:
* [ ] Multiple user profiles, headless, nixpkgs-dev, desktop, hpc, pvv, etc
* [ ] Split stuff into multiple files
* [ ] Some system for multiple hosts with different configs
* [ ] Make a flake

498
configuration.nix Normal file
View File

@ -0,0 +1,498 @@
{ config, pkgs, lib, ... }:
{
#nixpkgs.overlays = overlays;
nixpkgs.config.allowUnfreePredicate = (pkg: true);
nixpkgs.config.allowUnfree = true;
system.autoUpgrade.enable = true; # daily nixos-rebuild switch, no reboot by default
nix.distributedBuilds = true;
# useful when the builder has a faster internet connection than i do
nix.extraOptions = ''
experimental-features = nix-command flakes
builders-use-substitutes = true
'';
nix.buildMachines = [
/**/
{
system = "x86_64-linux"; # can be a list
hostName = "rocm.pbsds.net";
sshUser = "pbsds";
maxJobs = 2;
speedFactor = 2;
#supportedFeatures = [ "nixos-test" "benchmark" "big-parallel" "kvm" ];
#mandatoryFeatures = [ ];
}
/**/
/**/
{
system = "x86_64-linux"; # can be a list
hostName = "isvegg.pvv.ntnu.no";
sshUser = "pederbs";
maxJobs = 1;
speedFactor = 1;
#supportedFeatures = [ "nixos-test" "benchmark" "big-parallel" "kvm" ];
#mandatoryFeatures = [ ];
}
/**/
];
# deduplicate with hardlinks, expensive. Alternative: nix-store --optimise
nix.settings.auto-optimise-store = true;
#nix.optimize.automatic = true; # periodic optimization
nix.gc = {
automatic = true;
dates = "weekly";
options = "--delete-older-than 30d";
};
# How to override package used by module
# https://github.com/NixOS/nixpkgs/issues/55366
# Modules pulled from the nixos-unstable channel; their stable-channel
# counterparts are masked via `disabledModules` below so only one copy
# of each module is active at a time.
imports = [
./hardware-configuration.nix # results of hardware scan
./profiles/nas # add NAS services
./profiles/websites
./profiles/code-remote
./users
<nixos-unstable/nixos/modules/services/misc/jellyfin.nix>
<nixos-unstable/nixos/modules/services/web-apps/invidious.nix>
];
# Mask the stable-channel copies of the modules imported from unstable above.
disabledModules = [
"services/misc/jellyfin.nix"
"services/web-apps/invidious.nix"
];
# Run the unstable builds of the services whose modules were swapped in above.
# (`pkgs.unstable` is defined by the packageOverrides just below.)
services.jellyfin.package = pkgs.unstable.jellyfin;
services.invidious.package = pkgs.unstable.invidious;
nixpkgs.overlays = [
(import ./overlays)
];
# Allow unstable packages.
# Exposes the nixos-unstable channel as `pkgs.unstable.*`, evaluated with the
# same nixpkgs config as the rest of the system.
nixpkgs.config.packageOverrides = pkgs: {
unstable = import <nixos-unstable> {
config = config.nixpkgs.config;
};
};
# enable opengl (headless)
hardware.opengl.enable = true;
#hardware.opengl.extraPackages = [ pkgs.mesa.drivers ];
hardware.opengl.extraPackages = with pkgs; [ mesa.drivers vaapiIntel libvdpau-va-gl vaapiVdpau intel-ocl ];
# run/build weird binaries
boot.binfmt.emulatedSystems = [
"wasm32-wasi"
"x86_64-windows"
"aarch64-linux"
"riscv64-linux"
];
# Bootloader
boot.loader.grub.enable = true;
boot.loader.grub.device = "/dev/sda";
boot.loader.grub.useOSProber = true;
# Virtualization
#services.docker.enable = true;
virtualisation = {
podman.enable = true;
podman.dockerCompat = true; # alias docker to podman
oci-containers.backend = "podman";
};
# Networking
networking = {
# Enable networking
networkmanager.enable = true;
#wireless.enable = true; # Enables wireless support via wpa_supplicant.
hostName = "noximilien"; # Define your hostname.
domain = "pbsds.net";
interfaces.eno1.ipv4.addresses = [
{ address = "192.168.1.9"; prefixLength = 24; }
];
nameservers = [
"192.168.1.254"
"8.8.8.8"
];
defaultGateway = {
address = "192.168.1.254";
interface = "eno1";
};
#useDHCP = true;
# Configure network proxy if necessary
#proxy.default = "http://user:password@proxy:port/";
#proxy.noProxy = "127.0.0.1,localhost,internal.domain";
};
# Open ports in the firewall.
#networking.firewall.allowedTCPPorts = [ ... ];
#networking.firewall.allowedUDPPorts = [ ... ];
# Or disable the firewall altogether.
networking.firewall.enable = false; # default is true, TEMP
# NFS mounts
# NFS mounts from the NAS at 192.168.1.3.
fileSystems = let
  # mkMount mountpoint server subdir:
  # a { name; value; } pair (for builtins.listToAttrs) that mounts
  # "<server><subdir>" over NFS at "<mountpoint><subdir>".
  # This resolves the old TODO: the foldl'-based joinSets helper is replaced
  # by listToAttrs; result is identical since no mount point occurs twice.
  mkMount = mountpoint: server: subdir: {
    name = "${mountpoint}${subdir}";
    value = {
      device = "${server}${subdir}";
      fsType = "nfs";
      #options = [ "nfsvers=4.2" ];
    };
  };
in builtins.listToAttrs (
  (map (mkMount "/mnt/reidun" "192.168.1.3:/Reidun/shared") [
    ""
    "/Backups"
    "/Comics"
    "/Downloads"
    "/Games"
    "/Games/Installable"
    "/Games/Portable"
    "/Games/ROMs"
    "/ISO"
    "/Images"
    "/Images/Collections"
    "/Images/Memes"
    "/Images/Pictures"
    "/Images/Wallpapers"
    "/Music"
    "/Music/Albums"
    "/Music/Kancolle"
    "/Music/OST"
    "/Music/Old"
    "/Music/Touhou"
    "/Music/Vocaloid"
    "/Music/dojin.co"
    "/Various"
    "/Various/Zotero"
    "/Various/resilio"
    "/Video"
    "/Video/Anime"
    "/Video/Concerts"
    "/Video/Documentaries"
    "/Video/Movies"
    "/Video/Musicvideos"
    "/Video/Series"
    "/Video/Talks"
    "/Work"
    "/Work/Documents"
    #"/Work/FL Studio" # broken, maybe due to the space?
    "/Work/Programming"
    "/Work/School"
    "/pub"
  ]) ++ (map (mkMount "/mnt/meconium" "192.168.1.3:/Meconium" ) [
    ""
    "/beets_music"
  ])
);
# Time zone and internationalisation properties.
time.timeZone = "Europe/Oslo";
i18n.defaultLocale = "en_US.utf8";
i18n.extraLocaleSettings = {
LC_ADDRESS = "nb_NO.utf8";
LC_IDENTIFICATION = "nb_NO.utf8";
LC_MEASUREMENT = "nb_NO.utf8";
LC_MONETARY = "nb_NO.utf8";
LC_NAME = "nb_NO.utf8";
LC_NUMERIC = "nb_NO.utf8";
LC_PAPER = "nb_NO.utf8";
LC_TELEPHONE = "nb_NO.utf8";
LC_TIME = "nb_NO.utf8";
};
services.xserver = {
# Configure X11 keymap
layout = "no";
xkbVariant = "";
};
console.keyMap = "no";# Configure console keymap
# Installed system packages
# $ nix search FOOBAR
environment.systemPackages = with pkgs; [
  lsof
  lshw
  htop
  file
  tmux
  #parallel # already provided by moreutils
  pwgen
  git
  nmap
  rsync
  bind.dnsutils
  graphviz
  dialog
  cowsay
  gnused
  gnumake
  coreutils-full
  moreutils
  binutils
  diffutils
  findutils
  usbutils
  bash-completion
  curl
  wget
  strace
  zip
  unrar
  unzip
  atool # archive-format-agnostic front-end (was listed twice; deduplicated)
  p7zip
  bzip2
  gzip
  micro
  aspell
  aspellDicts.en
  aspellDicts.nb
  vimv
  dos2unix
  #rmate # TODO: add to nixpkgs
  pandoc
  cargo
  cargo-edit
  sqlite
  #sshuttle
  visidata
  weston
  cage
  vimix-gtk-themes
  flat-remix-icon-theme
  xclip
  feh
  sshfs
  glances
  zenith
  fzf
  tealdeer #tldr
  entr
  axel aria
  bat
  xe # xargs alternative
  sd # sed alternative
  fd # find alternative
  silver-searcher # `ag`
  ripgrep
  jq
  yq
  htmlq
  sysz
  du-dust # du alternative
  ncdu # Disk usage analyzer with an ncurses interface
  gh
  hub
  nix-output-monitor
  nix-prefetch
  nix-top
  #nix-index
  nix-tree
  nixfmt
  alejandra
];
# TODO: make this root only?
programs.bash.shellInit = ''
if command -v fzf-share >/dev/null; then
source "$(fzf-share)/key-bindings.bash"
source "$(fzf-share)/completion.bash"
fi
'';
# TODO: make this root only?
programs.bash.shellAliases = {
ed = "micro"; # TODO: ${EDITOR:-micro}
};
environment.variables = {
EDITOR = "micro";
};
programs.dconf.enable = true;
# System fonts
# Nice to have when X-forwading on headless machines
fonts.fonts = with pkgs; [
noto-fonts # includes Cousine
noto-fonts-cjk
noto-fonts-emoji
noto-fonts-extra
dejavu_fonts
];
# Some programs need SUID wrappers, can be configured further or are
# started in user sessions.
#programs.mtr.enable = true;
#programs.gnupg.agent = {
# enable = true;
# enableSSHSupport = true;
#};
# OpenSSH
services.openssh.enable = true;
services.openssh.forwardX11 = true;
# AutoSSH reverse tunnels
# Persistent reverse SSH tunnels so this machine stays reachable from the
# listed hosts even behind NAT.
services.autossh.sessions = let
  mkSshSession = user: label: host: remotePort: monitoringPort: {
    inherit user monitoringPort; # local user / autossh monitoring port
    name = "ssh-reverse-tunnel-${label}-${toString remotePort}";
    extraArguments = lib.concatStringsSep " " [
      "-N" # no remote command
      "-o ServerAliveInterval=10" # check if still alive
      "-o ServerAliveCountMax=3" # check if still alive
      "-o ExitOnForwardFailure=yes" # reverse tunnel critical
      "-R ${toString remotePort}:127.0.0.1:22" # reverse tunnel
      host
    ];
  };
in [
  #(mkSshSession "pbsds" "p7pi" "pi@p7.pbsds.net" 10023 20000) # no mutual signature algorithm
  (mkSshSession "pbsds" "pbuntu" "pbsds@pbuntu.pbsds.net -p 23" 10023 20002)
  (mkSshSession "pbsds" "hildring" "pederbs@hildring.pvv.ntnu.no" 25775 20004)
];
# auto domain update
# TODO: use the dyndns endpoint + curl instead
/**/
systemd.services.domeneshop-updater = {
  description = "domene.shop domain updater";
  #after = [ "something?.service" ];
  #wants = [ "something?.service" ];
  serviceConfig = let
    # NOTE(review): domeneshop is taken from pkgs.python3Packages (overlay)
    # rather than ps — presumably intentional, verify it composes with httpx/toml.
    env = pkgs.python3.withPackages (ps: with ps; [ pkgs.python3Packages.domeneshop httpx toml ]);
    prog = pkgs.writeScript "domain-updater.py" ''
      #!${env}/bin/python
      from domeneshop import Client
      import os, httpx, pprint, toml
      def get_pub_ip() -> str:
          # Query several "what is my IP" services until one answers.
          for endpoint, getter in {
              "http://myip.tf": lambda resp: resp.text,
              "https://ipinfo.io/json": lambda resp: resp.json()["ip"],
              "https://api.ipify.org": lambda resp: resp.text,
              "http://ip.42.pl/raw": lambda resp: resp.text,
          }.items():
              resp = httpx.get(endpoint)
              if not resp.is_success: continue
              try:
                  # fix: use the per-endpoint getter; the old code always did
                  # resp.json()["ip"], so the plain-text endpoints could never
                  # succeed. strip() drops trailing newlines from text bodies.
                  return getter(resp).strip()
              except Exception:
                  continue
          else:
              raise Exception("Could not find external IP")
      # https://www.domeneshop.no/admin?view=api
      with open("/var/lib/secrets/domeneshop.toml") as f:
          c = toml.load(f)
      DOMENESHOP_TOKEN = os.environ.get("DOMENESHOP_TOKEN", c["secrets"]["DOMENESHOP_TOKEN"])
      DOMENESHOP_SECRET = os.environ.get("DOMENESHOP_SECRET", c["secrets"]["DOMENESHOP_SECRET"])
      IP_ADDRESS = get_pub_ip() # TODO: both ipv4 and ipv6
      # domain -> { record host -> record types to update }
      DOMAINS = {
          "pbsds.net": {
              "olavtr": ["A"],
          },
      }
      client = Client(DOMENESHOP_TOKEN, DOMENESHOP_SECRET)
      for domain in client.get_domains():
          if domain["domain"] not in DOMAINS:
              continue
          RECORDS = DOMAINS[domain["domain"]]
          for record in client.get_records(domain["id"]):
              if record["host"] in RECORDS \
              and record["type"] in RECORDS[record["host"]]:
                  print("Found: ", end="")
                  pprint.pprint(record)
                  if record["data"] != IP_ADDRESS:
                      record["data"] = IP_ADDRESS
                      print("Push: ", end="")
                      pprint.pprint(record)
                      client.modify_record(domain_id=domain["id"], record_id=record.pop("id"), record=record)
                  else:
                      print("Nothing done")
                  # Mark this record as handled so leftovers can be reported below.
                  RECORDS[record["host"]].remove(record["type"])
          for k, v in list(RECORDS.items()):
              if not v: RECORDS.pop(k)
          if not RECORDS: DOMAINS.pop(domain["domain"])
      if DOMAINS:
          print("ERROR: The following records were not found:")
          pprint.pprint(DOMAINS)
          exit(1)
      else:
          print("Success")
    '';
  in {
    # NOTE(review): DynamicUser together with a static User/Group name — the
    # service must still be able to read /var/lib/secrets/domeneshop.toml;
    # confirm the secret file's permissions allow that.
    User = "domeneshop";
    Group = "domeneshop";
    DynamicUser = true;
    ExecStart = prog;
    PrivateTmp = true;
  };
};
# Periodic trigger for the domeneshop-updater service above.
systemd.timers.domeneshop-updater = let
  period = "1d";
in {
  description = "Update domene.shop every ${period}";
  wantedBy = [ "timers.target" ];
  timerConfig = {
    OnBootSec = "5m"; # first run shortly after boot
    OnUnitInactiveSec = period; # then repeat once per period
    Unit = "domeneshop-updater.service";
  };
};
/**/
# This value determines the NixOS release from which the default
# settings for stateful data, like file locations and database versions
on your system were taken. It's perfectly fine and recommended to leave
# this value at the release version of the first install of this system.
# Before changing this value read the documentation for this option
# (e.g. man configuration.nix or on https://nixos.org/nixos/options.html).
system.stateVersion = "22.05"; # Did you read the comment?
}

51
overlays/default.nix Normal file
View File

@ -0,0 +1,51 @@
# https://nixos.wiki/wiki/Overlays
self: super: # final: prev:
let
# WARNING: this works for nixos-rebuild, but not for the nix-build trick shown on the bottom
# NOTE(review): `testing` is only referenced by the commented-out `inherit`
# below — it is dead weight unless that line is re-enabled.
testing = import (fetchTarball {
name = "pr-180823";
url = "https://github.com/r-ryantm/nixpkgs/archive/cfe56470cb641985d43adba690d5bca5453110fe.tar.gz";
sha256 = "0rbncjp2a99l6i4z7w2m86l40m33b3dl9qficfny47kqcfpgyx0b";
}) {
#config = super.config;
};
# Inject locally checked-out package definitions into python3Packages.
# NOTE(review): these absolute /home/pbsds paths make evaluation fail on any
# other machine (and under pure/flake evaluation) — confirm this is intended
# as a single-host development overlay only.
overridePythonPackages = old: {
overrides = self: super: {
pdoc = self.callPackage /home/pbsds/repos/nixpkgs-pdoc/pkgs/development/python-modules/pdoc {};
domeneshop = self.callPackage /home/pbsds/repos/nixpkgs-domemeshop/pkgs/development/python-modules/domeneshop {};
shap = self.callPackage /home/pbsds/repos/nixpkgs-catboost/pkgs/development/python-modules/shap {};
catboost = self.callPackage /home/pbsds/repos/nixpkgs-catboost/pkgs/development/python-modules/catboost {};
analytics-python = self.callPackage /home/pbsds/repos/nixpkgs-gradio/pkgs/development/python-modules/analytics-python {};
ffmpy = self.callPackage /home/pbsds/repos/nixpkgs-gradio/pkgs/development/python-modules/ffmpy {};
markdown-it-py = self.callPackage /home/pbsds/repos/nixpkgs-gradio/pkgs/development/python-modules/markdown-it-py {};
gradio = self.callPackage /home/pbsds/repos/nixpkgs-gradio/pkgs/development/python-modules/gradio {};
trivial-gradios = self.callPackage ./trivial-gradios {};
};
};
in { # "final" and "prev"
#kukkee = super.callPackage ./kukkee {};
#rallly = super.callPackage ./rallly {};
#inherit (testing) polaris polaris-web;
polaris = super.callPackage /home/pbsds/repos/nixpkgs-polaris/pkgs/servers/polaris {};
polaris-web = super.callPackage /home/pbsds/repos/nixpkgs-polaris/pkgs/servers/polaris/web.nix {};
mapcrafter = super.callPackage /home/pbsds/repos/nixpkgs-mapcrafter/pkgs/tools/games/minecraft/mapcrafter/default.nix {};
mapcrafter-world112 = super.callPackage /home/pbsds/repos/nixpkgs-mapcrafter/pkgs/tools/games/minecraft/mapcrafter/default.nix {world="world112";};
#python3.pkgs = super.python3.pkgs.override overridePythonPackages;
python3Packages = super.python3Packages.override overridePythonPackages;
}
# How to test:
# nix-build -E 'with import <nixpkgs> { overlays = [ (import ./. ) ]; }; MY_PACKAGE'
# warning: using testing or unstable here (^) will infinitely recurse.

View File

@ -0,0 +1,84 @@
# nix-build -E 'with import <nixpkgs> {}; callPackage ./default.nix {}'
{ lib
, stdenv
, pkgs
, fetchFromGitHub
, bash
, nodejs
, nodePackages
}:
let
  # node2nix-generated dependency closure (see node-composition.nix).
  nodeDependencies = (import ./node-composition.nix {
    inherit pkgs nodejs;
    inherit (stdenv.hostPlatform) system;
  }).nodeDependencies.override (old: {
    # access to path '/nix/store/...-source' is forbidden in restricted mode
    #src = src;
    #dontNpmInstall = true;
  });
in stdenv.mkDerivation rec {
  pname = "kukkee";
  #version = "0.1.0";
  version = "unstable-2022-06-19-270c8ed";
  src = fetchFromGitHub {
    owner = "AnandBaburajan";
    repo = "Kukkee";
    #rev = "v${version}";
    rev = "270c8ed421c8f1100a845958430e1ebe61d86d5a";
    sha256 = "CtbTKUZEPjwbLRYuC44JaeZn0Rjyn4h6tsBEWWQWJmA=";
  };
  # nodeDependencies' setup hook puts its bin/ (and thus `next`) on PATH.
  buildInputs = [
    nodeDependencies
  ];
  buildPhase = ''
    runHook preBuild
    #export PATH="${nodeDependencies}/bin:${nodejs}/bin:$PATH"
    ln -s ${nodeDependencies}/lib/node_modules .
    next build
    runHook postBuild
  '';
  installPhase = ''
    runHook preInstall
    # FIXME: is it possible for next.js to not run from a ".next" directory?
    mkdir -p $out/share/kukkee
    cp -a public .next $out/share/kukkee/
    ln -s ${nodeDependencies}/lib/node_modules $out/share/kukkee/
    # create next.js entrypoint
    mkdir $out/bin
    cat <<EOF > $out/bin/kukkee
    #!${bash}/bin/bash
    export PATH="${nodeDependencies}/bin:\$PATH"
    exec -a kukkee next start $out/share/kukkee "\$@"
    EOF
    chmod +x $out/bin/kukkee
    runHook postInstall
  '';
  passthru.updateScript = ./update.sh;
  meta = with lib; {
    # nixpkgs convention: short description with no trailing period
    description = "Self-hosted Doodle alternative: a meeting poll tool";
    longDescription = ''
      The free and open source meeting poll tool.
      Never ask "what time works for you all?" again.
      A self-hosted Doodle alternative.
    '';
    homepage = "https://kukkee.com/";
    license = licenses.mit;
    platforms = platforms.unix;
    maintainers = with maintainers; [ pbsds ];
  };
}

View File

@ -0,0 +1,17 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-14_x"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

View File

@ -0,0 +1,598 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
replaceDependencies(packageObj.optionalDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
  # fix: the name was copy-pasted as "addintegrityfields.js" from the
  # addIntegrityFieldsScript binding above; only affects the store path name.
  name = "reconstructpackagelock.js";
  text = ''
    var fs = require('fs');
    var path = require('path');

    var packageObj = JSON.parse(fs.readFileSync("package.json"));

    var lockObj = {
      name: packageObj.name,
      version: packageObj.version,
      lockfileVersion: 1,
      requires: true,
      dependencies: {}
    };

    // Record one node_modules entry (with a dummy integrity hash), then
    // recurse into its own node_modules/ folder.
    function augmentPackageJSON(filePath, dependencies) {
      var packageJSON = path.join(filePath, "package.json");
      if(fs.existsSync(packageJSON)) {
        var packageObj = JSON.parse(fs.readFileSync(packageJSON));
        dependencies[packageObj.name] = {
          version: packageObj.version,
          integrity: "sha1-000000000000000000000000000=",
          dependencies: {}
        };
        processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
      }
    }

    function processDependencies(dir, dependencies) {
      if(fs.existsSync(dir)) {
        var files = fs.readdirSync(dir);
        files.forEach(function(entry) {
          var filePath = path.join(dir, entry);
          var stats = fs.statSync(filePath);
          if(stats.isDirectory()) {
            if(entry.substr(0, 1) == "@") {
              // When we encounter a namespace folder, augment all packages belonging to the scope
              var pkgFiles = fs.readdirSync(filePath);
              pkgFiles.forEach(function(entry) {
                // fix: stat the scoped entry itself; the old code re-tested the
                // outer `stats` (the scope dir), which is always a directory.
                var pkgFilePath = path.join(filePath, entry);
                if(fs.statSync(pkgFilePath).isDirectory()) {
                  augmentPackageJSON(pkgFilePath, dependencies);
                }
              });
            } else {
              augmentPackageJSON(filePath, dependencies);
            }
          }
        });
      }
    }

    processDependencies("node_modules", lockObj.dependencies);

    fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
  '';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
fi
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Patch the shebang lines of all the executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
patchShebangs "$file"
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
# (node2nix-generated helper: produces a derivation whose shellHook exposes
# the generated npm dependency closure via NODE_PATH/PATH, plus a $out/bin/shell
# convenience wrapper that re-execs a shell with that hook applied.)
buildNodeShell =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
# The full npm dependency closure, built by the sibling generator above.
nodeDependencies = buildNodeDependencies args;
# Arguments not consumed here are forwarded verbatim to mkDerivation below.
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
} // extraArgs);
in
# Public API of this file; each builder is made overridable so consumers can
# tweak arguments with `.override` after the fact.
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,22 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p nodePackages.node2nix nix
# Regenerate the node2nix expressions (node-packages.nix, node-composition.nix)
# for the kukkee package from the package.json/package-lock.json found in its
# pinned upstream source.
set -euo pipefail
set -x

# Run from this script's directory so the relative ./default.nix and output
# paths below resolve correctly.
cd "$(dirname "${BASH_SOURCE[0]}")"

# Realise the package's source in the nix store to get at its lockfiles.
#store_src="$(nix-build . -A kukkee.src --no-out-link)"
store_src="$(nix-build -E 'with import <nixpkgs> {}; (callPackage ./default.nix {}).src' --no-out-link)"

# Generate the nix expressions for the npm dependency closure.
node2nix \
--nodejs-14 \
--development \
--input "$store_src"/package.json \
--lock "$store_src"/package-lock.json \
--output ./node-packages.nix \
--composition ./node-composition.nix
# --node-env ../../development/node-packages/node-env.nix \

View File

@ -0,0 +1,17 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p common-updater-scripts curl jq gnused nix coreutils
# Look up the latest upstream release tag of Kukkee on GitHub.
# NOTE(review): everything past the version lookup is commented out, so this
# script currently only fetches the tag (visible via `set -x`) and does NOT
# update the package pin yet — confirm whether that is intentional before
# relying on ./update.sh.
set -euo pipefail
set -x
#cd "$(dirname "${BASH_SOURCE[0]}")"/../../..
latestVersion="$(curl -s "https://api.github.com/repos/AnandBaburajan/Kukkee/releases?per_page=1" | jq -r ".[0].tag_name")"
#currentVersion=$(nix-instantiate --eval -E "with import ./. {}; kukkee.version or (lib.getVersion kukkee)" | tr -d '"')
#if [[ "$currentVersion" == "$latestVersion" ]]; then
# echo "Package is up-to-date: $currentVersion"
# exit 0
#fi
#update-source-version kukkee "$latestVersion"

9
overlays/kukkee/update.sh Executable file
View File

@ -0,0 +1,9 @@
#!/usr/bin/env bash
# Convenience wrapper: check the upstream version, then regenerate the
# node2nix dependency expressions, from this package's directory.
set -euo pipefail
cd "$(dirname "${BASH_SOURCE[0]}")"
./update-version.sh
./update-node-deps.sh

View File

View File

@ -0,0 +1,39 @@
{ lib
, stdenv
, fetchFromGitea
, buildPythonApplication
, poetry-core
, graphviz
, gradio
}:

# Small personal collection of gradio demo apps, packaged from a self-hosted
# gitea instance and built with poetry-core.
buildPythonApplication rec {
  pname = "trivial-gradios";
  version = "unstable-2022-07-03";
  format = "pyproject";

  src = fetchFromGitea {
    domain = "gitea.noximilien.pbsds.net";
    owner = "pbsds";
    repo = pname;
    rev = "4d3218a5c8b2fe383ba72a005029964aa46c68e7";
    sha256 = "hMuGgCjMT8II0ViRBWv8o+frRelvxQ8+E9MBrga4xL4=";
  };

  nativeBuildInputs = [ poetry-core ];

  propagatedBuildInputs = [
    gradio
    graphviz
  ];

  # Smoke-test that the gradio dependency is importable before the checks run.
  # FIX: was `gradio.version` (likely markdown-mangled `__version__`);
  # `__version__` is the conventional version attribute.
  preCheck = ''
    python -c 'import gradio; print(gradio.__version__)'
  '';

  meta = {
    description = "spismeg";
    license = lib.licenses.mit;
    # FIX: the nixpkgs meta attribute is `maintainers` (a list), not `maintainer`.
    maintainers = with lib.maintainers; [ pbsds ];
  };
}

View File

@ -0,0 +1,183 @@
{ config, pkgs, lib, ... }:
# Runs a code-server (VS Code in the browser) instance for user "theo"
# inside a NixOS container with its own private network, and reverse-proxies
# it through the host's nginx with ACME TLS.
let
# Fully qualified host domain, e.g. "<hostname>.<domain>".
domain = "${config.networking.hostName}.${config.networking.domain}";
mkDomain = subname: "${subname}.${domain}";
# Shorthand for reaching into the container's evaluated config below.
cnt = config.containers.code-server-theo.config;
in {
# NAT so the container (on the ve-+ veth interfaces) can reach the internet.
networking.nat = {
enable = true;
internalInterfaces = ["ve-+"];
externalInterface = "eno1"; # TODO: can i make this dynamic?
};
#imports = [
# "/home/pbsds/repos/nixpkgs-trees/containers-mkdir/nixos/modules/virtualisation/nixos-containers.nix"
#];
#disabledModules = [
# "virtualisation/nixos-containers.nix"
#];
# data can be destroyed with `nixos-container destroy code-server-theo`
containers.code-server-theo = {
autoStart = true;
# container has no network access
#
privateNetwork = true;
hostAddress = "10.240.100.2";
localAddress = "10.240.100.3";
#forwardPorts = [
# {
# #hostAddress = "127.0.0.1"; # does not exist
# hostPort = 53754;
# containerPort = 53754;
# protocol = "tcp";
# }
#];
#bindMounts."/home" = {
# hostPath = "/var/lib/code-server";
# isReadOnly = false;
#};
config = { config, pkgs, ... }: {
system.stateVersion = "22.05";
#imports = [ <home-manager/nixos> ];
#home-manager.useUserPackages = true; # install to /etc instead of ~/.nix-profile, needed for containers
#home-manager.useGlobalPkgs = true; # brrr
#home-manager.users.${config.services.code-server.user} = { pkgs, config, ... }: {
# programs.git.enable = true;
# programs.git.userName = "Theoharis Theoharis";
# programs.git.userEmail = "theotheo@ntnu.no";
#};
services.code-server = {
enable = true;
# Listen on all container interfaces so the host-side nginx can reach it.
host = "0.0.0.0"; # container
port = 53754;
# if you don't care about security: https://argon2.online/
hashedPassword = "$argon2i$v=19$m=16,t=2,p=1$MHh5UGNtU1lWR1UySnhIZw$ITg8U7Gq2CXByuOOnrKVUg";
# Reuse the vscode-with-extensions wrapper to bundle extensions with
# code-server; passthru names are overridden so the wrapper finds the
# right binary.
package = pkgs.vscode-with-extensions.override {
vscode = pkgs.code-server.overrideAttrs (old: {
passthru.executableName = "code-server";
passthru.longName = "Visual Studio Code Server";
});
#vscodeExtensions = vscode-extensions; [
vscodeExtensions = with (import <nixos-unstable> {}).vscode-extensions; [
shd101wyy.markdown-preview-enhanced
sanaajani.taskrunnercode
tomoki1207.pdf
] ++ pkgs.vscode-utils.extensionsFromVscodeMarketplace [
# A pile of railscasts-style color themes to pick from.
{
name = "new-railscasts";
publisher = "carakan";
version = "1.0.68";
sha256 = "sha256-uZCAurvZu7QHjTR6ukmYbsI58GpfTo3shdoX/MH2ElA=";
}
{
name = "theme-railscasts";
publisher = "PaulOlteanu";
version = "4.0.1";
sha256 = "sha256-67RNcMr+hvzn2FvapkHLd8OdEBAz8w4cwsGlu0tbCNY=";
}
{
name = "trailscasts";
publisher = "mksafi";
version = "1.2.3";
sha256 = "sha256-mZ9I1BYf8x3lpl5/2sojk+5GMfhDqRBzs6nFkumlPKg=";
}
{
name = "vscode-theme-railscasts-plus";
publisher = "marlosirapuan";
version = "0.0.6";
sha256 = "sha256-8GyyxDeehFo/lGSmA6dfXZ3DMZ/B632ax+9q3+irjws=";
}
{
name = "theme-railscast-next";
publisher = "edus44";
version = "0.0.2";
sha256 = "sha256-RYk6X4iKoEQlKSVhydnwWQJqt884+HC9DZN2aqIbfNI=";
}
{ # best, but no markdown
name = "railscasts";
publisher = "mrded";
version = "0.0.4";
sha256 = "sha256-vjfoeRW+rmYlzSuEbYJqg41r03zSfbfuNCfAhHYyjDc=";
}
{
name = "beardedtheme";
publisher = "BeardedBear";
version = "7.4.0";
sha256 = "sha256-8FY9my7v7bcfD0LH5AVNGI2dF1qMLnVp2LR/CiP01NQ=";
}
];
};
# Tooling available inside the code-server terminal, notably a pandoc
# wrapper preloaded with lua filters and a texlive set for PDF export.
extraPackages = with pkgs; [
(writeShellScriptBin "pandoc" ''
export XDG_DATA_HOME=${pandoc-lua-filters}/share
exec ${pandoc}/bin/pandoc "$@"
'')
(texlive.combine {
inherit (texlive)
scheme-small
titlesec
fontaxes
supertabular
xtab
# boxed quotes
mdframed
zref
needspace
soul
atkinson
;
})
pandoc-imagine
haskellPackages.pandoc-crossref
#haskellPackages.pandoc-plot
#pandoc-plantuml-filter nodePackages.mermaid-cli
bash
git
bat
gnumake
boxes
graphviz
#python3Packages.cairosvg
(python3.withPackages (ps: with ps; [
numpy
matplotlib
#python-lsp-server
]))
];
};
# Only the code-server port is reachable from the host side.
networking.firewall = {
enable = true;
allowedTCPPorts = [
config.services.code-server.port
];
};
};
};
# Host-side reverse proxy: code-server.<host domain> -> container address.
services.nginx.virtualHosts.${mkDomain "code-server"} = {
forceSSL = true; # addSSL = true;
enableACME = true; #useACMEHost = acmeDomain;
locations."/" = {
#proxyPass = "http://127.0.0.1:${toString cnt.services.code-server.port}";
#proxyPass = "http://10.240.100.3:${toString cnt.services.code-server.port}";
proxyPass = "http://${config.containers.code-server-theo.localAddress}:${toString cnt.services.code-server.port}";
# websockets are required for the code-server terminal and live reload
proxyWebsockets = true;
};
};
}

1278
profiles/nas/default.nix Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,160 @@
{ lib, pkgs, config, ... }:
# NixOS module for the Kukkee meeting-poll server (a next.js application).
# Optionally provisions a local MongoDB instance for it to store polls in.
let
  cfg = config.services.kukkee;
in with builtins; {
  options.services.kukkee = with lib; {
    enable = mkEnableOption "kukkee service";

    package = mkPackageOption pkgs "kukkee" { };

    user = mkOption {
      type = types.str;
      default = "kukkee";
      description = "User under which Kukkee runs.";
    };

    group = mkOption {
      type = types.str;
      default = "kukkee";
      description = "Group under which Kukkee runs.";
    };

    listen = mkOption {
      type = types.str;
      default = "127.0.0.1";
      description = "Which address to listen on.";
    };

    port = mkOption {
      type = types.port;
      default = 3000;
      description = "Which port Kukkee should listen to for HTTP.";
    };

    extraArgs = mkOption {
      type = types.listOf types.str;
      default = [];
      description = "Extra command-line arguments for the next.js runtime.";
    };

    baseUrl = mkOption {
      type = types.str;
      # BUG FIX: cfg.port is an integer; interpolating it directly
      # ("${cfg.port}") is a Nix evaluation error ("cannot coerce an
      # integer to a string"), so it must go through toString.
      default = "http://localhost:${toString cfg.port}";
      description = "The base URL for the site";
    };

    openFirewall = mkOption {
      type = types.bool;
      default = false;
      description = ''
        Open the configured port in the firewall for the Kukkee server.
        Preferably the Kukkee server is instead put behind a reverse proxy.
      '';
    };

    mongodb.enable = mkOption {
      type = types.bool;
      default = true;
      description = "Whether to configure a local MongoDB instance.";
    };

    mongodb.uri = mkOption {
      type = types.str;
      default = "mongodb://127.0.0.1:27017/kukkeePolls";
      example = "mongodb+srv://<user>:<password>@<cluster>.aewjs.mongodb.net/<database>?retryWrites=true&w=majority";
      description = ''
        Mongodb connection string. MongoDB databases are normally
        created automatically upon first write.
      '';
    };
  };

  config = lib.mkMerge [

    # Local MongoDB: enable it and make sure it is up before Kukkee starts.
    (lib.mkIf (cfg.enable && cfg.mongodb.enable) {
      services.mongodb.enable = true;
      systemd.services.kukkee.after = [ "mongodb.service" ];
      systemd.services.kukkee.requires = [ "mongodb.service" ];
    })

    (lib.mkIf cfg.enable {
      systemd.services.kukkee = {
        description = "Kukkee Server";
        after = [ "network.target" ];
        wantedBy = [ "multi-user.target" ];

        environment = {
          # https://github.com/AnandBaburajan/Kukkee/blob/270c8ed421c8f1100a845958430e1ebe61d86d5a/.env.example
          NEXT_MONGODB_URI = cfg.mongodb.uri;
          NEXT_PUBLIC_BASE_URL = cfg.baseUrl;
          NEXT_PUBLIC_ENCRYPTION_KEY = "2a148b84dcec756c59ab96d450a79372"; # TODO
          NEXT_PUBLIC_ENCRYPTION_IV = "0d88ec0887f614b6"; # TODO
        };

        serviceConfig = let
          # Convert the port to a string explicitly before shell-escaping.
          # cfg.extraArgs is appended unescaped on purpose, so callers can
          # pass pre-formed argument strings.
          args = map lib.strings.escapeShellArg [
            "--hostname" cfg.listen
            "--port" (toString cfg.port)
          ] ++ cfg.extraArgs;
        in rec {
          User = cfg.user;
          Group = cfg.group;
          ExecStart = "${cfg.package}/bin/kukkee " + (lib.strings.concatStringsSep " " args);
          Restart = "on-failure";

          # Security options:
          NoNewPrivileges = true;
          AmbientCapabilities = "";
          CapabilityBoundingSet = "";
          DeviceAllow = "";
          LockPersonality = true;
          PrivateTmp = true;
          PrivateDevices = true;
          PrivateUsers = true;
          ProtectClock = true;
          ProtectControlGroups = true;
          ProtectHostname = true;
          ProtectKernelLogs = true;
          ProtectKernelModules = true;
          ProtectKernelTunables = true;
          RemoveIPC = true;
          RestrictNamespaces = true;
          RestrictAddressFamilies = [ "AF_NETLINK" "AF_INET" "AF_INET6" "AF_UNIX" ];
          RestrictRealtime = true;
          RestrictSUIDSGID = true;
          SystemCallArchitectures = "native";
          SystemCallErrorNumber = "EPERM";
          SystemCallFilter = [
            "@system-service"
            "~@cpu-emulation" "~@debug" "~@keyring" "~@memlock" "~@obsolete" "~@privileged" "~@setuid"
          ];
        };
      };

      # Only create the static user/group when the defaults were kept;
      # otherwise the admin is expected to provide them.
      users.users = lib.mkIf (cfg.user == "kukkee") {
        kukkee = {
          group = cfg.group;
          isSystemUser = true;
        };
      };
      users.groups = lib.mkIf (cfg.group == "kukkee") {
        kukkee = {};
      };

      networking.firewall = lib.mkIf cfg.openFirewall {
        allowedTCPPorts = [ cfg.port ];
      };
    })
  ];

  meta.maintainers = with lib.maintainers; [ pbsds ];
}

View File

@ -0,0 +1,140 @@
{ lib, pkgs, config, ... }:
# NixOS module for adnanh's webhook: a small server that runs configured
# shell commands in response to incoming HTTP requests.
#with builtins;
let
# Keep a handle on the real lib before shadowing it below.
lib_ = lib;
in
let
cfg = config.services.webhook;
# Hooks are declared as Nix values and serialized to hooks.json.
hooksFormat = pkgs.formats.json {};
# Shadow lib with a no-op mdDoc so this module evaluates on nixpkgs
# versions that predate lib.mdDoc.
lib = lib_ // { mdDoc = x: x; }; # HACK
in {
options.services.webhook = with lib; {
enable = mkEnableOption "webhook service";
package = mkPackageOption pkgs "webhook" { };
user = mkOption {
type = types.str;
default = "webhook";
description = lib.mdDoc "User under which Webhook runs.";
};
group = mkOption {
type = types.str;
default = "webhook";
description = lib.mdDoc "Group under which Webhook runs.";
};
listenHost = mkOption {
type = types.str;
default = "127.0.0.1";
description = lib.mdDoc "Which address Webhook should listen to for HTTP.";
};
listenPort = mkOption {
type = types.port;
default = 8080;
description = lib.mdDoc "Which port Webhook should listen to for HTTP.";
};
openFirewall = mkOption {
type = types.bool;
default = false;
description = lib.mdDoc ''
Open the configured ports in the firewall for the Webhook server.
Preferably the Webhook server is instead put behind a reverse proxy.
'';
};
urlPrefix = mkOption {
type = types.str;
default = "hooks";
description = lib.mdDoc ''
Url prefix to use for served hooks.
`http://listen:port/PREFIX/:hook-id`
'';
};
httpMethods = mkOption {
type = types.listOf types.str;
default = ["POST"];
defaultText = literalExpression ''["POST"]'';
description = lib.mdDoc "Default allowed HTTP methods";
};
verbose = mkOption {
type = types.bool;
default = true;
description = lib.mdDoc "Whether to log events or not.";
};
extraArgs = mkOption {
type = types.listOf types.str;
default = [];
description = lib.mdDoc ''
Extra command-line arguments.
If you want to set CORS headers, you can set [ "-header" "name=value" ]
to the appropriate CORS headers to passed along with each response.
'';
};
settings = mkOption {
type = hooksFormat.type;
default = [];
example = lib.literalExpression ''
[
{
id = "my-webhook";
execute-command = pkgs.writeShellScript "handle-my-webhook.sh" '${""}'
echo "foobar"
'${""}';
}
]
'';
description = lib.mdDoc ''
The configured hooks for Webhook to serve.
Here is a collection of hook examples:
<https://github.com/adnanh/webhook#examples>
'';
};
};
config = lib.mkIf cfg.enable {
systemd.services.webhook = {
description = lib.mdDoc "Webhook Server";
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
serviceConfig = let
# escapeShellArgs (below) stringifies each element, so the integer
# listenPort and the hooks.json store path are both acceptable here.
args = [
"-ip" cfg.listenHost
"-port" cfg.listenPort
"-http-methods" (lib.strings.concatStringsSep "," cfg.httpMethods)
"-urlprefix" cfg.urlPrefix
"-hooks" (hooksFormat.generate "hooks.json" cfg.settings)
] ++ lib.optional cfg.verbose "-verbose"
++ cfg.extraArgs;
in rec {
User = cfg.user;
Group = cfg.group;
# NOTE(review): with a custom cfg.user this is false and the module
# never creates that user — the admin must define it, or the unit
# fails to start. Confirm whether that is intended.
DynamicUser = cfg.user == "webhook";
ExecStart = "${cfg.package}/bin/webhook " + (lib.strings.escapeShellArgs args);
Restart = "on-failure";
};
};
networking.firewall = lib.mkIf cfg.openFirewall {
allowedTCPPorts = [ cfg.listenPort ];
};
};
meta.maintainers = with lib.maintainers; [ pbsds ];
}

View File

@ -0,0 +1,254 @@
{ config, pkgs, ... }:
# Static websites and web apps served by nginx on this host.
# Several experiments below are toggled off via the `/** /` comment trick:
# a region between `/** /` and `/**/` is a Nix block comment; removing the
# space in the opening marker re-enables it.
let
lib = pkgs.lib;
# Fully qualified host domain, e.g. "<hostname>.<domain>".
domain = "${config.networking.hostName}.${config.networking.domain}";
mkDomain = subname: "${subname}.${domain}";
in {
#services.nginx.enable = true;
imports = [
./services/pdoc.nix
];
# links.pbsds.net
# Static link-collection page, pinned straight from self-hosted gitea.
services.nginx.virtualHosts."links.pbsds.net" = let
links-pbsds-net = pkgs.fetchFromGitea rec {
name = repo;
domain = "gitea.noximilien.pbsds.net";
owner = "pbsds";
repo = "links.pbsds.net";
rev = "fd980f4610f8027b4fc89c506542009f09504085";
hash = "sha256-Iz/lfLkdCLJyyZ/PM9+VCkCG5lYSb9/i4x0ZhranBxc=";
};
in {
#serverAliases = map mkDomain [ "links" ];
forceSSL = true; # addSSL = true;
enableACME = true; #useACMEHost = acmeDomain;
root = "${links-pbsds-net}";
};
# refleksjon.no
# Static site served from a subdirectory of the pinned repo.
services.nginx.virtualHosts.${mkDomain "refleksjon"} = let
refleksjon-net = pkgs.fetchFromGitea rec {
name = repo;
domain = "gitea.noximilien.pbsds.net";
owner = "pbsds";
repo = "refleksjon.net";
rev = "c1b91e369bf411e44534334595d4481cb59bd129";
sha256 = "O+lNqD2LuESKM+S+AljF2SzIxzK05xdZqiLhylTQ2ls=";
};
in {
forceSSL = true; # addSSL = true;
enableACME = true; #useACMEHost = acmeDomain;
root = "${refleksjon-net}/www.refleksjon.net";
};
# roroslyd.no
# Static site served from a subdirectory of the pinned repo.
services.nginx.virtualHosts.${mkDomain "roroslyd"} = let
roroslyd-no = pkgs.fetchFromGitea rec {
name = repo;
domain = "gitea.noximilien.pbsds.net";
owner = "pbsds";
repo = "roroslyd.no";
#rev = "v${version}";
rev = "fb7b0a7e70754cf368de7d7c469dabe71b2f1c78";
sha256 = "Rud5bBUuPgIC5UAGtyuYhUtXhN174UCWDoLUWWc/n6U=";
};
in {
forceSSL = true; # addSSL = true;
enableACME = true; #useACMEHost = acmeDomain;
root = "${roroslyd-no}/www.roroslyd.no";
};
# trivial gradios
/** /
systemd.services.trivial-gradios-heritage-graph = {
description = pkgs.python3Packages.trivial-gradios.meta.description;
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
serviceConfig = rec {
User = "trivial-gradios";
Group = "trivial-gradios";
DynamicUser = true;
StateDirectory = "trivial-gradios-heritage-graph";
WorkingDirectory = "/var/lib/${StateDirectory}";
ExecStart = "${pkgs.python3Packages.trivial-gradios}/bin/trivial-gradios-heritage-graph --port 37001";
Restart = "on-failure";
};
};
services.nginx.virtualHosts.${mkDomain "gradio"} = {
forceSSL = true; # addSSL = true;
enableACME = true; #useACMEHost = acmeDomain;
locations."/" = {
root = pkgs.writeTextDir "index.html" ''
<table>
<tr>
<th>name
<th>description
<tr>
<td><a href="heritage-graph/">heritage-graph</a>
<td>A simple tool to greate a directed ancestry graph.
</table>
'';
};
locations."/heritage-graph/" = {
proxyPass = "http://127.0.0.1:37001";
proxyWebsockets = true;
extraConfig = ''
rewrite ^/heritage-graph(/.*)$ $1 break;
'';
};
};
/**/
# CensorDodge
# A lightweight and customisable web proxy
/** /
services.phpfpm.pools.censordodge = {
user = "censordodge";
group = "censordodge";
settings = {
"listen.owner" = config.services.nginx.user;
"listen.group" = config.services.nginx.group;
"pm" = "dynamic";
"pm.max_children" = "32";
"pm.start_servers" = "2";
"pm.min_spare_servers" = "2";
"pm.max_spare_servers" = "4";
"pm.max_requests" = "500";
};
};
services.nginx.virtualHosts.${mkDomain "censordodge"} = {
forceSSL = true; # addSSL = true;
enableACME = true; #useACMEHost = acmeDomain;
root = pkgs.fetchFromGitHub {
owner = "ryanmab";
repo = "CensorDodge";
rev = "2480e8269190ca8618e41dc581f9d55f4ce9f333";
sha256 = "8R3lyxF22HXui4pJytMcqwwa5TDXIJb6fWII934IhEA=";
};
extraConfig = ''
index index.php;
'';
locations."/".extraConfig = ''
try_files $uri $uri/ /index.php?$args;
'';
locations."~ \.php$".extraConfig = ''
include ${config.services.nginx.package}/conf/fastcgi.conf;
fastcgi_pass unix:${config.services.phpfpm.pools.censordodge.socket};
fastcgi_buffers 16 16k;
fastcgi_buffer_size 32k;
'';
};
users.users.censordodge = {
isSystemUser = true;
group = "censordodge";
};
users.groups.censordodge = {};
/**/
# OpenSpeedtTest
# Pure HTML5 Network Performance Estimation Tool
/** /
services.nginx.virtualHosts.${mkDomain "openspeedtest"} = let
cfg = config.services.nginx.virtualHosts.${mkDomain "openspeedtest"};
openspeedtest = pkgs.fetchFromGitHub rec {
name = "${owner}-unstable-2022-07-02";
owner = "openspeedtest";
repo = "Speed-Test";
#rev = "v${version}";
rev = "59eb7367ede5555f7516ebb8eeeb65245bc5a6e5";
sha256 = "yzvulzgBUri+sU9WxZrLKH/T+mlZu9G2zucv8t/fZdY=";
postFetch = ''
rm $out/README.md
rm $out/License.md
rm $out/.gitignore
rm $out/hosted.html
'';
};
in {
forceSSL = true; # addSSL = true;
enableACME = true; #useACMEHost = acmeDomain;
http2 = false;
root = "${openspeedtest}";
extraConfig = ''
#access_log off;
#error_log /dev/null; #Disable this for Windows Nginx.
#log_not_found off;
gzip off;
fastcgi_read_timeout 999;
server_tokens off;
tcp_nodelay on;
tcp_nopush on;
sendfile on;
open_file_cache max=200000 inactive=20s;
open_file_cache_valid 30s;
open_file_cache_min_uses 2;
open_file_cache_errors off;
'';
locations."/".extraConfig = lib.mkIf false ''
if_modified_since off;
expires off;
etag off;
if ($request_method != OPTIONS ) {
add_header 'Access-Control-Allow-Origin' "*" always;
add_header 'Access-Control-Allow-Headers' 'Accept,Authorization,Cache-Control,Content-Type,DNT,If-Modified-Since,Keep-Alive,Origin,User-Agent,X-Mx-ReqToken,X-Requested-With' always;
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always;
#Very Very Important! You SHOULD send no-store from server for Google Chrome.
add_header 'Cache-Control' 'no-store, no-cache, max-age=0, no-transform';
add_header 'Last-Modified' $date_gmt;
}
if ($request_method = OPTIONS ) {
add_header 'Access-Control-Allow-Origin' "$http_origin" always;
add_header 'Access-Control-Allow-Headers' 'Accept,Authorization,Cache-Control,Content-Type,DNT,If-Modified-Since,Keep-Alive,Origin,User-Agent,X-Mx-ReqToken,X-Requested-With' always;
add_header 'Access-Control-Allow-Methods' "GET, POST, OPTIONS" always;
add_header 'Access-Control-Allow-Credentials' "true";
return 204;
}
'';
# IF and Only if you Enabled HTTP2 otherwise never enable the following
# HTTP2 will return 200 withot waiting for upload to complete. it's smart but we don't need that to happen here when testing upload speed on HTTP2.
locations."/upload.bin".extraConfig = ''
#proxy_set_header Host $host;
proxy_pass http://127.0.0.1:80/upload.bin;
'';
locations."~* ^.+\.(?:css|cur|js|jpe?g|gif|htc|ico|png|html|xml|otf|ttf|eot|woff|woff2|svg)$".extraConfig = lib.mkIf false ''
#access_log off;
expires 365d;
add_header 'Cache-Control' public;
add_header 'Vary' Accept-Encoding;
tcp_nodelay off;
open_file_cache max=3000 inactive=120s;
open_file_cache_valid 45s;
open_file_cache_min_uses 2;
open_file_cache_errors off;
gzip on;
gzip_disable "msie6";
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_buffers 16 8k;
gzip_http_version 1.1;
gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript application/javascript;
'';
};
/**/
}

View File

@ -0,0 +1,273 @@
{ config, pkgs, ... }:
# Serves auto-generated Python API documentation over nginx:
# pdoc.<domain> uses pdoc, pdoc3.<domain> uses pdoc3. For every listed
# module an HTML tree is built in its own derivation; modules whose build
# fails at eval time fall back to a static table row (via tryEval).
let
lib = pkgs.lib;
# Fully qualified host domain, e.g. "<hostname>.<domain>".
domain = "${config.networking.hostName}.${config.networking.domain}";
mkDomain = subname: "${subname}.${domain}";
# pdoc data
# Standard-library modules to document (no extra package needed).
pdoc-builtins = [
"builtins"
"os"
"array"
"sys"
"time"
"traceback"
"pathlib"
"itertools"
"functools"
"unittest"
"argparse"
"asyncio"
"textwrap"
"collections"
"configparser"
"concurrent"
"contextlib"
"operator"
"pickle" # TODO: marshmallow or whatever
"copy"
"ctypes"
"pprint"
"shlex"
"re"
"abc"
"ast"
"random"
"shutil"
"sqlite3"
"subprocess"
"statistics"
"string"
"tarfile"
"typing"
"uuid"
"warnings"
"wave"
"dataclasses"
"glob"
"gzip"
"inspect"
"json"
"base64"
"zipfile"
];
# Third-party modules from python3Packages. Entries are either a plain
# string (package name == import name) or {name; literal;} when the
# pypi/nixpkgs name differs from the importable module name.
pdoc-modules = [
{name="more-itertools"; literal="more_itertools";}
"altair"
"pygal"
"vispy"
#"ggplot"
"seaborn"
"bokeh"
"plotly"
"tabulate"
"wavefile"
"moderngl"
"pydantic"
"typer"
"ptpython"
"colorama"
{name="pyjwt"; literal="jwt";}
"zipp"
"aiofiles"
"aafigure"
"urllib3"
"tesserocr"
"trio"
"starlette"
"pyverilog"
"nixpkgs"
"wavedrom"
"httpx"
"pyquery"
"mpv"
{name="beautifulsoup4"; literal="bs4";}
"hid"
#{name="hidapi"; literal="hid";}
"sanic"
"paramiko"
"pydub"
"aiohttp"
"rtoml"
"redis"
"numpy"
"domeneshop"
"munch"
"migen"
"amaranth"
"click"
"attrs"
"graphviz"
"baron"
"redbaron"
"fastapi"
"pytest"
#"pyglet" # pyglet.com fails, windows only
#"pygame" # pygame.movie fails on pdoc3, pdoc hangs
"plotly"
"peewee"
"parsel"
"pandas"
"mutmut"
"mlflow"
"meshio"
#"einops" # depends on tensorflow, which is broken ATM
"aiodns"
"json5"
"seaborn"
"matplotlib"
"dash"
"rarfile"
"pyramid"
"pygtail"
"codecov"
"nbconvert"
"humanfriendly"
"pendulum"
"jsonpickle"
"cachetools"
"wrapt"
"lxml"
"chardet"
"yarl"
"frozenlist"
"itsdangerous"
"xmltodict"
{name="cached-property"; literal="cached_property";}
"toolz"
"aioitertools"
"coconut"
"asyncpg" #"aiopg"
{name="libsass"; literal="sass";}
{name="pytorch"; literal="torch";}
{name="pytorch-lightning"; literal="pytorch_lightning";}
{name="pillow"; literal="PIL";}
"trio"
"tqdm"
"rich"
"pudb"
"pony"
"mido"
"jedi"
"h5py"
"atom"
"toml"
{name="pyyaml"; literal="yaml";}
"jinja2"
"requests"
"h5py"
"imageio"
"pygments"
"trimesh"
#"faiss"
#"geomloss"
#"mesh_to_sdf"
#"pyrender"
];
# Accessors for the two entry shapes described above.
toName = x: if builtins.isString x then x else x.name;
toLiteral = x: if builtins.isString x then x else x.literal;
# Build the documentation derivation for one module, along with an
# index.part-*.html table-row fragment that mkPdocs concatenates below.
mkPdoc = use-pdoc3: isBuiltin: pkg: let
description = if isBuiltin
then "builtin"
else pkgs.python3Packages.${toName pkg}.meta.description;
version = if isBuiltin
then "-"
else pkgs.python3Packages.${toName pkg}.version;
homepage = if isBuiltin
then "https://docs.python.org/3/library/${toLiteral pkg}.html"
else pkgs.python3Packages.${toName pkg}.meta.homepage or "-";
doc = pkgs.runCommand "pdoc${if use-pdoc3 then "3" else ""}-${toName pkg}-docs" {
nativeBuildInputs = (if use-pdoc3
then [pkgs.python3Packages.pdoc3]
else [pkgs.python3Packages.pdoc])
++ lib.optional (!isBuiltin) (builtins.getAttr (toName pkg) pkgs.python3Packages);
NAME = toName pkg;
LITERAL = toLiteral pkg;
# TODO: license
# TODO: build html with something better than bash
} ''
( timeout 900s ${if !use-pdoc3
then ''pdoc --no-search --math --no-browser --output-directory $out "$LITERAL"''
else ''pdoc3 --skip-errors --output-dir $out --html "$LITERAL"''
} 2>&1 | tee $LITERAL.log ) || true
mkdir -p $out
cp $LITERAL.log $out
test -f $out/index.html && rm -v $out/index.html
function write {
{ printf "%s" "$@"; echo; } >> $out/index.part-"$LITERAL".html
}
write "<tr>"
if test -f $out/"$LITERAL".html; then
write "<td><a href=\"$LITERAL.html\">$NAME</a>"
elif test -d $out/"$LITERAL"; then
write "<td><a href=\"$LITERAL/\">$NAME</a>"
else
write "<td>$NAME"
fi
write "<td>${version}"
if test -s $out/$LITERAL.log; then
write "<td><a href=\"$LITERAL.log\">log</a>"
else
write "<td>-"
fi
write "<td>${lib.escapeXML description}"
${if homepage == "-" then ''
write "<td>n/a"
'' else ''
write "<td><a href=\"${homepage}\">${homepage}</a>"
''}
write "</tr>"
'';
# Static table row used when evaluating `doc` itself fails
# (e.g. the package is marked broken). &#10799; is a cross mark.
fallback = pkgs.writeTextDir "index.part-${toLiteral pkg}.html" ''
<tr>
<td>${toLiteral pkg}
<td>${version}
<td>&#10799;
<td>${lib.escapeXML description}
<td>${if homepage == "-" then
"n/a"
else
''<a href="${homepage}">${homepage}</a>''
}
</tr>
'';
in if (builtins.tryEval doc.outPath).success
then doc
else fallback;
# Join all per-module docs into one tree and stitch the index.part-*.html
# fragments into a single index.html table.
mkPdocs = use-pdoc3: pkgs.symlinkJoin {
name = "pdoc-docs";
paths = (map (mkPdoc use-pdoc3 true) pdoc-builtins) ++ (map (mkPdoc use-pdoc3 false) pdoc-modules);
# note: globs are sorted
postBuild = ''
echo "<!DOCTYPE html>" >> $out/index.html
echo "<table><tr><th>name<th>version<th>log<th>description<th>homepage</tr>" >> $out/index.html
cat $out/index.part-*.html >> $out/index.html
rm $out/index.part-*.html
echo "</table>" >> $out/index.html
'';
};
in {
# lib.filter (x: lib.isDerivation x && (builtins.tryEval x.outPath).success) (lib.attrValues linuxPackages_latest))
# Pdoc
# Auto-generate API documentation for Python projects.
services.nginx.virtualHosts.${mkDomain "pdoc"} = {
forceSSL = true; # addSSL = true;
enableACME = true; #useACMEHost = acmeDomain;
root = mkPdocs false;
};
services.nginx.virtualHosts.${mkDomain "pdoc3"} = {
forceSSL = true; # addSSL = true;
enableACME = true; #useACMEHost = acmeDomain;
root = mkPdocs true;
};
}

31
users/default.nix Normal file
View File

@ -0,0 +1,31 @@
{ config, pkgs, lib, ... }:
# Declares user accounts shared across hosts, plus home-manager wiring.
{
# User accounts
# Don't forget to set a password with passwd!
imports = [
<home-manager/nixos>
./pbsds
];
# Use the system nixpkgs (with its overlays/config) for home-manager too.
home-manager.useGlobalPkgs = true;
# TODO: nas stuff
# TODO: can uid mapping be done at nfs level?
# Fixed uid/gid so ownership stays stable across hosts (see NAS TODO above).
users.users.pbsds.uid = 1001;
users.groups.pbsds.gid = 1001;
users.users.jornane = {
isNormalUser = true;
uid = 1002;
description = "jornane";
extraGroups = [ "networkmanager" "wheel" ]; # TODO: NAS stuff
openssh.authorizedKeys.keys = [
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDhGRFktIRrppVVILraEn5eTrANBIBMcpNT4qvNcd7Ut"
"ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAA/UAomSQjOHb4Im0TV70f7Jb/JpsQDd7YKHCXBmjmsrXi2dJVpw/tn+FzP4d2XJXm38hVN89yG+PQwZhf3PSHBaB4DXqFnVLFNWXTRyPPnc1U9uTEvLnZPpVJ/iv2zuS73QHrDcBdIubtdnsr239dJizUJJKwSMHszhOAN4AMYS9WivozdmyS+3o4p8mSp+tOWhq0hmVewnH0teTaHASpvE0V65xW9RGc5AWx0PgkGTXScOOf4/N8oXILn6mepODstlRKCZnBsC/LaXgJsk2+BX/Q/t4V0ytHh9iYblSavNjZZXRvygvkmV/eYAJAJ+igHubs2fEDsXfRj9J0O6JWjAmsELObCYGRbg9QkvaRq5EQgDoSW64iQUmbfB8NmYyXxg4fh0xBUrX87YkYvtHznWzD8hZkqRfj4K9Ixfk1Bsaxb5ubU3/mjGLOpZZ47zEqoen43rUxLq+eeMEQGDbq3mAcA6uX73MvBTzERrfh93rojwlUHEUDoUYyq7aN6Y9vF8/gy3KT2+pvAoUy4NDImSmJTwVcFJ+qUsAaGMECKiznte3Qn8TiD5G9nqeqCoA9edegM2N0z+ovsiXRxVqQDPh3cz/VPSsTKa8jNxhFpw4Q6KzDrtQOKXDkrhSKTxozVLYw2rYCkd2odOhjIJiN63UTNSm2z37ckKbOCqDy6LwW2ls4OzH/LOz2QDkMCwe7MYMrC66wanDhsRUZwlbSEs8aB25NB6OGg61hId3SLS8HzJ+4dmbHhciZm0oJlKRSMAqMLO6o9OVguJOl1td71rhnqAbp4UuaMqm5Zzut1ET+zkYB4t2voTuMhSrEJn1RS4hxR2rWt5jF9Nn67Mu70c0K2DE7FXqldGALC87GO7PHLTnNRg3o8FCkmVYlHNUEqHR56Incg5sC6KS9G1RL3KEHzjgzz8RjFXR5p1Qj+ZZjObVuENdWuqk7gQaxsyocCCB4pbBtF9AYDOIIGCn8rJSKUFvD8KIaTpWFsFoUXanSnAiSCT98GhfIBLbgAt4yJmegRKOML/cxplCh0z9MkNlfPdVU+LI/2RSj3NJpxd/KuR1l73IpgVNcbumXefAY95ztB/w067ZHCFlO0r+Q42NacthsMDc4Ffd+grLpo7KSmDRc+L9YdRNDgLZMbfIimHYIRRMdvEMEICXe1tUvtKBSfU1goTSXXYK2fLOBfOFIXCQponfgZ04klRjgpzCtv8juCOOrHU6r/FpIRkDNbwjWm9i8yBacZGT30bwjK8UW6JSFvDDu747f0ztKyQew8hEivOyqGDwZyrhFImasulsS0/7DB07oUQtaXJ7J8ucGsarttt02D6K8yuCh5bqEVk5Fy4Xlw=="
];
};
}

33
users/pbsds/default.nix Normal file
View File

@ -0,0 +1,33 @@
{ config, pkgs, lib, ... }:
# Primary admin account (pbsds): system user definition + home-manager profile.
{
users.users.pbsds = {
isNormalUser = true;
description = "pbsds";
extraGroups = [ "pbsds" "networkmanager" "wheel" "nixbld" ]; # TODO: NAS stuff
# Seed password for first boot only; change with `passwd` afterwards.
initialHashedPassword = "$6$yNgxTHcP1UYkNwuZ$1sBehnKgPjVnDe0tSV8kyfynWpfjDzuohZX6SoTrMnYFa3/aiMOtI6JppYevl.M6qYhBIT0XBvL6TqSSFWn8B/";
# One key per machine this account is reached from.
openssh.authorizedKeys.keys = [
"ecdsa-sha2-nistp521 AAAAE2VjZHNhLXNoYTItbmlzdHA1MjEAAAAIbmlzdHA1MjEAAACFBAClYXCu7CyStjJ0AqZadUWfuA0h/3kC5FUJk4nTsR0nLXSNp26ETKYY9dID1JQCNgPDDZU3EKCNCpK+nZ/Q09L+agH5XtP6MRIDs0+aXZXy0rcLjS2fgx4ZgIiDGfBlaMCIrM/mdRzVmrYbeMotmkdsLSQ/lFBvX1IuzvUSnyYmRPCXxA== pederbs@hildring"
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDnVaayewel9GWGUYpTdLqfBnYnaBM10Vfq9fxeb9odwjf6pWe78il/5BCgW5EOadR/PeRv/ZYYnIT1uKEJOZkhjY2E6P2/B/JgzwPTwsrrjQsDHd5VjZty097dmf6vj0LXeJHmP9yogjPjGaSxktqyZi2CTFskRfZBPeCsoRMG+Z5bCMOHpXolvGCVWBNRcT3ITVYAAFL7HNPhcN3f5JkQgu0N+ySlMclNNSbHXXv1OIcLMKto6ZDx4DHp7NmU9uSbv8ERAfmoLCgdz1zOg0eVw9Kxs+XpUy3YFDdmPrny/Vq2LCDHljUWtjJI1uBoPF/ngavV+AuX5FHU9OSKNu7H pbsds@knut.pbsds.net"
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC+qv5MogWwOgctQfQeHxUHF2ij6UA8BR4DLXtZClnw6A1CtOjAtZeAW62C8q9OKaIKDO0hqd2vLBkgEno4smqBDJ2ThwKuXrhiHqJzCkXZqIKKx79mpTo7aRpFgkJ7328Ee+tbqa65coL98WRhLnDg69NDaOfSCmH85/D0kuyTG7mYIMdBtFXB/IU0QC9USCSGcUGSnQAEx8S0vaXL7JP043kfEfeqwsea598qX+LFa2UfGwgLBpiWi4QEfYy6fviz2TFkbRYKQImybidzUHZkljjPupqu8U4dIx/jsJM/vew717xZPCU0ZCho77TIU+bYSitD5mjnzuD7LrAdbFgnhkD2sQlD/hUW40kPVT/Tq3DrpDRKC9tniiTaIQV1Pe0k82XwYrvV/hTl8T1ed6TuzhmUggqowAbJRbaBIa1zI672AFFQM8OBIN59ZlLy3V2RZW4fvQk2/xMRdVBT0W5Upx+9rCbH9LCGWL8gNNA/PRJ0L9Ts6cq8kf4tFhFQQrk= pbsds@bjarte"
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDlLTAf5ObSpUU490M/l6+s5m0rxayPeaH23RLvIyoCqGftf/3Yi2iHP8wusBWGrEkXg8Po9YKh2CztflqJBnhsv/HaGYRXNsz3oVf2bSURUepZBkUXkg+T1x9OGG8pfvde8ROWZ8KxwLbAKghHUusyAvtJE9ktDxLpajomXDQlo+v7Hj2v4tMKCG/vHPxf/ni3Icl/8Rwo4zjuxl1MxLftPZv9rxCFv06ujuW6f6Mu5q+damt6ReH7RpOzs1rtDjPSnrRCboY4IbT5P4v6cZCr5hgAblKXHfOzPO9WM7O9tugJeE7eJK6Ps8gvWSHs/48SONSpjcYX3NzsRfxp6RRyD0yGrTDP/Ly6TNZzwZdKPO6GkRbLFXAxSn+ex/zW//R4ECQmof3KPYyjpt7yygICSdRlRocpz5aYxytFqBhelEbQqSZTP8q3HdxqGUplAgaCc0bK+m2ob5cirx3kHK2TyQ2dyCZgOML7AjD3GaclxPjkfEipL3/uFkq6EdsdQFs= pbsds@Svanbjorg"
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCo5/9uHIKhhpVbcLSKslj9wdBiV4YaY/tydTfypNZBMMP2U54640t8JvHHkxCZRur8AqYupellxAqkmKn516Ut0WvfQcNgF7ieI66JHkK1j7kSFHHG1nkJHslwCh2PeYtfx5zHZZq8X9v/UjVGY182BC4BHC5zixmNiUvvc+N24BRT4NwslFmMYVcTdoNBSJXPgte4uUd+FZrAnHQrjYdJVANgI4i1d11mxlDFgJrPJj30KaIDxHAsAWgCEqGLMDO9N1cpGGbXVeXfoGvv+vdCXgbyA8BK7wWwXvy5HlvhpEJo8g84r6uKMMkEf+K1MpTiaNjdu+7/sKD/ZOyDB4RgCBs0DskouWRi+xfxABaKBj6706Z3hpj+GfpuSXrHKgGYXIL4cZHaAlz8GVsN1mUL4eJ12Sk14Od2QUHbzp7TDz5eaWuczPs5W9qXwNDMZcmBBZ3mkt9ZYPvAPRjeLpAKhhA9xPL3hbob5hhAENTWsFRFJEgpm8l362XFIOLHr/M= pbsds@rocm"
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDHV25/jfk0upLl6JOq1tu1n+VNkMr0OOu8nZa3NBZQfqrGiLQuTTFycBd5hhoWBaZewb0R8jm/GESE4gfeiLtObe0bKXo8SVty5hNrIq06BbICXByJR99ux17psaNyp/dvZO7gkjKm3m30q7TfbZANlIwhv0XmqCz8S31ocJddFznWyK3nFau/Lvzpupi0Y+7yHkmcKiWYzZsjluQF90M5X5nIf2x4jj7WY0IkR2l41MOLk4NCQNIor6EyAXnHs78JBS3kY5p2x7t/cBpMDBmbgZePdfjGv/L4vFgYiG1wTZT77PWPA93GHueZWDGUkIvKbNriP/U+bShKnGjIfZttjerhzsFE1V/RctCFToqHkW39439nCj6eFpgUiLHkx/mAUPz/whKP+9x5I3/DQkgYZ7qA424Msdz2wXWNi3465Mtrf5XPsjWNReEWt9I29W19K5OLO9QQVrkgdioSCvxoHLvQypPscPkLVF8srzCVA6npUOrOuJ5zZcK1ax2/0v8= pbsds@frotbjoff"
#"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC8S44NzMNlCGpQ0aMqpv4YCbp4esYKLejsFRtCCA3oSgz+zq0Rbem1S1/vQehC44Ps1JPljiJgb8rj0VFUcuqnDtJP6kYRvvUDBaM7QO8Z4mZjOKYQo/MoidaPYEHakPB4fk4fdDU1u090VvTgPJvNe0UmPoTHbedk4u+OMuvMr8T56OPmwZPrCyRLtc4O+cYoig/cB+Y7DlwNI9wBx3xhShb5tuML+ZR1XyBYgprwoZML5l2pzEeK7dXdmkc4QT4TM13EcNOopsNZymH/xOCsY/yVqVJJC2Smp6mkIk+Or0zdlzxXFp4u3MS4bg5pzFVFfsqJAQGB7laMxtakMbn0if54MOA34hEAdmzdBCc+g9suuqFhA9WPqMsVlxx9khTue0MNoUVflUkm4B51aPbnPe+aycxdqMgfONroOjtBAQYfGnlRUP1qR3AD9Y2ND/NhGA9f8gTKPBRam+lRDWEGQO9HmWQdpeZbfWEyJa82HZcTCIhQyQukfa5PIzwtops= pbsds@pbsds-optiplex7060"
];
#EDITOR = "micro";
#packages = with pkgs; [
#
#];
};
users.groups.pbsds = {};
# The actual user environment (packages, dotfiles) lives in ./home.
home-manager.users.pbsds = import ./home;
}

View File

@ -0,0 +1,282 @@
# Home-manager user environment for pbsds.
# Option reference: https://nix-community.github.io/home-manager/options.html
{ pkgs, config, ... }:
{
  imports = [
    ./modules/jump.nix   # directory bookmarks ("jump"/"mark"/"unmark"/"marks")
    ./modules/micro.nix  # local micro module with bindings/ensurePlugins support
  ];
  nixpkgs.config.allowUnfree = true;
  home.stateVersion = "22.05";
  home.sessionVariables = {
    EDITOR = "micro";
  };
  # Extra interactive-bash setup: terminal/truecolor detection, git-aware
  # prompt, ssh host completion and a few helper functions.
  programs.bash.initExtra = ''
    if [ "$COLORTERM" = "truecolor" ] || [ "$TERM" == "xterm" ]; then
      export TERM=xterm-256color
      export MICRO_TRUECOLOR=1
    fi
    parse_git_branch() {
      git branch 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/ (\1)/'
    }
    export PS1='\[\033[01;32m\]\u@\h\[\033[01;37m\] \[\033[01;34m\]\W\[\033[33m\]$(parse_git_branch)\[\033[01;32m\]\$\[\033[00m\] '
    # ssh autocomplete
    if test -f ~/.ssh/config; then
      complete -W "$(cat ~/.ssh/config | grep '^Host ' | cut -b 6- | grep -v "\*")" ssh
      complete -W "$(cat ~/.ssh/config | grep '^Host ' | cut -b 6- | grep -v "\*")" rssh
      complete -W "$(cat ~/.ssh/config | grep '^Host ' | cut -b 6- | grep -v "\*")" vpn
      complete -W "$(cat ~/.ssh/config | grep '^Host ' | cut -b 6- | grep -v "\*")" lvpn
      complete -W "$(cat ~/.ssh/config | grep '^Host ' | cut -b 6- | grep -v "\*")" dvpn
      complete -W "$(cat ~/.ssh/config | grep '^Host ' | cut -b 6- | grep -v "\*")" scp
      complete -W "$(cat ~/.ssh/config | grep '^Host ' | cut -b 6- | grep -v "\*")" remote-init
    fi
    # remote-exec and tldr
    complete -F _command remote
    complete -F _command remote-quick
    #complete -F _command tldr
    function atom_nix {
      nix-shell -p atom --run "atom $(printf "%q " "$@") --in-process-gpu --no-sandbox"
    }
  '';
  # TODO: split ^
  # User packages. Roughly grouped: file transfer, archives, editing/spelling,
  # media, download tools, modern CLI replacements, nix tooling, python env.
  home.packages = with pkgs; [
    rsync
    bind.dnsutils
    xclip
    zip
    unrar
    unzip
    atool
    p7zip
    bzip2
    gzip
    micro
    aspell
    aspellDicts.en
    aspellDicts.nb
    vimv
    dos2unix
    pandoc
    graphviz
    vgmstream
    gallery-dl
    yt-dlp
    ffmpeg-full
    git
    curl
    wget
    visidata
    lolcat
    toilet
    boxes
    tewisay
    ponysay
    #tldr
    entr
    axel aria
    bat
    xe # xargs alternative
    sd # sed alternative
    fd # find alternative
    silver-searcher # `ag`
    ripgrep
    gron
    jq
    yq
    htmlq
    sysz
    du-dust # du alternative
    ncdu # Disk usage analyzer with an ncurses interface
    mesa-demos
    cage
    gh
    hub
    librespeed-cli
    nix-template
    nix-output-monitor
    nixpkgs-review
    manix
    # Default python interpreter with a batteries-included set of libraries.
    (python3.withPackages (python-packages: with python-packages; [
      requests
      numpy
      scipy
      ptpython
      poetry
      rich
      matplotlib
      more-itertools
      toml
      pyyaml
      virtualenv
    ]))
  ];
  # Shell aliases shared across shells managed by home-manager.
  # NOTE: trailing space in `sudo`/`xargs`/`watch` makes bash expand the
  # following word as an alias too.
  home.shellAliases = {
    ip = "ip -br -color";
    watch = "watch -c ";
    hman = "man -H ";
    #igrep = "grep -i";
    #flexget = "ssh -t knut.pbsds.net sudo -u flexget flexget";
    flexget = "sudo --user=flexget flexget -c /var/lib/flexget/flexget.yml";
    tmux = "systemd-run --scope --user tmux";
    ed = "$EDITOR"; # ed is the standard editor
    de = "$EDITOR";
    dush = "du -shc * | sort -h";
    dushd = "du -shc * .[!.]?* | sort -h";
    diff = "diff -u --color";
    sudo = "sudo ";
    xargs = "xargs ";
    dc = "cd";
    #sl = "ls";
    sl = "exa";
    rssh = "ssh -l root";
    # TODO: wayland detection
    clip = "xclip -sel clip -t text/plain -rmlastnl -i";
    # git gud
    gs = "git status";
    gb = "git blame";
    gl = "git log --oneline --color | head -n 30";
    glg = "git log --all --decorate --oneline --graph";
    gpra = "git pull --rebase --autostash";
    gd = "git diff";
    gdwd = "git diff --word-diff";
    gdwdr = "git diff --word-diff --word-diff-regex=.";
    gds = "git diff --staged";
    gdswd = "git diff --staged --word-diff";
    gdswdr = "git diff --staged --word-diff --word-diff-regex=.";
    gcp = "git cherry-pick";
    gca = "git commit --amend";
    gcara = "git commit --amend --reset-author";
    gpo = "git push origin";
    gpasr = "git pull --autostash --rebase";
    #gfr = "git fetch origin master && git rebase FETCH_HEAD";
    gfr = "git pull --rebase";
    gp = "git pull --rebase --autostash";
    python = "ptpython"; # this has too many problems...
    cpython = "python";
    pwd-fqdn = ''echo "$(whoami)@$(hostname -f):$(printf "%q" "$(realpath .)/")"'';
    http-server = "${pkgs.python3}/bin/python -m http.server";
    manix-fzf = ''manix "" 2>/dev/null | grep '^# ' | sed 's/^# \(.*\) (.*/\1/;s/ (.*//;s/^# //' | fzf --preview="manix '{}'" | xargs manix'';
  };
  programs.bash.enable = true;
  #programs.bash.enableCompletion = true;
  programs.bash.shellOptions = [
    # Append to history file rather than replacing it.
    "histappend"
    # check the window size after each command and, if
    # necessary, update the values of LINES and COLUMNS.
    "checkwinsize"
    # Extended globbing.
    "extglob"
    "globstar"
    # Warn if closing shell with running jobs.
    "checkjobs"
  ];
  programs.fzf.enable = true; # TODO: does this conflict with system-wide setup?
  #programs.git.gitui.enable = true;
  programs.git.enable = true;
  programs.git.delta.enable = true;
  #programs.git.lfs.enable = true;
  #programs.git.signing
  #programs.git.userName = "pbsds"
  programs.git.userName = "Peder Bergebakken Sundt";
  programs.git.userEmail = "pbsds@hotmail.com";
  programs.git.ignores = [ "result" "__pycache__" ];
  programs.exa.enable = true;
  programs.exa.enableAliases = true;
  programs.direnv.enable = true;
  programs.just.enable = true;
  #programs.mpv.bindings
  #programs.mpv.config
  /**/
  # TODO: upstream this
  # Configuration for the local micro module (./modules/micro.nix).
  programs.micro.enable = true;
  programs.micro.trueColor = true;
  programs.micro.settings = {
    colorscheme = "railscast";
    rmtrailingws = true;
    tabstospaces = true;
  };
  #xdg.configFile."micro/bindings.json".source = (pkgs.formats.json {}).generate "micro-bindings" {
  programs.micro.bindings = {
    "Alt-/" = "lua:comment.comment";
    "Alt-d" = "SpawnMultiCursor";
    "Alt-j" = "lua:joinLines.joinLines";
    "Alt-l" = "command:lower";
    "Alt-u" = "command:upper";
    "AltLeft" = "PreviousTab";
    "AltRight" = "NextTab";
    "Ctrl-j" = "EndOfLine,CursorRight,OutdentLine,OutdentLine,OutdentLine,OutdentLine,OutdentLine,OutdentLine,OutdentLine,OutdentLine,OutdentLine,OutdentLine,OutdentLine,OutdentLine,OutdentLine,OutdentLine,Backspace";
    "CtrlDown" = "None";
    "CtrlUnderscore" = "lua:comment.comment";
    "CtrlUp" = "None";
    "Escape" = "RemoveAllMultiCursors";
    "Shift-PageDown" = "SelectPageDown";
    "Shift-PageUp" = "SelectPageUp";
  };
  programs.micro.ensurePlugins = [
    "aspell"
    "detectindent"
    "editorconfig"
    "joinLines"
    "manipulator"
    "quoter"
  ];
  /**/
  programs.nix-index.enable = true;
  programs.tealdeer.enable = true;
  xdg.enable = true;
  #xdg.desktopEntries
  gtk.enable = true; # TODO: only if programs.dconf is enabled
  gtk.theme.name = "vimix-dark-ruby";
  gtk.theme.package = pkgs.vimix-gtk-themes;
  gtk.iconTheme.name = "Flat-Remix-Blue-Dark";
  gtk.iconTheme.package = pkgs.flat-remix-icon-theme;
  programs.jump.enableBash = true;
  # beets music library manager; library db kept in XDG config, media on NAS
  programs.beets = {
    enable = true;
    settings = {
      directory = "/mnt/meconium/beets_preprocessed/data";
      #library = "/mnt/meconium/beets_preprocessed/library.db";
      library = "${config.xdg.configHome}/beets/library_preprocessed.db";
      #directory = "/mnt/meconium/beets_music/library";
      #library = "${config.xdg.configHome}/beets/library_meconium.db";
      ##library = "/mnt/meconium/beets_music/data.db";
    };
  };
}

View File

@ -0,0 +1,71 @@
# Home-manager module providing "jump": quick filesystem navigation via
# symbolic-link bookmarks stored under `marksPath`.
# Based on http://jeroenjanssens.com/2013/08/16/quickly-navigate-your-filesystem-from-the-command-line.html
{ config, lib, pkgs, ... }:
with lib;
let
  cfg = config.programs.jump;
  # The shell functions are installed if at least one shell is enabled.
  enabled = cfg.enableBash || cfg.enableZsh;
in {
  # NOTE(review): assumes `pbsds` exists in hm.maintainers — verify before upstreaming.
  meta.maintainers = [ hm.maintainers.pbsds ];
  options.programs.jump = {
    # Jumping around with symbolic links
    # Based on http://jeroenjanssens.com/2013/08/16/quickly-navigate-your-filesystem-from-the-command-line.html
    enableBash = mkEnableOption "jump - Quickly Navigate your Filesystem";
    enableZsh = mkEnableOption "jump - Quickly Navigate your Filesystem";
    # Directory holding one symlink per bookmark, each pointing at a target dir.
    marksPath = mkOption {
      type = types.str;
      default = "$HOME/.marks";
      description = ''
        Where the jump marks are stored
      '';
    };
  };
  config = mkIf enabled {
    #home.packages = [ cfg.package ];
    # Exported so the shell functions below can locate the marks directory.
    home.sessionVariables = { _JUMP_MARKPATH = cfg.marksPath; };
    programs = let
      # Shell script defining jump/mark/unmark/marks plus bash completion.
      # Written for bash; also injected into zsh when enabled (completion
      # uses bash-style COMP_WORDS/compgen — TODO confirm it works under zsh).
      rcScript = ''
        function jump {
          pushd . > /dev/null
          cd -P "$_JUMP_MARKPATH/$1" 2>/dev/null || echo "No such mark: $1"
        }
        function mark {
          mkdir -p "$_JUMP_MARKPATH" &&
          test ! -L "$_JUMP_MARKPATH/$1" \
            && ln -s "$(pwd)" "$_JUMP_MARKPATH/$1" \
            || echo "mark already exists!"
        }
        function unmark {
          test ! -z "$1" \
            && rm -i "$_JUMP_MARKPATH/$1"
        }
        function marks {
          #ls -l "$_JUMP_MARKPATH" | sed 's/	/ /g' | cut -d' ' -f9- | sed 's/ -/\t-/g' && echo
          ls --color=always -l "$_JUMP_MARKPATH" | tr -s ' ' | cut -d' ' -f9- | sed -e 's/ -> /§/g' | column -t -s '§' -o ' -> '
        }
        _complete_jump_marks() {
          local curw=''${COMP_WORDS[COMP_CWORD]}
          local wordlist=$(find $_JUMP_MARKPATH -type l -printf "%f\n")
          COMPREPLY=($(compgen -W ''\'''${wordlist[@]}' -- "$curw"))
          return 0
        }
        complete -F _complete_jump_marks jump unmark
      '';
    in {
      # TODO: fish
      bash.initExtra = mkIf cfg.enableBash rcScript;
      zsh.initExtra = mkIf cfg.enableZsh rcScript;
    };
  };
}

View File

@ -0,0 +1,109 @@
# Home-manager module for the micro terminal editor: settings, key bindings,
# truecolor support, and best-effort plugin installation on activation.
{ config, lib, pkgs, ... }:
with lib;
let
  cfg = config.programs.micro;
  jsonFormat = pkgs.formats.json { };
in {
  meta.maintainers = [ hm.maintainers.mforster maintainers.pbsds ];
  options = {
    programs.micro = {
      enable = mkEnableOption "micro, a terminal-based text editor";
      # Sets MICRO_TRUECOLOR=1 in the session when enabled.
      trueColor = mkOption {
        type = types.bool;
        default = true;
        description =
          "Enables support for the whole color range, should the terminal allow.";
      };
      settings = mkOption {
        type = jsonFormat.type;
        default = { };
        example = literalExpression ''
          {
            autosu = false;
            cursorline = false;
          }
        '';
        description = ''
          Configuration written to
          <filename>$XDG_CONFIG_HOME/micro/settings.json</filename>. See
          <link xlink:href="https://github.com/zyedidia/micro/blob/master/runtime/help/options.md"/>
          for supported values.
        '';
      };
      bindings = mkOption {
        type = jsonFormat.type;
        default = { };
        example = literalExpression ''
          {
            "Alt-d" = "SpawnMultiCursor";
            "Escape" = "RemoveAllMultiCursors";
            "CtrlDown" = "None";
            "CtrlUp" = "None";
            "Shift-PageDown" = "SelectPageDown";
            "Shift-PageUp" = "SelectPageUp";
          }
        '';
        description = ''
          Configuration written to
          <filename>$XDG_CONFIG_HOME/micro/bindings.json</filename>. See
          <link xlink:href="https://github.com/zyedidia/micro/blob/master/runtime/help/keybindings.md"/>
          for supported values.
        '';
      };
      ensurePlugins = mkOption {
        type = types.listOf types.str;
        default = [ ];
        example = literalExpression ''
          [
            "aspell"
          ]
        '';
        description = ''
          Install micro plugins during activation. See
          <link xlink:href="https://micro-editor.github.io/plugins.html"/>
          for a listing of available plugins.
        '';
      };
    };
  };
  config = mkIf cfg.enable {
    home.packages = [ pkgs.micro ];
    home.sessionVariables = mkIf cfg.trueColor { MICRO_TRUECOLOR = "1"; };
    xdg.configFile."micro/settings.json".source =
      jsonFormat.generate "micro-settings" cfg.settings;
    xdg.configFile."micro/bindings.json".source =
      jsonFormat.generate "micro-bindings" cfg.bindings;
    # Install any missing plugins during activation.
    # NOTE: `micro -plugin install` fetches from the network; activation may
    # fail offline — TODO consider making this best-effort.
    home.activation = let
      mkInstall = pluginName:
        let
          # Escape the *entire* path: previously only the plugin name was
          # escaped, which broke the `test -d` check if XDG_CONFIG_HOME
          # contained spaces or shell metacharacters.
          pluginDir = lib.escapeShellArg
            "${config.xdg.configHome}/micro/plug/${pluginName}";
        in ''
          if ! test -d ${pluginDir}; then
            (set -x
            $DRY_RUN_CMD ${pkgs.micro}/bin/micro -plugin install ${
              lib.escapeShellArg pluginName
            }
            )
          fi
        '';
      installs = lib.concatStringsSep "\n" (map mkInstall cfg.ensurePlugins);
    in mkIf (cfg.ensurePlugins != [ ]) {
      # Run after files are linked so the plug directory check is accurate.
      microPluginSetup = lib.hm.dag.entryAfter [ "writeBoundary" ] installs;
    };
  };
}