galadriel uncommitted changes ai stuff

Adrian Gunnar Lauterer 2024-12-07 14:34:16 +01:00
parent 9fec5b56e8
commit a7277567d4
9 changed files with 37 additions and 39 deletions

home/common/unfree.nix Normal file
View File

@@ -0,0 +1,6 @@
{ pkgs, lib, config, ... }:
{
imports = [
];
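# allow unfree packages for any home configuration that imports this module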
nixpkgs.config.allowUnfree = true;
}

View File

@@ -2,6 +2,7 @@
{
imports = [
../common/sshconfig.nix
../common/unfree.nix
];
programs.nix-index = {

View File

@@ -2,6 +2,7 @@
{
imports = [
../common/sshconfig.nix
../common/unfree.nix
];
programs.nix-index = {

View File

@@ -70,12 +70,11 @@
jftui
sonixd
unpackerr
qbittorrent-nox
python3
python3Packages.torchWithCuda
ollama
];
unstable.qbittorrent-nox
];
# Some programs need SUID wrappers, can be configured further or are
# started in user sessions.
# programs.mtr.enable = true;

View File

@@ -9,45 +9,36 @@
# Load nvidia driver for Xorg and Wayland
services.xserver.videoDrivers = ["nvidia"];
# load the nvidia kernel module early (in the initrd)
boot.initrd.kernelModules = [ "nvidia" ];
hardware.nvidia = {
# Modesetting is required.
modesetting.enable = true;
# Nvidia power management. Experimental, and can cause sleep/suspend to fail.
#powerManagement.enable = true;
# Fine-grained power management. Turns off GPU when not in use.
# Experimental and only works on modern Nvidia GPUs (Turing or newer).
# Fine-grained power management. Turns off GPU when not in use. Experimental and only works on modern Nvidia GPUs (Turing or newer).
#powerManagement.finegrained = true;
# Use the NVidia open source kernel module (not to be confused with the
# independent third-party "nouveau" open source driver).
# Support is limited to the Turing and later architectures. Full list of
# supported GPUs is at:
# https://github.com/NVIDIA/open-gpu-kernel-modules#compatible-gpus
# Only available from driver 515.43.04+
# Use the NVidia open source kernel module (not to be confused with the independent third-party "nouveau" open source driver).
# Currently alpha-quality/buggy, so false is the recommended setting.
open = false;
# Enable the Nvidia settings menu,
# accessible via `nvidia-settings`.
# Enable the Nvidia settings menu, accessible via `nvidia-settings`.
#nvidiaSettings = true;
# Optionally, you may need to select the appropriate driver version for your specific GPU.
package = config.boot.kernelPackages.nvidiaPackages.stable;
};
# Enable the CUDA toolkit
#install packages
environment.systemPackages = with pkgs; [
#cudaPackages.cudnn
#cudaPackages.cudatoolkit
cudaPackages.cudnn
cudaPackages.cudatoolkit
nvtopPackages.nvidia
#cudaPackages.tensorrt_8_6_0 # needs to be added to the store manually, and is a pain because of the license agreement and garbage collection
];
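# cuda_cudart is unfree, so it is allow-listed here by package name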
nixpkgs.config.allowUnfreePredicate = pkg: builtins.elem (lib.getName pkg) [
"cuda_cudart"
];
}
}

View File

@@ -5,8 +5,8 @@
./base.nix
../services/podman.nix
../services/ollama.nix
#../services/ollama-webui.nix
#../services/whisper.nix
# ../services/ollama-webui.nix
# ../services/whisper.nix
];
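# the Python/ML toolchain below is pulled from the unstable channel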
environment.systemPackages = with pkgs.unstable; [
@@ -26,25 +26,25 @@
python3Packages.faster-whisper
python3Packages.scipy
# python3Packages.numba-scipy
#python3Packages.scikit-image
# python3Packages.scikit-image
# python3Packages.traittypes
# python3Packages.statsmodels
python3Packages.scikits-odes
# python3Packages.scikits-odes
python3Packages.sympy
python3Packages.numpy
python3Packages.pandas
python3Packages.matplotlib
#python3Packages.tensorflow
#python3Packages.tensorboard
#python3Packages.keras
python3Packages.matplotlib
# python3Packages.tensorflow
# python3Packages.tensorboard
# python3Packages.keras
python3Packages.transformers
python3Packages.torch
#python3Packages.torchvision-bin
#python3Packages.torchsde
#python3Packages.torchaudio-bin
#python3Packages.torchWithRocm
#python3Packages.torchWithCuda
#python3Packages.scikit-learn-extra
# python3Packages.torchvision-bin
# python3Packages.torchsde
# python3Packages.torchaudio-bin
# python3Packages.torchWithRocm
# python3Packages.torchWithCuda
# python3Packages.scikit-learn-extra
python3Packages.langchain
python3Packages.langchain-community
python3Packages.langchain-core

View File

@@ -27,7 +27,7 @@ imports =
# just allow unfree, I'm fine with it.
nixpkgs.config.allowUnfree = true;
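# compressed in-RAM swap, sized to 25% of system memory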
zramSwap = {
enable = true;
memoryPercent = 25;

View File

@@ -1,10 +1,11 @@
{ config, pkgs, lib, ... }:
{
environment.systemPackages = [
pkgs.ollama
pkgs.unstable.ollama
];
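# run ollama as a system service from the unstable channel, listening on all interfaces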
services.ollama.enable = true;
services.ollama.package = pkgs.unstable.ollama;
services.ollama.listenAddress = "0.0.0.0:11434";
services.ollama.models = "/var/lib/ollama/models";
services.ollama.home = "/var/lib/ollama";
@@ -14,7 +15,6 @@
hostname = config.networking.hostName;
in
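# pick the acceleration backend per host: cuda for galadriel and boromir, rocm for aragorn, none otherwise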
if hostname == "galadriel" then "cuda"
else if hostname == "boromir" then "cuda"
else if hostname == "aragorn" then "rocm"
else null);

View File

@@ -118,7 +118,7 @@ in
serviceConfig = {
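# before start: create the qBittorrent profile directory, relax its permissions, and copy the sops-rendered config into place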
ExecStartPre = "${pkgs.bash}/bin/bash -c '${pkgs.coreutils}/bin/mkdir -p ${path} && ${pkgs.coreutils}/bin/chmod -R 755 ${path} && ${pkgs.coreutils}/bin/cp ${config.sops.templates."qbittorrent/configuration".path} ${path}/.config/qBittorrent/qBittorrent.conf'";
ExecStart = "${pkgs.qbittorrent-nox}/bin/qbittorrent-nox";
ExecStart = "${pkgs.unstable.qbittorrent-nox}/bin/qbittorrent-nox";
User = "qbittorrent";
Group = "media";
Restart = "on-failure";