This commit is contained in:
Denis Manherz 2024-01-11 18:13:38 +01:00
parent 7b91a42165
commit a227d57d28
10 changed files with 389 additions and 21 deletions

View file

@ -32,6 +32,22 @@
"type": "github"
}
},
"comfyui-src": {
"flake": false,
"locked": {
"lastModified": 1702736172,
"narHash": "sha256-BYZHfnhdubasOAhUyq/iW4HvYKPiqBwiXY3ozZXU1Oc=",
"owner": "comfyanonymous",
"repo": "ComfyUI",
"rev": "6453dc1ca2d98d89af7cf312bb48d1e3fd2ca27f",
"type": "github"
},
"original": {
"owner": "comfyanonymous",
"repo": "ComfyUI",
"type": "github"
}
},
"flake-parts": {
"inputs": {
"nixpkgs-lib": [
@ -251,6 +267,7 @@
"inputs": {
"a1111-src": "a1111-src",
"bark-gui-src": "bark-gui-src",
"comfyui-src": "comfyui-src",
"flake-parts": "flake-parts",
"hercules-ci-effects": "hercules-ci-effects",
"invokeai-src": "invokeai-src",

View file

@ -7,6 +7,10 @@
description = "A Nix Flake that makes AI reproducible and easy to run";
inputs = {
comfyui-src = {
url = github:comfyanonymous/ComfyUI;
flake = false;
};
nixpkgs-stable = {
url = github:NixOS/nixpkgs/nixos-23.05;
};
@ -52,17 +56,38 @@
};
outputs = { flake-parts, invokeai-src, hercules-ci-effects, ... }@inputs:
flake-parts.lib.mkFlake { inherit inputs; } {
perSystem = { system, ... }:{
_module.args.pkgs = import inputs.nixpkgs { config.allowUnfree = true; inherit system; cudaSupport = true; };
perSystem = { system, ... }: {
# _module.args.pkgs = import inputs.nixpkgs { config.allowUnfree = true; inherit system; config.cudaSupport = true; };
_module.args.pkgs = import inputs.nixpkgs {
inherit system;
/*overlays = [
(
final: prev: {
final.python310 = prev.python310.override {
enableOptimizations = true;
reproducibleBuild = false;
self = final.python310;
buildInputs = [ final.ffmpeg-full ];
};
pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [
(
python-final: python-prev: {
torch = python-prev.torch-bin;
}
)
];
}
)
];*/
config = { allowUnfree = true; cudaSupport = true; };
};
legacyPackages = {
koboldai = builtins.throw ''
koboldai has been dropped from nixified.ai due to lack of upstream development,
try textgen instead which is better maintained. If you would like to use the last
available version of koboldai with nixified.ai, then run:
koboldai has been dropped from nixified.ai due to lack of upstream development,
try textgen instead which is better maintained. If you would like to use the last
available version of koboldai with nixified.ai, then run:
nix run github:nixified.ai/flake/0c58f8cba3fb42c54f2a7bf9bd45ee4cbc9f2477#koboldai
nix run github:nixified.ai/flake/0c58f8cba3fb42c54f2a7bf9bd45ee4cbc9f2477#koboldai
'';
};
};
@ -72,8 +97,9 @@
debug = true;
imports = [
hercules-ci-effects.flakeModule
# ./modules/nixpkgs-config
# ./modules/nixpkgs-config
./overlays
./projects/comfyui
./projects/automatic1111
./projects/invokeai
./projects/textgen

View file

@ -23,8 +23,53 @@
, transformers
, timm
#, flash-attn
, cudaPackages
, stable-pkgs
}:
let
inherit (cudaPackages) cudaFlags cudnn;
# Some packages are not available on all platforms
nccl = cudaPackages.nccl or null;
setBool = v: if v then "1" else "0";
# https://github.com/pytorch/pytorch/blob/v2.0.1/torch/utils/cpp_extension.py#L1744
supportedTorchCudaCapabilities =
let
real = ["3.5" "3.7" "5.0" "5.2" "5.3" "6.0" "6.1" "6.2" "7.0" "7.2" "7.5" "8.0" "8.6" "8.7" "8.9" "9.0"];
ptx = lists.map (x: "${x}+PTX") real;
in
real ++ ptx;
# NOTE: The lists.subtractLists function is perhaps a bit unintuitive. It subtracts the elements
# of the first list *from* the second list. That means:
# lists.subtractLists a b = b - a
# For CUDA
supportedCudaCapabilities = lists.intersectLists cudaFlags.cudaCapabilities supportedTorchCudaCapabilities;
unsupportedCudaCapabilities = lists.subtractLists supportedCudaCapabilities cudaFlags.cudaCapabilities;
# Use trivial.warnIf to print a warning if any unsupported GPU targets are specified.
gpuArchWarner = supported: unsupported:
trivial.throwIf (supported == [ ])
(
"No supported GPU targets specified. Requested GPU targets: "
+ strings.concatStringsSep ", " unsupported
)
supported;
# Create the gpuTargetString.
gpuTargetString = strings.concatStringsSep ";" (
if gpuTargets != [ ] then
# If gpuTargets is specified, it always takes priority.
gpuTargets
else if rocmSupport then
rocmPackages.clr.gpuTargets
else
gpuArchWarner supportedCudaCapabilities unsupportedCudaCapabilities
);
version = "0.0.22.post7";
in
buildPythonPackage {
@ -42,6 +87,16 @@ buildPythonPackage {
fetchSubmodules = true;
};
# Export CUDA build-environment variables consumed by xformers' setup.py.
# FIX: `stable-packages` was an undefined name — the function argument is
# `stable-pkgs` (already used correctly by EXTRA_LD_FLAGS below).
preConfigure = ''
  export TORCH_CUDA_ARCH_LIST="${gpuTargetString}"
  export CUDNN_INCLUDE_DIR=${cudnn.dev}/include
  export CUDNN_LIB_DIR=${cudnn.lib}/lib
  export CUPTI_INCLUDE_DIR=${cudaPackages.cuda_cupti.dev}/include
  export CUPTI_LIBRARY_DIR=${cudaPackages.cuda_cupti.lib}/lib
  export CUDA_PATH=${stable-pkgs.cudatoolkit}
  export EXTRA_LD_FLAGS="-L${stable-pkgs.linuxPackages.nvidia_x11_production}/lib"
'';
preBuild = ''
cat << EOF > ./xformers/version.py
# noqa: C801
@ -51,13 +106,43 @@ buildPythonPackage {
nativeBuildInputs = [
which
];
] ++ (with cudaPackages; [
autoAddOpenGLRunpathHook
cuda_nvcc
]);
propagatedBuildInputs = [
numpy
torch
];
# CUDA libraries required at build/link time.
# FIX: `stable-packages.cudatoolkit` → `stable-pkgs.cudatoolkit`; the
# function argument (and the sibling nvidia_x11 reference) is `stable-pkgs`.
buildInputs = with cudaPackages; [
  cuda_cccl.dev # <thrust/*>
  cuda_cudart # cuda_runtime.h and libraries
  cuda_cupti.dev # For kineto
  cuda_cupti.lib # For kineto
  cuda_nvcc.dev # crt/host_config.h; even though we include this in nativeBuildinputs, it's needed here too
  cuda_nvml_dev.dev # <nvml.h>
  cuda_nvrtc.dev
  cuda_nvrtc.lib
  cuda_nvtx.dev
  cuda_nvtx.lib # -llibNVToolsExt
  cudnn.dev
  cudnn.lib
  libcublas.dev
  libcublas.lib
  libcufft.dev
  libcufft.lib
  libcurand.dev
  libcurand.lib
  libcusolver.dev
  libcusolver.lib
  libcusparse.dev
  libcusparse.lib
  effectiveMagma
  numactl
] ++ [ stable-pkgs.linuxPackages.nvidia_x11_production gcc stable-pkgs.cudatoolkit ];
pythonImportsCheck = [ "xformers" ];
dontUseCmakeConfigure = true;
@ -89,4 +174,4 @@ buildPythonPackage {
license = licenses.bsd3;
maintainers = with maintainers; [ happysalada ];
};
}
}

View file

@ -13,11 +13,11 @@ in
#cant i do like only for this for invoke other version?
(
final: prev: {
pillow = pkgs.python3.pkgs.callPackage ../../packages/pillow { };
pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [
(
python-final: python-prev: {
pillow = python-final.callPackage ../../packages/pillow { };
xformers = python-final.callPackage ../../packages/xformers { inherit stable-pkgs; };
}
)
];
@ -48,7 +48,6 @@ in
#../../packages/torch-fidelity
#../../packages/torch-grammar
../../packages/ultralytics
../../packages/xformers
../../packages/zipunicode
])
(final: prev: lib.mapAttrs

View file

@ -38,8 +38,7 @@ python3Packages.buildPythonPackage {
taming-transformers-rom1504
timm
tomesd
#torchWithCuda
#torch-bin
torchWithCuda
transformers
xformers
@ -69,6 +68,8 @@ python3Packages.buildPythonPackage {
patches = [ ./_outputpaths.patch ];
nativeBuildInputs = [ pkgs.cudatoolkit ];
buildPhase =
''
runHook preBuild
@ -96,12 +97,12 @@ python3Packages.buildPythonPackage {
chmod +x launch.py
makeWrapper "$out/launch.py" $out/bin/launch-wrapped.py \
--run 'export COMMANDLINE_ARGS="''${COMMANDLINE_ARGS:-\
--data-dir $HOME/webui --skip-install \
--data-dir $HOME/webui --skip-install --xformers \
--theme dark --ckpt-dir $HOME/webui/models/ckpt \
--embeddings-dir $HOME/webui/models/embeddings \
--medvram --no-half-vae}"' \
--set-default PYTHONPATH $PYTHONPATH \
--chdir $out
--chdir $out --set-default CUDA_PATH ${pkgs.cudatoolkit}
rm -rf dist

View file

@ -20,15 +20,13 @@ python3Packages.buildPythonPackage {
pytorch-seed
safetensors
scipy
torch-bin
torchaudio-bin
torch
torchaudio
transformers
vector-quantize-pytorch
];
#nativeBuildInputs = with python3Packages; [ pythonRelaxDepsHook pip ];
nativeBuildInputs = with python3Packages; [ setuptools pip ];
#pythonRemoveDeps = [ "clip" "pyreadline3" "flaskwebgui" "opencv-python" ];
pythonRelaxDeps = [ "dnspython" "flask" "requests" "numpy" "pytorch-lightning" "torchsde" "uvicorn" "invisible-watermark" "accelerate" "scikit-image" "safetensors" "torchvision" "test-tube" "fastapi" ];
makeWrapperArgs = [
'' --set-default PYTHONPATH=$PYTHONPATH ''

View file

@ -0,0 +1,40 @@
{ config, inputs, lib, withSystem, ... }:
{
perSystem = { config, pkgs, system, ... }:
let
src = inputs.comfyui-src;
stable-pkgs = import inputs.nixpkgs-stable {
allowUnfree = true;
cudaSupport = true;
inherit system;
};
mkComfyUIVariant = args: pkgs.python310Packages.callPackage ./package.nix ({
inherit src;
} // args);
in
{
packages = {
comfyui-nvidia = mkComfyUIVariant { };
};
};
#flake.nixosModules =
/* let
packageModule = pkgAttrName: { pkgs, ... }: {
services.comfyui.package = withSystem pkgs.system (
{ config, ... }: lib.mkOptionDefault config.packages.${pkgAttrName}
);
};
in
{
comfyui = ./nixos;
comfyui-nvidia = {
imports = [
config.flake.nixosModules.a1111
(packageModule "comfyui-nvidia")
];
};
};*/
}

View file

@ -0,0 +1,131 @@
{ config, lib, ... }:
let
inherit (lib)
mkIf mkOption mkEnableOption mkRenamedOptionModule types
escapeShellArgs flatten getExe mapAttrsToList
isBool isFloat isInt isList isString
floatToString optionalString
;
cfg = config.services.comfyui;
in
{
imports = map ({ old, new ? old }: mkRenamedOptionModule [ "services" "comfyui" old ] [ "services" "comfyui" "settings" new ]) [
{ old = "host"; }
{ old = "port"; }
{ old = "dataDir"; new = "root"; }
{ old = "precision"; }
];
options.services.a1111 = {
enable = mkEnableOption "Automatic1111 UI for Stable Diffusion";
package = mkOption {
description = "Which Automatic1111 package to use.";
type = types.package;
};
user = mkOption {
description = "Which user to run A1111 as.";
default = "a1111";
type = types.str;
};
group = mkOption {
description = "Which group to run A1111 as.";
default = "a1111";
type = types.str;
};
settings = mkOption {
description = "Structured command line arguments.";
default = { };
type = types.submodule {
freeformType = with types; let
atom = nullOr (oneOf [
bool
str
int
float
]);
in attrsOf (either atom (listOf atom));
options = {
#listen = mkOption {
# description = "Launch gradio with 0.0.0.0 as server name, allowing to respond to network requests.";
# default = false;
# type = types.bool;
#};
port = mkOption {
description = "Launch gradio with given server port, you need root/admin rights for ports < 1024; defaults to 7860 if available.";
default = 7860;
type = types.port;
};
data-dir = mkOption {
description = "Where to store A1111's state.";
default = "/var/lib/a1111";
type = types.path;
};
ckpt-dir = mkOption {
description = "Path to A1111's SD models.";
default = "/var/lib/models/ckpt";
type = types.path;
};
};
};
};
extraArgs = mkOption {
description = "Additional raw command line arguments.";
default = [];
type = with types; listOf str;
};
};
config = let
cliArgs = (flatten (mapAttrsToList (n: v:
if v == null then []
#else if isBool v then [ "--${optionalString (!v) "no-"}${n}" ]
else if isInt v then [ "--${n}" "${toString v}" ]
else if isFloat v then [ "--${n}" "${floatToString v}" ]
else if isString v then ["--${n}" v ]
else if isList v then [ "--${n}" (toString v) ]
else throw "Unhandled type for setting \"${n}\""
) cfg.settings)) ++ cfg.extraArgs;
in mkIf cfg.enable {
users.users = mkIf (cfg.user == "a1111") {
a1111 = {
isSystemUser = true;
inherit (cfg) group;
};
};
users.groups = mkIf (cfg.group == "a1111") {
a1111 = {};
};
systemd.services.a1111 = {
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
environment = {
HOME = "${cfg.settings.data-dir}/.home";
COMMANDLINE_ARGS = escapeShellArgs cliArgs;
NIXIFIED_AI_NONINTERACTIVE = "1";
};
serviceConfig = {
User = cfg.user;
Group = cfg.group;
ExecStart = "${getExe cfg.package}";
PrivateTmp = true;
};
};
systemd.tmpfiles.rules = [
"d '${cfg.settings.data-dir}/' 0755 ${cfg.user} ${cfg.group} - -"
"d '${cfg.settings.data-dir}/configs' 0755 ${cfg.user} ${cfg.group} - -"
"d '${cfg.settings.data-dir}/.home' 0750 ${cfg.user} ${cfg.group} - -"
];
};
}

View file

@ -0,0 +1,70 @@
{ src
, buildPythonPackage
, torchvision-bin
, torch-bin
, safetensors
, psutil
, einops
, transformers
, scipy
, torchsde
, pillow
, torch
, torchvision
, accelerate
}:
buildPythonPackage {
pname = "ComfyUI";
format = "other";
version = "latest";
inherit src;
propagatedBuildInputs = [
accelerate
torchvision
torch
safetensors
psutil
einops
transformers
scipy
pillow
torchsde
];
buildPhase =
''
runHook preBuild
mkdir -p dist
cp -R . $out
chmod -R +w $out
cd $out
#make main.py executable > shebang
mkdir -p $out/bin
cat <<-EOF > main.py
$(echo "#!/usr/bin/python")
$(cat main.py)
EOF
chmod +x main.py
makeWrapper "$out/main.py" $out/bin/main-wrapped.py \
--set-default PYTHONPATH $PYTHONPATH \
rm -rf dist
runHook postBuild
'';
meta = {
description = "The most powerful and modular stable diffusion GUI and backend.";
homepage = "https://github.com/comfyanonymous/ComfyUI.git";
mainProgram = "main-wrapped.py";
};
#Tiled VAE supported without additional dependencies
#Infinit image browser couple of deps
#civit-ai browser + couple of deps
#animatediff --> needs deforum for frame interpolation
#deforum
#controlnet
}

View file

@ -2,6 +2,7 @@
# misc
, lib
, src
, pkgs
# extra deps
}: