diff --git a/flake.lock b/flake.lock index a596135..fadc031 100644 --- a/flake.lock +++ b/flake.lock @@ -32,6 +32,22 @@ "type": "github" } }, + "comfyui-src": { + "flake": false, + "locked": { + "lastModified": 1702736172, + "narHash": "sha256-BYZHfnhdubasOAhUyq/iW4HvYKPiqBwiXY3ozZXU1Oc=", + "owner": "comfyanonymous", + "repo": "ComfyUI", + "rev": "6453dc1ca2d98d89af7cf312bb48d1e3fd2ca27f", + "type": "github" + }, + "original": { + "owner": "comfyanonymous", + "repo": "ComfyUI", + "type": "github" + } + }, "flake-parts": { "inputs": { "nixpkgs-lib": [ @@ -251,6 +267,7 @@ "inputs": { "a1111-src": "a1111-src", "bark-gui-src": "bark-gui-src", + "comfyui-src": "comfyui-src", "flake-parts": "flake-parts", "hercules-ci-effects": "hercules-ci-effects", "invokeai-src": "invokeai-src", diff --git a/flake.nix b/flake.nix index 2c738c1..e5adfce 100644 --- a/flake.nix +++ b/flake.nix @@ -7,6 +7,10 @@ description = "A Nix Flake that makes AI reproducible and easy to run"; inputs = { + comfyui-src = { + url = github:comfyanonymous/ComfyUI; + flake = false; + }; nixpkgs-stable = { url = github:NixOS/nixpkgs/nixos-23.05; }; @@ -52,17 +56,38 @@ }; outputs = { flake-parts, invokeai-src, hercules-ci-effects, ... }@inputs: flake-parts.lib.mkFlake { inherit inputs; } { - perSystem = { system, ... }:{ - _module.args.pkgs = import inputs.nixpkgs { config.allowUnfree = true; inherit system; }; + perSystem = { system, ... 
}: { + # _module.args.pkgs = import inputs.nixpkgs { config.allowUnfree = true; inherit system; config.cudaSupport = true; }; + _module.args.pkgs = import inputs.nixpkgs { + inherit system; + /*overlays = [ + ( + final: prev: { + final.python310 = prev.python310.override { + enableOptimizations = true; + reproducibleBuild = false; + self = final.python310; + buildInputs = [ final.ffmpeg-full ]; + }; + pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [ + ( + python-final: python-prev: { + torch = python-prev.torch-bin; + } + ) + ]; + } + ) + ];*/ + config = { allowUnfree = true; cudaSupport = true; }; + }; legacyPackages = { koboldai = builtins.throw '' + koboldai has been dropped from nixified.ai due to lack of upstream development, + try textgen instead which is better maintained. If you would like to use the last + available version of koboldai with nixified.ai, then run: - - koboldai has been dropped from nixified.ai due to lack of upstream development, - try textgen instead which is better maintained. 
If you would like to use the last - available version of koboldai with nixified.ai, then run: - - nix run github:nixified.ai/flake/0c58f8cba3fb42c54f2a7bf9bd45ee4cbc9f2477#koboldai + nix run github:nixified.ai/flake/0c58f8cba3fb42c54f2a7bf9bd45ee4cbc9f2477#koboldai ''; }; }; @@ -72,8 +97,9 @@ debug = true; imports = [ hercules-ci-effects.flakeModule -# ./modules/nixpkgs-config + # ./modules/nixpkgs-config ./overlays + ./projects/comfyui ./projects/automatic1111 ./projects/invokeai ./projects/textgen diff --git a/packages/pyfunctional/default.nix b/packages/pyfunctional/default.nix new file mode 100644 index 0000000..7a95608 --- /dev/null +++ b/packages/pyfunctional/default.nix @@ -0,0 +1,24 @@ + +{ buildPythonPackage, lib, fetchPypi +}: + +buildPythonPackage rec { + pname = "PyFunctional"; + version = "1.4.3"; + + src = fetchPypi { + inherit pname version; + sha256 = "sha256-EcMT/iUbJpxlBmiRNUVqBbxab6EpydArRF84PU9BHhA="; + }; + + propagatedBuildInputs = [ + ]; + + # TODO FIXME + doCheck = false; + + meta = with lib; { + description = "PyFunctional makes creating data pipelines easy by using chained functional operators."; + homepage = "https://github.com/EntilZha/PyFunctional"; + }; +} diff --git a/packages/xformers/default.nix b/packages/xformers/default.nix index f0a2eae..1f251d0 100644 --- a/packages/xformers/default.nix +++ b/packages/xformers/default.nix @@ -1,23 +1,177 @@ -# WARNING: This file was automatically generated. You should avoid editing it. -# If you run pynixify again, the file will be either overwritten or -# deleted, and you will lose the changes you made to it. 
+{ lib +, buildPythonPackage +, pythonOlder +, fetchFromGitHub +, which +# runtime dependencies +, numpy +, torch +# check dependencies +, pytestCheckHook +, pytest-cov +# , pytest-mpi +, pytest-timeout +# , pytorch-image-models +, hydra-core +, fairscale +, scipy +, cmake +, openai-triton +, networkx +#, apex +, einops +, transformers +, timm +#, flash-attn +, cudaPackages +, stable-pkgs, gpuTargets ? [ ], rocmSupport ? false, rocmPackages ? { } +}: +let + inherit (cudaPackages) cudaFlags cudnn; inherit (lib) lists strings trivial; -{ buildPythonPackage, fetchPypi, lib, torch, numpy, pyre-extensions, pythonRelaxDepsHook, which }: + # Some packages are not available on all platforms + nccl = cudaPackages.nccl or null; -buildPythonPackage rec { + setBool = v: if v then "1" else "0"; + + # https://github.com/pytorch/pytorch/blob/v2.0.1/torch/utils/cpp_extension.py#L1744 + supportedTorchCudaCapabilities = + let + real = ["3.5" "3.7" "5.0" "5.2" "5.3" "6.0" "6.1" "6.2" "7.0" "7.2" "7.5" "8.0" "8.6" "8.7" "8.9" "9.0"]; + ptx = lists.map (x: "${x}+PTX") real; + in + real ++ ptx; + + # NOTE: The lists.subtractLists function is perhaps a bit unintuitive. It subtracts the elements + # of the first list *from* the second list. That means: + # lists.subtractLists a b = b - a + + # For CUDA + supportedCudaCapabilities = lists.intersectLists cudaFlags.cudaCapabilities supportedTorchCudaCapabilities; + unsupportedCudaCapabilities = lists.subtractLists supportedCudaCapabilities cudaFlags.cudaCapabilities; + + # Use trivial.warnIf to print a warning if any unsupported GPU targets are specified. + gpuArchWarner = supported: unsupported: + trivial.throwIf (supported == [ ]) + ( + "No supported GPU targets specified. Requested GPU targets: " + + strings.concatStringsSep ", " unsupported + ) + supported; + + # Create the gpuTargetString. + gpuTargetString = strings.concatStringsSep ";" ( + if gpuTargets != [ ] then + # If gpuTargets is specified, it always takes priority. 
+ gpuTargets + else if rocmSupport then + rocmPackages.clr.gpuTargets + else + gpuArchWarner supportedCudaCapabilities unsupportedCudaCapabilities + ); + + version = "0.0.22.post7"; +in +buildPythonPackage { pname = "xformers"; - version = "0.0.16"; + inherit version; + format = "setuptools"; - src = fetchPypi { - inherit pname version; - sha256 = "sha256-ksfwVWpo9EhkkmkbP1ZxQO4ZK1Y9kEGFtmabH4u4rlM="; + disabled = pythonOlder "3.7"; + + src = fetchFromGitHub { + owner = "facebookresearch"; + repo = "xformers"; + rev = "refs/tags/v${version}"; + hash = "sha256-7lZi3+2dVDZJFYCUlxsyDU8t9qdnl+b2ERRXKA6Zp7U="; + fetchSubmodules = true; }; - nativeBuildInputs = [ pythonRelaxDepsHook which ]; - pythonRelaxDeps = [ "pyre-extensions" ]; - propagatedBuildInputs = [ torch numpy pyre-extensions /*triton*/ ]; - # TODO FIXME + preConfigure = '' + export TORCH_CUDA_ARCH_LIST="${gpuTargetString}" + export CUDNN_INCLUDE_DIR=${cudnn.dev}/include + export CUDNN_LIB_DIR=${cudnn.lib}/lib + export CUPTI_INCLUDE_DIR=${cudaPackages.cuda_cupti.dev}/include + export CUPTI_LIBRARY_DIR=${cudaPackages.cuda_cupti.lib}/lib + export CUDA_PATH=${stable-pkgs.cudatoolkit} + export EXTRA_LD_FLAGS="-L${stable-pkgs.linuxPackages.nvidia_x11_production}/lib" + ''; + + preBuild = '' + cat << EOF > ./xformers/version.py + # noqa: C801 + __version__ = "${version}" + EOF + ''; + + nativeBuildInputs = [ + which + ] ++ (with cudaPackages; [ + autoAddOpenGLRunpathHook + cuda_nvcc + ]); + + propagatedBuildInputs = [ + numpy + torch + ]; + + buildInputs = with cudaPackages; [ + cuda_cccl.dev # + cuda_cudart # cuda_runtime.h and libraries + cuda_cupti.dev # For kineto + cuda_cupti.lib # For kineto + cuda_nvcc.dev # crt/host_config.h; even though we include this in nativeBuildInputs, it's needed here too + cuda_nvml_dev.dev # + cuda_nvrtc.dev + cuda_nvrtc.lib + cuda_nvtx.dev + cuda_nvtx.lib # -llibNVToolsExt + cudnn.dev + cudnn.lib + libcublas.dev + libcublas.lib + libcufft.dev + libcufft.lib + libcurand.dev + 
libcurand.lib + libcusolver.dev + libcusolver.lib + libcusparse.dev + libcusparse.lib + stable-pkgs.magma + stable-pkgs.numactl + ] ++ [stable-pkgs.linuxPackages.nvidia_x11_production stable-pkgs.gcc stable-pkgs.cudatoolkit]; + + pythonImportsCheck = [ "xformers" ]; + + dontUseCmakeConfigure = true; + + # see commented out missing packages doCheck = false; - meta = with lib; { }; -} + nativeCheckInputs = [ + pytestCheckHook + pytest-cov + pytest-timeout + hydra-core + fairscale + scipy + cmake + networkx + openai-triton + # apex + einops + transformers + timm + # flash-attn + ]; + + meta = with lib; { + description = "XFormers: A collection of composable Transformer building blocks"; + homepage = "https://github.com/facebookresearch/xformers"; + changelog = "https://github.com/facebookresearch/xformers/blob/${version}/CHANGELOG.md"; + license = licenses.bsd3; + maintainers = with maintainers; [ happysalada ]; + }; +} \ No newline at end of file diff --git a/projects/automatic1111/default.nix b/projects/automatic1111/default.nix index 28d2f67..df9d281 100644 --- a/projects/automatic1111/default.nix +++ b/projects/automatic1111/default.nix @@ -13,11 +13,11 @@ in #cant i do like only for this for invoke other version? 
( final: prev: { - pillow = pkgs.python3.pkgs.callPackage ../../packages/pillow { }; pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [ ( python-final: python-prev: { pillow = python-final.callPackage ../../packages/pillow { }; + xformers = python-final.callPackage ../../packages/xformers { inherit stable-pkgs; }; } ) ]; @@ -30,21 +30,25 @@ in ../../packages/blendmodes ../../packages/blip ../../packages/codeformer + ../../packages/deforum ../../packages/facexlib ../../packages/gfpgan ../../packages/gradio ../../packages/gradio-client ../../packages/k_diffusion ../../packages/lpips + ../../packages/mediapipe ../../packages/openclip ../../packages/pillow + ../../packages/pyfunctional ../../packages/pytorch-lightning ../../packages/realesrgan ../../packages/taming-transformers-rom1504 ../../packages/tomesd - ../../packages/torch-fidelity - ../../packages/torch-grammar - ../../packages/xformers + #../../packages/torch-fidelity + #../../packages/torch-grammar + ../../packages/ultralytics + ../../packages/zipunicode ]) (final: prev: lib.mapAttrs (_: pkg: pkg.overrideAttrs (old: { @@ -69,8 +73,16 @@ in ]); }; + stable-pkgs = import inputs.nixpkgs-stable { + config.allowUnfree = true; + config.cudaSupport = true; + inherit system; + }; + src = inputs.a1111-src; - mkAutomatic1111Variant = args: pkgs.callPackage ./package.nix ({ inherit src; sd-src = inputs.sd-src; sgm-src = inputs.sgm-src; } // args); + mkAutomatic1111Variant = args: pkgs.callPackage ./package.nix ({ + inherit src; sd-src = inputs.sd-src; sgm-src = inputs.sgm-src; inherit stable-pkgs pkgs; + } // args); in { packages = { @@ -90,9 +102,9 @@ in in { a1111 = ./nixos; - invokeai-nvidia = { + a1111-nvidia = { imports = [ - config.flake.nixosModules.invokeai + config.flake.nixosModules.a1111 (packageModule "a1111-nvidia") ]; }; diff --git a/projects/automatic1111/nixos/default.nix b/projects/automatic1111/nixos/default.nix index 716f371..f28afc5 100644 --- a/projects/automatic1111/nixos/default.nix +++ 
b/projects/automatic1111/nixos/default.nix @@ -8,33 +8,33 @@ let floatToString optionalString ; - cfg = config.services.invokeai; + cfg = config.services.a1111; in { - imports = map ({ old, new ? old }: mkRenamedOptionModule [ "services" "invokeai" old ] [ "services" "invokeai" "settings" new ]) [ + imports = map ({ old, new ? old }: mkRenamedOptionModule [ "services" "a1111" old ] [ "services" "a1111" "settings" new ]) [ { old = "host"; } { old = "port"; } { old = "dataDir"; new = "root"; } { old = "precision"; } ]; - options.services.invokeai = { - enable = mkEnableOption "InvokeAI Web UI for Stable Diffusion"; + options.services.a1111 = { + enable = mkEnableOption "Automatic1111 UI for Stable Diffusion"; package = mkOption { - description = "Which InvokeAI package to use."; + description = "Which Automatic1111 package to use."; type = types.package; }; user = mkOption { - description = "Which user to run InvokeAI as."; - default = "invokeai"; + description = "Which user to run A1111 as."; + default = "a1111"; type = types.str; }; group = mkOption { - description = "Which group to run InvokeAI as."; - default = "invokeai"; + description = "Which group to run A1111 as."; + default = "a1111"; type = types.str; }; @@ -51,28 +51,28 @@ in ]); in attrsOf (either atom (listOf atom)); options = { - host = mkOption { - description = "Which IP address to listen on."; - default = "127.0.0.1"; - type = types.str; - }; + #listen = mkOption { + # description = "Launch gradio with 0.0.0.0 as server name, allowing to respond to network requests."; + # default = false; + # type = types.bool; + #}; port = mkOption { - description = "Which port to listen on."; - default = 9090; + description = "Launch gradio with given server port, you need root/admin rights for ports < 1024; defaults to 7860 if available."; + default = 7860; type = types.port; }; - root = mkOption { - description = "Where to store InvokeAI's state."; - default = "/var/lib/invokeai"; + data-dir = mkOption { + 
description = "Where to store A1111's state."; + default = "/var/lib/a1111"; type = types.path; }; - precision = mkOption { - description = "Set model precision."; - default = "auto"; - type = types.enum [ "auto" "float32" "autocast" "float16" ]; + ckpt-dir = mkOption { + description = "Path to A1111's SD models."; + default = "/var/lib/models/ckpt"; + type = types.path; }; }; }; @@ -88,8 +88,8 @@ in config = let cliArgs = (flatten (mapAttrsToList (n: v: - if v == null then [] - else if isBool v then [ "--${optionalString (!v) "no-"}${n}" ] + if v == null then [] + #else if isBool v then [ "--${optionalString (!v) "no-"}${n}" ] else if isInt v then [ "--${n}" "${toString v}" ] else if isFloat v then [ "--${n}" "${floatToString v}" ] else if isString v then ["--${n}" v ] @@ -98,34 +98,34 @@ in ) cfg.settings)) ++ cfg.extraArgs; in mkIf cfg.enable { - users.users = mkIf (cfg.user == "invokeai") { - invokeai = { + users.users = mkIf (cfg.user == "a1111") { + a1111 = { isSystemUser = true; inherit (cfg) group; }; }; - users.groups = mkIf (cfg.group == "invokeai") { - invokeai = {}; + users.groups = mkIf (cfg.group == "a1111") { + a1111 = {}; }; - systemd.services.invokeai = { + systemd.services.a1111 = { after = [ "network.target" ]; wantedBy = [ "multi-user.target" ]; environment = { - HOME = "${cfg.settings.root}/.home"; - INVOKEAI_ROOT = "${cfg.settings.root}"; + HOME = "${cfg.settings.data-dir}/.home"; + COMMANDLINE_ARGS = escapeShellArgs cliArgs; NIXIFIED_AI_NONINTERACTIVE = "1"; }; serviceConfig = { User = cfg.user; Group = cfg.group; - ExecStart = "${getExe cfg.package} ${escapeShellArgs cliArgs}"; + ExecStart = "${getExe cfg.package}"; PrivateTmp = true; }; }; systemd.tmpfiles.rules = [ - "d '${cfg.settings.root}' 0755 ${cfg.user} ${cfg.group} - -" - "d '${cfg.settings.root}/configs' 0755 ${cfg.user} ${cfg.group} - -" - "d '${cfg.settings.root}/.home' 0750 ${cfg.user} ${cfg.group} - -" + "d '${cfg.settings.data-dir}/' 0755 ${cfg.user} ${cfg.group} - -" + "d 
'${cfg.settings.data-dir}/configs' 0755 ${cfg.user} ${cfg.group} - -" + "d '${cfg.settings.data-dir}/.home' 0750 ${cfg.user} ${cfg.group} - -" ]; }; } diff --git a/projects/automatic1111/package.nix b/projects/automatic1111/package.nix index faa8fac..0ed4bd9 100644 --- a/projects/automatic1111/package.nix +++ b/projects/automatic1111/package.nix @@ -4,6 +4,8 @@ , # misc lib , src +, stable-pkgs +, pkgs # extra deps }: python3Packages.buildPythonPackage { @@ -36,13 +38,38 @@ python3Packages.buildPythonPackage { taming-transformers-rom1504 timm tomesd - torch + torchWithCuda transformers xformers - ]; + + #For Extensions -- dont know if e.g you dont install image browser then maybe lack of dep for civitai browser + pyfunctional #infinite image browser + dill #infinite image browser + python-dotenv #infinite image browser + fastapi #infinite image browser + uvicorn #infinite image browser + tabulate #infinite image browser + #infinite image browser sends dleted images to nirvana + + send2trash #civitai browser+ + zipunicode #civitai browser+ + fake-useragent #civitai browser+ + + rich #adetailer + ultralytics #adetailer + py-cpuinfo #adetailer + mediapipe #adeteailer + + av #animatediff to create webm and other fileformats + + numexpr #deforum + deforum #deforum + ]; patches = [ ./_outputpaths.patch ]; + nativeBuildInputs = [ pkgs.cudatoolkit ]; + buildPhase = '' runHook preBuild @@ -70,12 +97,12 @@ python3Packages.buildPythonPackage { chmod +x launch.py makeWrapper "$out/launch.py" $out/bin/launch-wrapped.py \ --run 'export COMMANDLINE_ARGS="''${COMMANDLINE_ARGS:-\ - --data-dir $HOME/webui --skip-install \ + --data-dir $HOME/webui --skip-install --xformers \ --theme dark --ckpt-dir $HOME/webui/models/ckpt \ --embeddings-dir $HOME/webui/models/embeddings \ --medvram --no-half-vae}"' \ --set-default PYTHONPATH $PYTHONPATH \ - --chdir $out + --chdir $out --set-default CUDA_PATH ${pkgs.cudatoolkit} rm -rf dist @@ -111,4 +138,11 @@ python3Packages.buildPythonPackage { 
homepage = "https://github.com/AUTOMATIC1111/stable-diffusion-webui"; mainProgram = "launch-wrapped.py"; }; + + #Tiled VAE supported without additional dependencies + #Infinit image browser couple of deps + #civit-ai browser + couple of deps + #animatediff --> needs deforum for frame interpolation + #deforum + #controlnet } diff --git a/projects/bark-gui/default.nix b/projects/bark-gui/default.nix index ec49cb8..6977f34 100644 --- a/projects/bark-gui/default.nix +++ b/projects/bark-gui/default.nix @@ -35,17 +35,17 @@ in flake.nixosModules = let packageModule = pkgAttrName: { pkgs, ... }: { - services.a1111.package = withSystem pkgs.system ( + services.bark-gui.package = withSystem pkgs.system ( { config, ... }: lib.mkOptionDefault config.packages.${pkgAttrName} ); }; in { - bark-gui = ./nixos; - invokeai-nvidia = { - imports = [ - config.flake.nixosModules.invokeai - (packageModule "bark-gui-nvidia") - ]; - }; + #bark-gui = ./nixos; + #invokeai-nvidia = { + # imports = [ + # config.flake.nixosModules.invokeai + # (packageModule "bark-gui-nvidia") + # ]; + #}; }; } diff --git a/projects/bark-gui/package.nix b/projects/bark-gui/package.nix index 3c88e3c..e80a7e2 100644 --- a/projects/bark-gui/package.nix +++ b/projects/bark-gui/package.nix @@ -20,15 +20,13 @@ python3Packages.buildPythonPackage { pytorch-seed safetensors scipy - torch-bin - torchaudio-bin + torch + torchaudio transformers vector-quantize-pytorch ]; #nativeBuildInputs = with python3Packages; [ pythonRelaxDepsHook pip ]; nativeBuildInputs = with python3Packages; [ setuptools pip ]; - #pythonRemoveDeps = [ "clip" "pyreadline3" "flaskwebgui" "opencv-python" ]; - pythonRelaxDeps = [ "dnspython" "flask" "requests" "numpy" "pytorch-lightning" "torchsde" "uvicorn" "invisible-watermark" "accelerate" "scikit-image" "safetensors" "torchvision" "test-tube" "fastapi" ]; makeWrapperArgs = [ '' --set-default PYTHONPATH=$PYTHONPATH '' diff --git a/projects/comfyui/default.nix b/projects/comfyui/default.nix new file 
mode 100644 index 0000000..0750e0d --- /dev/null +++ b/projects/comfyui/default.nix @@ -0,0 +1,40 @@ +{ config, inputs, lib, withSystem, ... }: +{ + perSystem = { config, pkgs, system, ... }: + let + src = inputs.comfyui-src; + + stable-pkgs = import inputs.nixpkgs-stable { + config.allowUnfree = true; + config.cudaSupport = true; + inherit system; + }; + + mkComfyUIVariant = args: pkgs.python310Packages.callPackage ./package.nix ({ + inherit src; + } // args); + in + { + packages = { + comfyui-nvidia = mkComfyUIVariant { }; + }; + }; + + #flake.nixosModules = + /* let + packageModule = pkgAttrName: { pkgs, ... }: { + services.comfyui.package = withSystem pkgs.system ( + { config, ... }: lib.mkOptionDefault config.packages.${pkgAttrName} + ); + }; + in + { + comfyui = ./nixos; + comfyui-nvidia = { + imports = [ + config.flake.nixosModules.a1111 + (packageModule "comfyui-nvidia") + ]; + }; + };*/ +} diff --git a/projects/comfyui/nixos/default.nix b/projects/comfyui/nixos/default.nix new file mode 100644 index 0000000..efbede4 --- /dev/null +++ b/projects/comfyui/nixos/default.nix @@ -0,0 +1,131 @@ +{ config, lib, ... }: + +let + inherit (lib) + mkIf mkOption mkEnableOption mkRenamedOptionModule types + escapeShellArgs flatten getExe mapAttrsToList + isBool isFloat isInt isList isString + floatToString optionalString + ; + + cfg = config.services.comfyui; +in + +{ + imports = map ({ old, new ? 
old }: mkRenamedOptionModule [ "services" "comfyui" old ] [ "services" "comfyui" "settings" new ]) [ + { old = "host"; } + { old = "port"; } + { old = "dataDir"; new = "root"; } + { old = "precision"; } + ]; + options.services.comfyui = { + enable = mkEnableOption "ComfyUI for Stable Diffusion"; + + package = mkOption { + description = "Which ComfyUI package to use."; + type = types.package; + }; + + user = mkOption { + description = "Which user to run ComfyUI as."; + default = "a1111"; + type = types.str; + }; + + group = mkOption { + description = "Which group to run ComfyUI as."; + default = "a1111"; + type = types.str; + }; + + settings = mkOption { + description = "Structured command line arguments."; + default = { }; + type = types.submodule { + freeformType = with types; let + atom = nullOr (oneOf [ + bool + str + int + float + ]); + in attrsOf (either atom (listOf atom)); + options = { + #listen = mkOption { + # description = "Launch gradio with 0.0.0.0 as server name, allowing to respond to network requests."; + # default = false; + # type = types.bool; + #}; + + port = mkOption { + description = "Which port the ComfyUI server listens on; you need root/admin rights for ports < 1024."; + default = 7860; + type = types.port; + }; + + data-dir = mkOption { + description = "Where to store ComfyUI's state."; + default = "/var/lib/a1111"; + type = types.path; + }; + + ckpt-dir = mkOption { + description = "Path to ComfyUI's SD models."; + default = "/var/lib/models/ckpt"; + type = types.path; + }; + }; + }; + }; + + extraArgs = mkOption { + description = "Additional raw command line arguments."; + default = []; + type = with types; listOf str; + }; + }; + + config = let + + cliArgs = (flatten (mapAttrsToList (n: v: + if v == null then [] + #else if isBool v then [ "--${optionalString (!v) "no-"}${n}" ] + else if isInt v then [ "--${n}" "${toString v}" ] + else if isFloat v then [ "--${n}" "${floatToString v}" ] + else if isString 
v then ["--${n}" v ] + else if isList v then [ "--${n}" (toString v) ] + else throw "Unhandled type for setting \"${n}\"" + ) cfg.settings)) ++ cfg.extraArgs; + + in mkIf cfg.enable { + users.users = mkIf (cfg.user == "a1111") { + a1111 = { + isSystemUser = true; + inherit (cfg) group; + }; + }; + users.groups = mkIf (cfg.group == "a1111") { + a1111 = {}; + }; + systemd.services.a1111 = { + after = [ "network.target" ]; + wantedBy = [ "multi-user.target" ]; + environment = { + HOME = "${cfg.settings.data-dir}/.home"; + COMMANDLINE_ARGS = escapeShellArgs cliArgs; + NIXIFIED_AI_NONINTERACTIVE = "1"; + }; + serviceConfig = { + User = cfg.user; + Group = cfg.group; + ExecStart = "${getExe cfg.package}"; + PrivateTmp = true; + }; + }; + systemd.tmpfiles.rules = [ + "d '${cfg.settings.data-dir}/' 0755 ${cfg.user} ${cfg.group} - -" + "d '${cfg.settings.data-dir}/configs' 0755 ${cfg.user} ${cfg.group} - -" + "d '${cfg.settings.data-dir}/.home' 0750 ${cfg.user} ${cfg.group} - -" + ]; + }; +} diff --git a/projects/comfyui/package.nix b/projects/comfyui/package.nix new file mode 100644 index 0000000..dd06fe1 --- /dev/null +++ b/projects/comfyui/package.nix @@ -0,0 +1,70 @@ +{ src +, buildPythonPackage +, torchvision-bin +, torch-bin +, safetensors +, psutil +, einops +, transformers +, scipy +, torchsde +, pillow +, torch +, torchvision +, accelerate +}: +buildPythonPackage { + pname = "ComfyUI"; + format = "other"; + version = "latest"; + inherit src; + propagatedBuildInputs = [ + accelerate + torchvision + torch + safetensors + psutil + einops + transformers + scipy + pillow + torchsde + ]; + + buildPhase = + '' + runHook preBuild + + mkdir -p dist + cp -R . 
$out + chmod -R +w $out + cd $out + + #make main.py executable > shebang + mkdir -p $out/bin + cat <<-EOF > main.py + $(echo "#!/usr/bin/env python") + $(cat main.py) + EOF + chmod +x main.py + makeWrapper "$out/main.py" $out/bin/main-wrapped.py \ + --set-default PYTHONPATH $PYTHONPATH + + rm -rf dist + + runHook postBuild + ''; + + meta = { + description = "The most powerful and modular stable diffusion GUI and backend."; + homepage = "https://github.com/comfyanonymous/ComfyUI.git"; + mainProgram = "main-wrapped.py"; + }; + + #Tiled VAE supported without additional dependencies + #Infinite image browser couple of deps + #civit-ai browser + couple of deps + #animatediff --> needs deforum for frame interpolation + #deforum + #controlnet +} diff --git a/projects/invokeai/package.nix b/projects/invokeai/package.nix index 93e4ec8..5da44b4 100644 --- a/projects/invokeai/package.nix +++ b/projects/invokeai/package.nix @@ -2,6 +2,7 @@ # misc , lib , src +, pkgs # extra deps }: @@ -76,7 +77,7 @@ python3Packages.buildPythonPackage { huggingface-hub easing-functions dynamicprompts - torchvision + torchvision-bin test-tube ]; nativeBuildInputs = with python3Packages; [ pythonRelaxDepsHook pip ]; diff --git a/projects/kohya_ss/default.nix b/projects/kohya_ss/default.nix index 7c0371a..7ceac8b 100644 --- a/projects/kohya_ss/default.nix +++ b/projects/kohya_ss/default.nix @@ -48,13 +48,13 @@ in kohya_ss = ./nixos; kohya_ss-amd = { imports = [ - config.flake.nixosModules.invokeai + config.flake.nixosModules.kohya_ss (packageModule "kohya_ss-amd") ]; }; kohya_ss-nvidia = { imports = [ - config.flake.nixosModules.invokeai + config.flake.nixosModules.kohya_ss (packageModule "kohya_ss-nvidia") ]; }; diff --git a/projects/textgen/package.nix b/projects/textgen/package.nix index face144..8b40e4c 100644 --- a/projects/textgen/package.nix +++ b/projects/textgen/package.nix @@ -76,12 +76,14 @@ let ln -s ${tmpDir}/presets/ $out/presets ''; textgenPython = python3Packages.python.withPackages 
(_: with python3Packages; [ + aiofiles accelerate bitsandbytes colorama datasets flexgen gradio + gradio-client llama-cpp-python markdown numpy