Compare commits

...

37 commits

Author SHA1 Message Date
Denis Manherz e096e47e9e
Merge pull request #1 from denismhz/comfyui
Comfyui
2024-01-11 19:01:06 +01:00
Denis Manherz a227d57d28 Update 2024-01-11 18:13:38 +01:00
Denis Manherz 7b91a42165 Merge branch 'sd_webui' of github.com:denismhz/flake into sd_webui 2023-12-17 21:04:52 +01:00
Denis Manherz f9fe0957dc Update 2023-12-17 21:03:43 +01:00
Denis Manherz 28aff58462
Update default.nix xformers 2023-12-02 12:44:48 +00:00
Denis Manherz d12a51a9d5 Update 2023-12-02 01:45:55 +01:00
Denis Manherz 11df303011 Update 2023-12-02 01:40:14 +01:00
Denis Manherz fb68dcf306 Update 2023-12-02 01:26:06 +01:00
Denis Manherz f878b1291a +deps Animatediff, deforum 2023-11-30 12:38:08 +01:00
Denis Manherz ad2f612129 +deps Civitai browser + 2023-11-30 12:04:35 +01:00
Denis Manherz f3bd801137 +deps Adetailer 2023-11-29 23:19:34 +01:00
Denis Manherz f8f97aad01 +deps for civitai browser + 2023-11-29 23:02:44 +01:00
Denis Manherz 14315178ef Infinite image browser deps done 2023-11-29 22:46:28 +01:00
Denis Manherz ac32c8a937 Infinite image browser deps 2023-11-29 22:29:53 +01:00
Denis Manherz 20bfaab47e Infinite Image Browser deps 2023-11-29 22:15:27 +01:00
Denis Manherz 426c5d444a Update 2023-11-27 18:06:14 +01:00
Denis Manherz 1eb7223715 Revert "Update"
This reverts commit d896b23f48.
2023-11-27 18:03:04 +01:00
Denis Manherz d896b23f48 Update 2023-11-27 17:59:48 +01:00
Denis Manherz b5f9fb6f0d Update 2023-11-27 17:54:32 +01:00
Denis Manherz 902151b0ad Update 2023-11-27 15:22:38 +01:00
Denis Manherz d6a880fefb Update 2023-11-27 15:20:37 +01:00
Denis Manherz 3122edf0c9 Update 2023-11-27 15:00:36 +01:00
Denis Manherz 201072da3d Update 2023-11-27 15:00:06 +01:00
Denis Manherz 811b606bac Update 2023-11-27 14:58:33 +01:00
Denis Manherz b6f8ee2420 Update 2023-11-27 14:56:07 +01:00
Denis Manherz b1aa03d201 Update 2023-11-27 14:38:19 +01:00
Denis Manherz 0e22c503ac Update 2023-11-27 14:37:04 +01:00
Denis Manherz 9ff54ac634 Update 2023-11-27 14:35:19 +01:00
Denis Manherz bd93aa6e0e Update 2023-11-27 14:32:20 +01:00
Denis Manherz 0f7734670b Update 2023-11-27 14:30:04 +01:00
Denis Manherz 28c02b1f6a Update 2023-11-27 14:28:14 +01:00
Denis Manherz a062543a61 Update 2023-11-27 14:24:08 +01:00
Denis Manherz bc50096893 Update 2023-11-27 14:12:50 +01:00
Denis Manherz 0b912feb3b Update 2023-11-27 14:09:50 +01:00
Denis Manherz 7518e0e333 Update 2023-11-27 14:07:25 +01:00
Denis Manherz b3914b1459 Update 2023-11-27 13:59:33 +01:00
Denis Manherz 9aaf8fec27 A1111 Service 2023-11-27 13:55:54 +01:00
15 changed files with 595 additions and 86 deletions

View file

@ -32,6 +32,22 @@
"type": "github"
}
},
"comfyui-src": {
"flake": false,
"locked": {
"lastModified": 1702736172,
"narHash": "sha256-BYZHfnhdubasOAhUyq/iW4HvYKPiqBwiXY3ozZXU1Oc=",
"owner": "comfyanonymous",
"repo": "ComfyUI",
"rev": "6453dc1ca2d98d89af7cf312bb48d1e3fd2ca27f",
"type": "github"
},
"original": {
"owner": "comfyanonymous",
"repo": "ComfyUI",
"type": "github"
}
},
"flake-parts": {
"inputs": {
"nixpkgs-lib": [
@ -251,6 +267,7 @@
"inputs": {
"a1111-src": "a1111-src",
"bark-gui-src": "bark-gui-src",
"comfyui-src": "comfyui-src",
"flake-parts": "flake-parts",
"hercules-ci-effects": "hercules-ci-effects",
"invokeai-src": "invokeai-src",

View file

@ -7,6 +7,10 @@
description = "A Nix Flake that makes AI reproducible and easy to run";
inputs = {
comfyui-src = {
url = github:comfyanonymous/ComfyUI;
flake = false;
};
nixpkgs-stable = {
url = github:NixOS/nixpkgs/nixos-23.05;
};
@ -52,17 +56,38 @@
};
outputs = { flake-parts, invokeai-src, hercules-ci-effects, ... }@inputs:
flake-parts.lib.mkFlake { inherit inputs; } {
perSystem = { system, ... }:{
_module.args.pkgs = import inputs.nixpkgs { config.allowUnfree = true; inherit system; };
perSystem = { system, ... }: {
# _module.args.pkgs = import inputs.nixpkgs { config.allowUnfree = true; inherit system; config.cudaSupport = true; };
_module.args.pkgs = import inputs.nixpkgs {
inherit system;
/*overlays = [
(
final: prev: {
final.python310 = prev.python310.override {
enableOptimizations = true;
reproducibleBuild = false;
self = final.python310;
buildInputs = [ final.ffmpeg-full ];
};
pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [
(
python-final: python-prev: {
torch = python-prev.torch-bin;
}
)
];
}
)
];*/
config = { allowUnfree = true; cudaSupport = true; };
};
legacyPackages = {
koboldai = builtins.throw ''
koboldai has been dropped from nixified.ai due to lack of upstream development,
try textgen instead which is better maintained. If you would like to use the last
available version of koboldai with nixified.ai, then run:
koboldai has been dropped from nixified.ai due to lack of upstream development,
try textgen instead which is better maintained. If you would like to use the last
available version of koboldai with nixified.ai, then run:
nix run github:nixified.ai/flake/0c58f8cba3fb42c54f2a7bf9bd45ee4cbc9f2477#koboldai
nix run github:nixified.ai/flake/0c58f8cba3fb42c54f2a7bf9bd45ee4cbc9f2477#koboldai
'';
};
};
@ -72,8 +97,9 @@
debug = true;
imports = [
hercules-ci-effects.flakeModule
# ./modules/nixpkgs-config
# ./modules/nixpkgs-config
./overlays
./projects/comfyui
./projects/automatic1111
./projects/invokeai
./projects/textgen

View file

@ -0,0 +1,24 @@
# PyFunctional: functional-style data pipelines with chained operators.
# Needed by the A1111 "Infinite image browser" extension.
{ buildPythonPackage, lib, fetchPypi, dill, tabulate }:
buildPythonPackage rec {
  pname = "PyFunctional";
  version = "1.4.3";

  src = fetchPypi {
    inherit pname version;
    sha256 = "sha256-EcMT/iUbJpxlBmiRNUVqBbxab6EpydArRF84PU9BHhA=";
  };

  # Runtime dependencies from upstream's setup.py (dill>=0.2.5, tabulate).
  # Previously empty, which let the build succeed but broke `import functional`
  # at runtime.
  propagatedBuildInputs = [
    dill
    tabulate
  ];

  # TODO FIXME: upstream tests need pytest check inputs; disabled for now.
  doCheck = false;

  meta = with lib; {
    description = "PyFunctional makes creating data pipelines easy by using chained functional operators.";
    homepage = "https://github.com/EntilZha/PyFunctional";
    license = licenses.mit;
  };
}

View file

@ -1,23 +1,177 @@
# WARNING: This file was automatically generated. You should avoid editing it.
# If you run pynixify again, the file will be either overwritten or
# deleted, and you will lose the changes you made to it.
{ lib
, buildPythonPackage
, pythonOlder
, fetchFromGitHub
, which
# runtime dependencies
, numpy
, torch
# check dependencies
, pytestCheckHook
, pytest-cov
# , pytest-mpi
, pytest-timeout
# , pytorch-image-models
, hydra-core
, fairscale
, scipy
, cmake
, openai-triton
, networkx
#, apex
, einops
, transformers
, timm
#, flash-attn
, cudaPackages
, stable-pkgs
}:
let
inherit (cudaPackages) cudaFlags cudnn;
{ buildPythonPackage, fetchPypi, lib, torch, numpy, pyre-extensions, pythonRelaxDepsHook, which }:
# Some packages are not available on all platforms
nccl = cudaPackages.nccl or null;
buildPythonPackage rec {
setBool = v: if v then "1" else "0";
# https://github.com/pytorch/pytorch/blob/v2.0.1/torch/utils/cpp_extension.py#L1744
supportedTorchCudaCapabilities =
let
real = ["3.5" "3.7" "5.0" "5.2" "5.3" "6.0" "6.1" "6.2" "7.0" "7.2" "7.5" "8.0" "8.6" "8.7" "8.9" "9.0"];
ptx = lists.map (x: "${x}+PTX") real;
in
real ++ ptx;
# NOTE: The lists.subtractLists function is perhaps a bit unintuitive. It subtracts the elements
# of the first list *from* the second list. That means:
# lists.subtractLists a b = b - a
# For CUDA
supportedCudaCapabilities = lists.intersectLists cudaFlags.cudaCapabilities supportedTorchCudaCapabilities;
unsupportedCudaCapabilities = lists.subtractLists supportedCudaCapabilities cudaFlags.cudaCapabilities;
# Use trivial.warnIf to print a warning if any unsupported GPU targets are specified.
gpuArchWarner = supported: unsupported:
trivial.throwIf (supported == [ ])
(
"No supported GPU targets specified. Requested GPU targets: "
+ strings.concatStringsSep ", " unsupported
)
supported;
# Create the gpuTargetString.
gpuTargetString = strings.concatStringsSep ";" (
if gpuTargets != [ ] then
# If gpuTargets is specified, it always takes priority.
gpuTargets
else if rocmSupport then
rocmPackages.clr.gpuTargets
else
gpuArchWarner supportedCudaCapabilities unsupportedCudaCapabilities
);
version = "0.0.22.post7";
in
buildPythonPackage {
pname = "xformers";
version = "0.0.16";
inherit version;
format = "setuptools";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-ksfwVWpo9EhkkmkbP1ZxQO4ZK1Y9kEGFtmabH4u4rlM=";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "facebookresearch";
repo = "xformers";
rev = "refs/tags/v${version}";
hash = "sha256-7lZi3+2dVDZJFYCUlxsyDU8t9qdnl+b2ERRXKA6Zp7U=";
fetchSubmodules = true;
};
nativeBuildInputs = [ pythonRelaxDepsHook which ];
pythonRelaxDeps = [ "pyre-extensions" ];
propagatedBuildInputs = [ torch numpy pyre-extensions /*triton*/ ];
# TODO FIXME
preConfigure = ''
export TORCH_CUDA_ARCH_LIST="${gpuTargetString}"
export CUDNN_INCLUDE_DIR=${cudnn.dev}/include
export CUDNN_LIB_DIR=${cudnn.lib}/lib
export CUPTI_INCLUDE_DIR=${cudaPackages.cuda_cupti.dev}/include
export CUPTI_LIBRARY_DIR=${cudaPackages.cuda_cupti.lib}/lib
export CUDA_PATH=${stable-packages.cudatoolkit}
export EXTRA_LD_FLAGS="-L${stable-pkgs.linuxPackages.nvidia_x11_production}/lib"
'';
preBuild = ''
cat << EOF > ./xformers/version.py
# noqa: C801
__version__ = "${version}"
EOF
'';
nativeBuildInputs = [
which
] ++ (with cudaPackages; [
autoAddOpenGLRunpathHook
cuda_nvcc
]);
propagatedBuildInputs = [
numpy
torch
];
buildInputs = with cudaPackages; [
cuda_cccl.dev # <thrust/*>
cuda_cudart # cuda_runtime.h and libraries
cuda_cupti.dev # For kineto
cuda_cupti.lib # For kineto
cuda_nvcc.dev # crt/host_config.h; even though we include this in nativeBuildinputs, it's needed here too
cuda_nvml_dev.dev # <nvml.h>
cuda_nvrtc.dev
cuda_nvrtc.lib
cuda_nvtx.dev
cuda_nvtx.lib # -llibNVToolsExt
cudnn.dev
cudnn.lib
libcublas.dev
libcublas.lib
libcufft.dev
libcufft.lib
libcurand.dev
libcurand.lib
libcusolver.dev
libcusolver.lib
libcusparse.dev
libcusparse.lib
effectiveMagma
numactl
] ++ [stable-pkgs.linuxPackages.nvidia_x11_production gcc stable-packages.cudatoolkit];
pythonImportsCheck = [ "xformers" ];
dontUseCmakeConfigure = true;
# see commented out missing packages
doCheck = false;
meta = with lib; { };
}
nativeCheckInputs = [
pytestCheckHook
pytest-cov
pytest-timeout
hydra-core
fairscale
scipy
cmake
networkx
openai-triton
# apex
einops
transformers
timm
# flash-attn
];
meta = with lib; {
description = "XFormers: A collection of composable Transformer building blocks";
homepage = "https://github.com/facebookresearch/xformers";
changelog = "https://github.com/facebookresearch/xformers/blob/${version}/CHANGELOG.md";
license = licenses.bsd3;
maintainers = with maintainers; [ happysalada ];
};
}

View file

@ -13,11 +13,11 @@ in
#cant i do like only for this for invoke other version?
(
final: prev: {
pillow = pkgs.python3.pkgs.callPackage ../../packages/pillow { };
pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [
(
python-final: python-prev: {
pillow = python-final.callPackage ../../packages/pillow { };
xformers = python-final.callPackage ../../packages/xformers { inherit stable-pkgs; };
}
)
];
@ -30,21 +30,25 @@ in
../../packages/blendmodes
../../packages/blip
../../packages/codeformer
../../packages/deforum
../../packages/facexlib
../../packages/gfpgan
../../packages/gradio
../../packages/gradio-client
../../packages/k_diffusion
../../packages/lpips
../../packages/mediapipe
../../packages/openclip
../../packages/pillow
../../packages/pyfunctional
../../packages/pytorch-lightning
../../packages/realesrgan
../../packages/taming-transformers-rom1504
../../packages/tomesd
../../packages/torch-fidelity
../../packages/torch-grammar
../../packages/xformers
#../../packages/torch-fidelity
#../../packages/torch-grammar
../../packages/ultralytics
../../packages/zipunicode
])
(final: prev: lib.mapAttrs
(_: pkg: pkg.overrideAttrs (old: {
@ -69,8 +73,16 @@ in
]);
};
stable-pkgs = import inputs.nixpkgs-stable {
allowUnfree = true;
cudaSupport = true;
inherit system;
};
src = inputs.a1111-src;
mkAutomatic1111Variant = args: pkgs.callPackage ./package.nix ({ inherit src; sd-src = inputs.sd-src; sgm-src = inputs.sgm-src; } // args);
mkAutomatic1111Variant = args: pkgs.callPackage ./package.nix ({
inherit src; sd-src = inputs.sd-src; sgm-src = inputs.sgm-src; inherit stable-pkgs pkgs;
} // args);
in
{
packages = {
@ -90,9 +102,9 @@ in
in
{
a1111 = ./nixos;
invokeai-nvidia = {
a1111-nvidia = {
imports = [
config.flake.nixosModules.invokeai
config.flake.nixosModules.a1111
(packageModule "a1111-nvidia")
];
};

View file

@ -8,33 +8,33 @@ let
floatToString optionalString
;
cfg = config.services.invokeai;
cfg = config.services.a1111;
in
{
imports = map ({ old, new ? old }: mkRenamedOptionModule [ "services" "invokeai" old ] [ "services" "invokeai" "settings" new ]) [
imports = map ({ old, new ? old }: mkRenamedOptionModule [ "services" "a1111" old ] [ "services" "a1111" "settings" new ]) [
{ old = "host"; }
{ old = "port"; }
{ old = "dataDir"; new = "root"; }
{ old = "precision"; }
];
options.services.invokeai = {
enable = mkEnableOption "InvokeAI Web UI for Stable Diffusion";
options.services.a1111 = {
enable = mkEnableOption "Automatic1111 UI for Stable Diffusion";
package = mkOption {
description = "Which InvokeAI package to use.";
description = "Which Automatic1111 package to use.";
type = types.package;
};
user = mkOption {
description = "Which user to run InvokeAI as.";
default = "invokeai";
description = "Which user to run A1111 as.";
default = "a1111";
type = types.str;
};
group = mkOption {
description = "Which group to run InvokeAI as.";
default = "invokeai";
description = "Which group to run A1111 as.";
default = "a1111";
type = types.str;
};
@ -51,28 +51,28 @@ in
]);
in attrsOf (either atom (listOf atom));
options = {
host = mkOption {
description = "Which IP address to listen on.";
default = "127.0.0.1";
type = types.str;
};
#listen = mkOption {
# description = "Launch gradio with 0.0.0.0 as server name, allowing to respond to network requests.";
# default = false;
# type = types.bool;
#};
port = mkOption {
description = "Which port to listen on.";
default = 9090;
description = "Launch gradio with given server port, you need root/admin rights for ports < 1024; defaults to 7860 if available.";
default = 7860;
type = types.port;
};
root = mkOption {
description = "Where to store InvokeAI's state.";
default = "/var/lib/invokeai";
data-dir = mkOption {
description = "Where to store A1111's state.";
default = "/var/lib/a1111";
type = types.path;
};
precision = mkOption {
description = "Set model precision.";
default = "auto";
type = types.enum [ "auto" "float32" "autocast" "float16" ];
ckpt-dir = mkOption {
description = "Path to A1111's SD models.";
default = "/var/lib/models/ckpt";
type = types.path;
};
};
};
@ -88,8 +88,8 @@ in
config = let
cliArgs = (flatten (mapAttrsToList (n: v:
if v == null then []
else if isBool v then [ "--${optionalString (!v) "no-"}${n}" ]
if v == null then []
#else if isBool v then [ "--${optionalString (!v) "no-"}${n}" ]
else if isInt v then [ "--${n}" "${toString v}" ]
else if isFloat v then [ "--${n}" "${floatToString v}" ]
else if isString v then ["--${n}" v ]
@ -98,34 +98,34 @@ in
) cfg.settings)) ++ cfg.extraArgs;
in mkIf cfg.enable {
users.users = mkIf (cfg.user == "invokeai") {
invokeai = {
users.users = mkIf (cfg.user == "a1111") {
a1111 = {
isSystemUser = true;
inherit (cfg) group;
};
};
users.groups = mkIf (cfg.group == "invokeai") {
invokeai = {};
users.groups = mkIf (cfg.group == "a1111") {
a1111 = {};
};
systemd.services.invokeai = {
systemd.services.a1111 = {
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
environment = {
HOME = "${cfg.settings.root}/.home";
INVOKEAI_ROOT = "${cfg.settings.root}";
HOME = "${cfg.settings.data-dir}/.home";
COMMANDLINE_ARGS = escapeShellArgs cliArgs;
NIXIFIED_AI_NONINTERACTIVE = "1";
};
serviceConfig = {
User = cfg.user;
Group = cfg.group;
ExecStart = "${getExe cfg.package} ${escapeShellArgs cliArgs}";
ExecStart = "${getExe cfg.package}";
PrivateTmp = true;
};
};
systemd.tmpfiles.rules = [
"d '${cfg.settings.root}' 0755 ${cfg.user} ${cfg.group} - -"
"d '${cfg.settings.root}/configs' 0755 ${cfg.user} ${cfg.group} - -"
"d '${cfg.settings.root}/.home' 0750 ${cfg.user} ${cfg.group} - -"
"d '${cfg.settings.data-dir}/' 0755 ${cfg.user} ${cfg.group} - -"
"d '${cfg.settings.data-dir}/configs' 0755 ${cfg.user} ${cfg.group} - -"
"d '${cfg.settings.data-dir}/.home' 0750 ${cfg.user} ${cfg.group} - -"
];
};
}

View file

@ -4,6 +4,8 @@
, # misc
lib
, src
, stable-pkgs
, pkgs
# extra deps
}:
python3Packages.buildPythonPackage {
@ -36,13 +38,38 @@ python3Packages.buildPythonPackage {
taming-transformers-rom1504
timm
tomesd
torch
torchWithCuda
transformers
xformers
];
#For Extensions -- dont know if e.g you dont install image browser then maybe lack of dep for civitai browser
pyfunctional #infinite image browser
dill #infinite image browser
python-dotenv #infinite image browser
fastapi #infinite image browser
uvicorn #infinite image browser
tabulate #infinite image browser
#infinite image browser sends dleted images to nirvana
send2trash #civitai browser+
zipunicode #civitai browser+
fake-useragent #civitai browser+
rich #adetailer
ultralytics #adetailer
py-cpuinfo #adetailer
mediapipe #adeteailer
av #animatediff to create webm and other fileformats
numexpr #deforum
deforum #deforum
];
patches = [ ./_outputpaths.patch ];
nativeBuildInputs = [ pkgs.cudatoolkit ];
buildPhase =
''
runHook preBuild
@ -70,12 +97,12 @@ python3Packages.buildPythonPackage {
chmod +x launch.py
makeWrapper "$out/launch.py" $out/bin/launch-wrapped.py \
--run 'export COMMANDLINE_ARGS="''${COMMANDLINE_ARGS:-\
--data-dir $HOME/webui --skip-install \
--data-dir $HOME/webui --skip-install --xformers \
--theme dark --ckpt-dir $HOME/webui/models/ckpt \
--embeddings-dir $HOME/webui/models/embeddings \
--medvram --no-half-vae}"' \
--set-default PYTHONPATH $PYTHONPATH \
--chdir $out
--chdir $out --set-default CUDA_PATH ${pkgs.cudatoolkit}
rm -rf dist
@ -111,4 +138,11 @@ python3Packages.buildPythonPackage {
homepage = "https://github.com/AUTOMATIC1111/stable-diffusion-webui";
mainProgram = "launch-wrapped.py";
};
#Tiled VAE supported without additional dependencies
#Infinit image browser couple of deps
#civit-ai browser + couple of deps
#animatediff --> needs deforum for frame interpolation
#deforum
#controlnet
}

View file

@ -35,17 +35,17 @@ in
flake.nixosModules = let
packageModule = pkgAttrName: { pkgs, ... }: {
services.a1111.package = withSystem pkgs.system (
services.bark-gui.package = withSystem pkgs.system (
{ config, ... }: lib.mkOptionDefault config.packages.${pkgAttrName}
);
};
in {
bark-gui = ./nixos;
invokeai-nvidia = {
imports = [
config.flake.nixosModules.invokeai
(packageModule "bark-gui-nvidia")
];
};
#bark-gui = ./nixos;
#invokeai-nvidia = {
# imports = [
# config.flake.nixosModules.invokeai
# (packageModule "bark-gui-nvidia")
# ];
#};
};
}

View file

@ -20,15 +20,13 @@ python3Packages.buildPythonPackage {
pytorch-seed
safetensors
scipy
torch-bin
torchaudio-bin
torch
torchaudio
transformers
vector-quantize-pytorch
];
#nativeBuildInputs = with python3Packages; [ pythonRelaxDepsHook pip ];
nativeBuildInputs = with python3Packages; [ setuptools pip ];
#pythonRemoveDeps = [ "clip" "pyreadline3" "flaskwebgui" "opencv-python" ];
pythonRelaxDeps = [ "dnspython" "flask" "requests" "numpy" "pytorch-lightning" "torchsde" "uvicorn" "invisible-watermark" "accelerate" "scikit-image" "safetensors" "torchvision" "test-tube" "fastapi" ];
makeWrapperArgs = [
'' --set-default PYTHONPATH=$PYTHONPATH ''

View file

@ -0,0 +1,40 @@
# Flake-parts module that wires the ComfyUI package into the perSystem outputs.
# The NixOS-module wiring is not yet ported (kept commented out below).
{ config, inputs, lib, withSystem, ... }:
{
  perSystem = { config, pkgs, system, ... }:
    let
      # Pinned upstream ComfyUI source from the flake input.
      src = inputs.comfyui-src;
      # Stable nixpkgs with unfree + CUDA enabled.
      # FIX: these flags must live under `config`; passing `allowUnfree` /
      # `cudaSupport` at the top level of the nixpkgs import throws
      # "called with unexpected argument" as soon as it is evaluated.
      # NOTE(review): currently unused in this file — presumably intended for
      # package.nix like the a1111 project; confirm before removing.
      stable-pkgs = import inputs.nixpkgs-stable {
        inherit system;
        config = {
          allowUnfree = true;
          cudaSupport = true;
        };
      };
      # Build a ComfyUI variant; `args` can override or extend the defaults.
      mkComfyUIVariant = args: pkgs.python310Packages.callPackage ./package.nix ({
        inherit src;
      } // args);
    in
    {
      packages = {
        comfyui-nvidia = mkComfyUIVariant { };
      };
    };
  #flake.nixosModules =
  /* let
    packageModule = pkgAttrName: { pkgs, ... }: {
      services.comfyui.package = withSystem pkgs.system (
        { config, ... }: lib.mkOptionDefault config.packages.${pkgAttrName}
      );
    };
  in
  {
    comfyui = ./nixos;
    comfyui-nvidia = {
      imports = [
        config.flake.nixosModules.comfyui
        (packageModule "comfyui-nvidia")
      ];
    };
  };*/
}

View file

@ -0,0 +1,131 @@
# NixOS module running ComfyUI as a systemd service.
# FIX: this file was copied from the A1111 module but only partially renamed —
# `cfg` read `config.services.comfyui` while the options were declared under
# `options.services.a1111`, so `services.comfyui.enable` was an undeclared
# option and the module could never be enabled. Everything is now consistently
# namespaced under `services.comfyui` (user/group/unit names included, so it
# no longer collides with the real a1111 module).
{ config, lib, ... }:
let
  inherit (lib)
    mkIf mkOption mkEnableOption mkRenamedOptionModule types
    escapeShellArgs flatten getExe mapAttrsToList
    isBool isFloat isInt isList isString
    floatToString optionalString
    ;
  cfg = config.services.comfyui;
in
{
  # Forward legacy flat options into `settings` (freeform, so renamed keys
  # without a declared option still land as `--<key>` CLI flags).
  imports = map ({ old, new ? old }: mkRenamedOptionModule [ "services" "comfyui" old ] [ "services" "comfyui" "settings" new ]) [
    { old = "host"; }
    { old = "port"; }
    # FIX: was renamed to "root", which no longer exists; data-dir is the
    # setting the rest of this module actually reads.
    { old = "dataDir"; new = "data-dir"; }
    { old = "precision"; }
  ];
  options.services.comfyui = {
    enable = mkEnableOption "ComfyUI, a node-based UI for Stable Diffusion";
    package = mkOption {
      description = "Which ComfyUI package to use.";
      type = types.package;
    };
    user = mkOption {
      description = "Which user to run ComfyUI as.";
      default = "comfyui";
      type = types.str;
    };
    group = mkOption {
      description = "Which group to run ComfyUI as.";
      default = "comfyui";
      type = types.str;
    };
    settings = mkOption {
      description = "Structured command line arguments.";
      default = { };
      type = types.submodule {
        freeformType = with types; let
          atom = nullOr (oneOf [
            bool
            str
            int
            float
          ]);
        in attrsOf (either atom (listOf atom));
        options = {
          #listen = mkOption {
          #  description = "Listen on 0.0.0.0, allowing to respond to network requests.";
          #  default = false;
          #  type = types.bool;
          #};
          port = mkOption {
            # NOTE(review): defaults inherited from the A1111 module; ComfyUI's
            # upstream default port is 8188 — confirm which is intended.
            description = "Which port the web UI listens on; ports < 1024 need root/admin rights.";
            default = 7860;
            type = types.port;
          };
          data-dir = mkOption {
            description = "Where to store ComfyUI's state.";
            default = "/var/lib/comfyui";
            type = types.path;
          };
          ckpt-dir = mkOption {
            description = "Path to the Stable Diffusion model checkpoints.";
            default = "/var/lib/models/ckpt";
            type = types.path;
          };
        };
      };
    };
    extraArgs = mkOption {
      description = "Additional raw command line arguments.";
      default = [];
      type = with types; listOf str;
    };
  };
  config = let
    # Render `settings` into CLI flags. The boolean branch is deliberately
    # disabled (matches the A1111 module this was derived from).
    cliArgs = (flatten (mapAttrsToList (n: v:
      if v == null then []
      #else if isBool v then [ "--${optionalString (!v) "no-"}${n}" ]
      else if isInt v then [ "--${n}" "${toString v}" ]
      else if isFloat v then [ "--${n}" "${floatToString v}" ]
      else if isString v then ["--${n}" v ]
      else if isList v then [ "--${n}" (toString v) ]
      else throw "Unhandled type for setting \"${n}\""
    ) cfg.settings)) ++ cfg.extraArgs;
  in mkIf cfg.enable {
    # Create the dedicated user/group only when the defaults are kept.
    users.users = mkIf (cfg.user == "comfyui") {
      comfyui = {
        isSystemUser = true;
        inherit (cfg) group;
      };
    };
    users.groups = mkIf (cfg.group == "comfyui") {
      comfyui = {};
    };
    systemd.services.comfyui = {
      after = [ "network.target" ];
      wantedBy = [ "multi-user.target" ];
      environment = {
        HOME = "${cfg.settings.data-dir}/.home";
        # Args are passed via the environment; ExecStart takes none.
        COMMANDLINE_ARGS = escapeShellArgs cliArgs;
        NIXIFIED_AI_NONINTERACTIVE = "1";
      };
      serviceConfig = {
        User = cfg.user;
        Group = cfg.group;
        ExecStart = "${getExe cfg.package}";
        PrivateTmp = true;
      };
    };
    systemd.tmpfiles.rules = [
      "d '${cfg.settings.data-dir}/' 0755 ${cfg.user} ${cfg.group} - -"
      "d '${cfg.settings.data-dir}/configs' 0755 ${cfg.user} ${cfg.group} - -"
      "d '${cfg.settings.data-dir}/.home' 0750 ${cfg.user} ${cfg.group} - -"
    ];
  };
}

View file

@ -0,0 +1,70 @@
# Build ComfyUI from source and expose a wrapped `main.py` entry point.
# FIX 1: the trailing backslash after `--set-default PYTHONPATH $PYTHONPATH \`
# turned the following `rm -rf dist` into extra arguments to makeWrapper,
# so the wrapper was built with bogus args and `dist` was never removed.
# FIX 2: `makeWrapper` was used but never brought into scope; it is now an
# explicit nativeBuildInput.
# Also dropped the unused `torch-bin` / `torchvision-bin` parameters.
{ src
, buildPythonPackage
, makeWrapper
, safetensors
, psutil
, einops
, transformers
, scipy
, torchsde
, pillow
, torch
, torchvision
, accelerate
}:
buildPythonPackage {
  pname = "ComfyUI";
  format = "other"; # no setup.py/pyproject — the source tree is installed as-is
  version = "latest"; # rev is pinned by the flake input, not by a release tag
  inherit src;

  nativeBuildInputs = [ makeWrapper ];

  propagatedBuildInputs = [
    accelerate
    torchvision
    torch
    safetensors
    psutil
    einops
    transformers
    scipy
    pillow
    torchsde
  ];

  buildPhase =
    ''
    runHook preBuild
    mkdir -p dist
    cp -R . $out
    chmod -R +w $out
    cd $out
    # Prepend a shebang so main.py is directly executable.
    # NOTE(review): /usr/bin/python is impure on NixOS — consider using the
    # build's python interpreter instead; confirm intended behavior.
    mkdir -p $out/bin
    cat <<-EOF > main.py
    $(echo "#!/usr/bin/python")
    $(cat main.py)
    EOF
    chmod +x main.py
    makeWrapper "$out/main.py" $out/bin/main-wrapped.py \
      --set-default PYTHONPATH $PYTHONPATH
    rm -rf dist
    runHook postBuild
    '';

  meta = {
    description = "The most powerful and modular stable diffusion GUI and backend.";
    homepage = "https://github.com/comfyanonymous/ComfyUI.git";
    mainProgram = "main-wrapped.py";
  };
  #Tiled VAE supported without additional dependencies
  #Infinit image browser couple of deps
  #civit-ai browser + couple of deps
  #animatediff --> needs deforum for frame interpolation
  #deforum
  #controlnet
}

View file

@ -2,6 +2,7 @@
# misc
, lib
, src
, pkgs
# extra deps
}:
@ -76,7 +77,7 @@ python3Packages.buildPythonPackage {
huggingface-hub
easing-functions
dynamicprompts
torchvision
torchvision-bin
test-tube
];
nativeBuildInputs = with python3Packages; [ pythonRelaxDepsHook pip ];

View file

@ -48,13 +48,13 @@ in
kohya_ss = ./nixos;
kohya_ss-amd = {
imports = [
config.flake.nixosModules.invokeai
config.flake.nixosModules.kohya_ss
(packageModule "kohya_ss-amd")
];
};
kohya_ss-nvidia = {
imports = [
config.flake.nixosModules.invokeai
config.flake.nixosModules.kohya_ss
(packageModule "kohya_ss-nvidia")
];
};

View file

@ -76,12 +76,14 @@ let
ln -s ${tmpDir}/presets/ $out/presets
'';
textgenPython = python3Packages.python.withPackages (_: with python3Packages; [
aiofiles
accelerate
bitsandbytes
colorama
datasets
flexgen
gradio
gradio-client
llama-cpp-python
markdown
numpy