Merge remote-tracking branch 'origin/main' into refactor/model_manager_instantiate

Kevin Turner 2023-07-31 08:46:09 -07:00
commit e3519052ae
4 changed files with 118 additions and 5 deletions

flake.lock Normal file (25 lines added)

@@ -0,0 +1,25 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1690630721,
"narHash": "sha256-Y04onHyBQT4Erfr2fc82dbJTfXGYrf4V0ysLUYnPOP8=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "d2b52322f35597c62abf56de91b0236746b2a03d",
"type": "github"
},
"original": {
"id": "nixpkgs",
"type": "indirect"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 7
}

flake.nix Normal file (81 lines added)

@@ -0,0 +1,81 @@
# Important note: this flake does not attempt to create a fully isolated, 'pure'
# Python environment for InvokeAI. Instead, it depends on local invocations of
# virtualenv/pip to install the required (binary) packages, most importantly the
# prebuilt binary pytorch packages with CUDA support.
# ML Python packages with CUDA support, like pytorch, are notoriously expensive
# to compile, so compiling them is purposely not what this flake does.
{
description = "An (impure) flake to develop on InvokeAI.";
outputs = { self, nixpkgs }:
let
system = "x86_64-linux";
pkgs = import nixpkgs {
inherit system;
config.allowUnfree = true;
};
python = pkgs.python310;
mkShell = { dir, install }:
let
setupScript = pkgs.writeScript "setup-invokeai" ''
# This must be sourced using 'source', not executed.
${python}/bin/python -m venv ${dir}
${dir}/bin/python -m pip install ${install}
# ${dir}/bin/python -c 'import torch; assert(torch.cuda.is_available())'
source ${dir}/bin/activate
'';
in
pkgs.mkShell rec {
buildInputs = with pkgs; [
# Backend: graphics, CUDA.
cudaPackages.cudnn
cudaPackages.cuda_nvrtc
cudatoolkit
freeglut
glib
gperf
procps
libGL
libGLU
linuxPackages.nvidia_x11
python
stdenv.cc
stdenv.cc.cc.lib
xorg.libX11
xorg.libXext
xorg.libXi
xorg.libXmu
xorg.libXrandr
xorg.libXv
zlib
# Pre-commit hooks.
black
# Frontend.
yarn
nodejs
];
LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath buildInputs;
CUDA_PATH = pkgs.cudatoolkit;
EXTRA_LDFLAGS = "-L${pkgs.linuxPackages.nvidia_x11}/lib";
shellHook = ''
if [[ -f "${dir}/bin/activate" ]]; then
source "${dir}/bin/activate"
echo "Using Python: $(which python)"
else
echo "Use 'source ${setupScript}' to set up the environment."
fi
'';
};
in
{
devShells.${system} = rec {
develop = mkShell { dir = "venv"; install = "-e '.[xformers]' --extra-index-url https://download.pytorch.org/whl/cu118"; };
default = develop;
};
};
}
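
The commented-out line in setupScript hints at a sanity check that the pip-installed
torch build can actually reach CUDA. A minimal standalone sketch of that check
(assuming torch was installed into the venv by the pip step above):

    # Verify the impure environment: the venv's torch should see the GPU
    # exposed through LD_LIBRARY_PATH and linuxPackages.nvidia_x11.
    import torch

    assert torch.cuda.is_available(), "CUDA not visible; check LD_LIBRARY_PATH"
    print(f"CUDA device: {torch.cuda.get_device_name(0)}")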


@@ -274,7 +274,7 @@ class InvokeAISettings(BaseSettings):
     @classmethod
     def _excluded(self) -> List[str]:
         # internal fields that shouldn't be exposed as command line options
-        return ["type", "initconf"]
+        return ["type", "initconf", "cached_root"]

     @classmethod
     def _excluded_from_yaml(self) -> List[str]:
@@ -290,6 +290,7 @@ class InvokeAISettings(BaseSettings):
             "restore",
             "root",
             "nsfw_checker",
+            "cached_root",
         ]

     class Config:
@@ -423,6 +424,7 @@ class InvokeAIAppConfig(InvokeAISettings):
     log_level : Literal[tuple(["debug","info","warning","error","critical"])] = Field(default="info", description="Emit logging messages at this level or higher", category="Logging")
     version : bool = Field(default=False, description="Show InvokeAI version and exit", category="Other")
+    cached_root : Path = Field(default=None, description="internal use only", category="DEPRECATED")
     # fmt: on

     def parse_args(self, argv: List[str] = None, conf: DictConfig = None, clobber=False):
@@ -470,10 +472,15 @@ class InvokeAIAppConfig(InvokeAISettings):
         """
         Path to the runtime root directory
         """
-        if self.root:
-            return Path(self.root).expanduser().absolute()
+        # we cache value of root to protect against it being '.' and the cwd changing
+        if self.cached_root:
+            root = self.cached_root
+        elif self.root:
+            root = Path(self.root).expanduser().absolute()
         else:
-            return self.find_root()
+            root = self.find_root()
+        self.cached_root = root
+        return self.cached_root

     @property
     def root_dir(self) -> Path:
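
The comment in the hunk above explains the motivation: a relative root such as "."
is resolved against the current working directory, so resolving it twice around a
chdir() yields two different paths. A minimal sketch of the failure mode the cache
prevents (standalone; hypothetical paths, and it assumes the process did not start
in /tmp):

    import os
    from pathlib import Path

    first = Path(".").expanduser().absolute()   # resolved against the starting cwd
    os.chdir("/tmp")
    second = Path(".").expanduser().absolute()  # now resolves to /tmp instead
    assert first != second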


@@ -320,7 +320,7 @@ class mergeModelsForm(npyscreen.FormMultiPageAction):
     def get_model_names(self, base_model: BaseModelType = None) -> List[str]:
         model_names = [
-            info["name"]
+            info["model_name"]
             for info in self.model_manager.list_models(model_type=ModelType.Main, base_model=base_model)
             if info["model_format"] == "diffusers"
         ]
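
The one-line fix tracks a rename in the model manager's listing payload: entries
are keyed "model_name" rather than "name". An illustrative sketch of the corrected
comprehension against such a payload (sample data is made up, not from the source):

    # The entry shape here is illustrative; real ones come from list_models().
    models = [
        {"model_name": "stable-diffusion-1.5", "model_format": "diffusers"},
        {"model_name": "v1-5-pruned", "model_format": "checkpoint"},
    ]
    names = [info["model_name"] for info in models if info["model_format"] == "diffusers"]
    assert names == ["stable-diffusion-1.5"]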