2023-05-24 05:50:55 +00:00
|
|
|
# Copyright (c) 2022 Kyle Schouviller (https://github.com/kyle0654) and the InvokeAI Team
|
2023-05-05 05:16:26 +00:00
|
|
|
|
2023-08-23 19:25:24 +00:00
|
|
|
import math
|
2023-07-03 16:17:45 +00:00
|
|
|
from typing import Literal, Optional, get_args
|
2023-05-05 05:16:26 +00:00
|
|
|
|
|
|
|
import numpy as np
|
|
|
|
from PIL import Image, ImageOps
|
|
|
|
|
2023-08-23 19:25:24 +00:00
|
|
|
from invokeai.app.invocations.primitives import ColorField, ImageField, ImageOutput
|
feat: refactor services folder/module structure
Refactor services folder/module structure.
**Motivation**
While working on our services I've repeatedly encountered circular imports and a general lack of clarity regarding where to put things. The structure introduced goes a long way towards resolving those issues, setting us up for a clean structure going forward.
**Services**
Services are now in their own folder with a few files:
- `services/{service_name}/__init__.py`: init as needed, mostly empty now
- `services/{service_name}/{service_name}_base.py`: the base class for the service
- `services/{service_name}/{service_name}_{impl_type}.py`: the default concrete implementation of the service - typically one of `sqlite`, `default`, or `memory`
- `services/{service_name}/{service_name}_common.py`: any common items - models, exceptions, utilities, etc
Though it's a bit verbose to have the service name both as the folder name and the prefix for files, I found it is _extremely_ confusing to have all of the base classes just be named `base.py`. So, at the cost of some verbosity when importing things, I've included the service name in the filename.
There are some minor logic changes. For example, in `InvocationProcessor`, instead of assigning the model manager service to a variable to be used later in the file, the service is used directly via the `Invoker`.
**Shared**
Things that are used across disparate services are in `services/shared/`:
- `default_graphs.py`: previously in `services/`
- `graphs.py`: previously in `services/`
- `pagination`: generic pagination models used in a few services
- `sqlite`: the `SqliteDatabase` class, other sqlite-specific things
2023-09-24 08:11:07 +00:00
|
|
|
from invokeai.app.services.image_records.image_records_common import ImageCategory, ResourceOrigin
|
feat(ui): add support for custom field types
Node authors may now create their own arbitrary/custom field types. Any pydantic model is supported.
Two notes:
1. Your field type's class name must be unique.
Suggest prefixing fields with something related to the node pack as a kind of namespace.
2. Custom field types function as connection-only fields.
For example, if your custom field has string attributes, you will not get a text input for that attribute when you give a node a field with your custom type.
This is the same behaviour as other complex fields that don't have custom UIs in the workflow editor - like, say, a string collection.
feat(ui): fix tooltips for custom types
We need to hold onto the original type of the field so they don't all just show up as "Unknown".
fix(ui): fix ts error with custom fields
feat(ui): custom field types connection validation
In the initial commit, a custom field's original type was added to the *field templates* only as `originalType`. Custom fields' `type` property was `"Custom"`*. This allowed for type safety throughout the UI logic.
*Actually, it was `"Unknown"`, but I changed it to custom for clarity.
Connection validation logic, however, uses the *field instance* of the node/field. Like the templates, *field instances* with custom types have their `type` set to `"Custom"`, but they didn't have an `originalType` property. As a result, all custom fields could be connected to all other custom fields.
To resolve this, we need to add `originalType` to the *field instances*, then switch the validation logic to use this instead of `type`.
This ended up needing a bit of fanagling:
- If we make `originalType` a required property on field instances, existing workflows will break during connection validation, because they won't have this property. We'd need a new layer of logic to migrate the workflows, adding the new `originalType` property.
While this layer is probably needed anyways, typing `originalType` as optional is much simpler. Workflow migration logic can come later.
(Technically, we could remove all references to field types from the workflow files, and let the templates hold all this information. This feels like a significant change and I'm reluctant to do it now.)
- Because `originalType` is optional, anywhere we care about the type of a field, we need to use it over `type`. So there are a number of `field.originalType ?? field.type` expressions. This is a bit of a gotcha, we'll need to remember this in the future.
- We use `Array.prototype.includes()` often in the workflow editor, e.g. `COLLECTION_TYPES.includes(type)`. In these cases, the const array is of type `FieldType[]`, and `type` is `FieldType`.
Because we now support custom types, the arg `type` is now widened from `FieldType` to `string`.
This causes a TS error. This behaviour is somewhat controversial (see https://github.com/microsoft/TypeScript/issues/14520). These expressions are now rewritten as `COLLECTION_TYPES.some((t) => t === type)` to satisfy TS. It's logically equivalent.
fix(ui): typo
feat(ui): add CustomCollection and CustomPolymorphic field types
feat(ui): add validation for CustomCollection & CustomPolymorphic types
- Update connection validation for custom types
- Use simple string parsing to determine if a field is a collection or polymorphic type.
- No longer need to keep a list of collection and polymorphic types.
- Added runtime checks in `baseinvocation.py` to ensure no fields are named in such a way that it could mess up the new parsing
chore(ui): remove errant console.log
fix(ui): rename 'nodes.currentConnectionFieldType' -> 'nodes.connectionStartFieldType'
This was confusingly named and kept tripping me up. Renamed to be consistent with the `reactflow` `ConnectionStartParams` type.
fix(ui): fix ts error
feat(nodes): add runtime check for custom field names
"Custom", "CustomCollection" and "CustomPolymorphic" are reserved field names.
chore(ui): add TODO for revising field type names
wip refactor fieldtype structured
wip refactor field types
wip refactor types
wip refactor types
fix node layout
refactor field types
chore: mypy
organisation
organisation
organisation
fix(nodes): fix field orig_required, field_kind and input statuses
feat(nodes): remove broken implementation of default_factory on InputField
Use of this could break connection validation due to the difference in node schemas required fields and invoke() required args.
Removed entirely for now. It wasn't ever actually used by the system, because all graphs always had values provided for fields where default_factory was used.
Also, pydantic is smart enough to not reuse the same object when specifying a default value - it clones the object first. So, the common pattern of `default_factory=list` is extraneous. It can just be `default=[]`.
fix(nodes): fix InputField name validation
workflow validation
validation
chore: ruff
feat(nodes): fix up baseinvocation comments
fix(ui): improve typing & logic of buildFieldInputTemplate
improved error handling in parseFieldType
fix: back compat for deprecated default_factory and UIType
feat(nodes): do not show node packs loaded log if none loaded
chore(ui): typegen
2023-11-17 00:32:35 +00:00
|
|
|
from invokeai.app.util.misc import SEED_MAX
|
2023-09-01 16:48:18 +00:00
|
|
|
from invokeai.backend.image_util.cv2_inpaint import cv2_inpaint
|
2023-08-23 19:25:24 +00:00
|
|
|
from invokeai.backend.image_util.lama import LaMA
|
2023-05-05 05:16:26 +00:00
|
|
|
from invokeai.backend.image_util.patchmatch import PatchMatch
|
|
|
|
|
2023-10-17 06:23:10 +00:00
|
|
|
from .baseinvocation import BaseInvocation, InputField, InvocationContext, WithMetadata, WithWorkflow, invocation
|
2023-09-01 20:36:01 +00:00
|
|
|
from .image import PIL_RESAMPLING_MAP, PIL_RESAMPLING_MODES
|
2023-05-05 05:16:26 +00:00
|
|
|
|
|
|
|
|
|
|
|
def infill_methods() -> list[str]:
    """List the infill method names available on this system, most preferred first."""
    # PatchMatch is an optional native dependency; advertise it (as the
    # preferred method) only when it is actually importable/usable.
    base_methods = ["tile", "solid", "lama", "cv2"]
    if PatchMatch.patchmatch_available():
        return ["patchmatch"] + base_methods
    return base_methods
|
|
|
|
|
|
|
|
|
|
|
|
# Literal type of the infill methods available at import time, used by node schemas.
INFILL_METHODS = Literal[tuple(infill_methods())]

# Prefer patchmatch when it is installed; otherwise fall back to tile infill.
DEFAULT_INFILL_METHOD = "patchmatch" if "patchmatch" in get_args(INFILL_METHODS) else "tile"
|
|
|
|
|
|
|
|
|
2023-08-23 19:25:24 +00:00
|
|
|
def infill_lama(im: Image.Image) -> Image.Image:
    """Infill the transparent areas of ``im`` using the LaMa inpainting model."""
    # LaMA instances are callable; construct and invoke in one expression.
    return LaMA()(im)
|
|
|
|
|
|
|
|
|
2023-05-05 05:16:26 +00:00
|
|
|
def infill_patchmatch(im: Image.Image) -> Image.Image:
    """Infill the transparent areas of ``im`` with PatchMatch.

    Returns ``im`` unchanged when it has no alpha channel or when the optional
    PatchMatch extension is unavailable.
    """
    # Without an alpha channel there is nothing to infill; without PatchMatch
    # there is nothing we can do. (Mode is checked first, as before.)
    if im.mode != "RGBA" or not PatchMatch.patchmatch_available():
        return im

    # The inverted alpha channel is the inpainting mask. patch_size is kept
    # small — increasing it significantly impacts performance.
    mask = ImageOps.invert(im.split()[-1])
    patched_np = PatchMatch.inpaint(im.convert("RGB"), mask, patch_size=3)
    return Image.fromarray(patched_np, mode="RGB")
|
|
|
|
|
|
|
|
|
2023-09-01 16:48:18 +00:00
|
|
|
def infill_cv2(im: Image.Image) -> Image.Image:
    """Infill the transparent areas of ``im`` using OpenCV inpainting."""
    # Thin wrapper kept for naming symmetry with the other infill_* helpers.
    return cv2_inpaint(im)
|
|
|
|
|
|
|
|
|
2023-05-05 05:16:26 +00:00
|
|
|
def get_tile_images(image: np.ndarray, width=8, height=8):
    """Return a read-only ``(rows, cols, height, width, depth)`` view of ``image``
    split into non-overlapping tiles.

    Returns None when the image dimensions are not exact multiples of the tile
    size. The result is a zero-copy strided view — do not write through it.
    """
    img_h, img_w, depth = image.shape
    row_stride, col_stride, chan_stride = image.strides

    tile_rows, rem_h = divmod(img_h, height)
    tile_cols, rem_w = divmod(img_w, width)
    # Tiles must cover the image exactly; bail out otherwise.
    if rem_h or rem_w:
        return None

    # Stepping one tile advances `height` rows / `width` columns; within a
    # tile the original per-row/per-column/per-channel strides apply.
    return np.lib.stride_tricks.as_strided(
        np.ravel(image),
        shape=(tile_rows, tile_cols, height, width, depth),
        strides=(height * row_stride, width * col_stride, row_stride, col_stride, chan_stride),
        writeable=False,
    )
|
|
|
|
|
|
|
|
|
2023-07-03 16:17:45 +00:00
|
|
|
def tile_fill_missing(im: Image.Image, tile_size: int = 16, seed: Optional[int] = None) -> Image.Image:
    """Fill the not-fully-opaque tiles of ``im`` with random fully-opaque tiles
    from the same image.

    The image is split into ``tile_size`` x ``tile_size`` tiles; any tile that
    contains a zero-alpha pixel is replaced by a randomly chosen tile whose
    pixels are all opaque. ``seed`` makes the random choice reproducible.
    Returns ``im`` unchanged when it has no alpha channel or when no fully
    opaque tile exists to sample from.

    NOTE(review): assumes the image dimensions are exact multiples of
    tile_size — get_tile_images returns None otherwise and the .copy() below
    would raise AttributeError. Confirm callers guarantee this.
    """
    # Only fill if there's an alpha layer
    if im.mode != "RGBA":
        return im

    a = np.asarray(im, dtype=np.uint8)

    tile_size_tuple = (tile_size, tile_size)

    # Get the image as tiles of a specified size
    # (.copy() materializes the strided view so tiles can be written below)
    tiles = get_tile_images(a, *tile_size_tuple).copy()

    # Get the mask as tiles
    tiles_mask = tiles[:, :, :, :, 3]

    # Find any mask tiles with any fully transparent pixels (we will be replacing these later)
    tmask_shape = tiles_mask.shape
    tiles_mask = tiles_mask.reshape(math.prod(tiles_mask.shape))
    # n = number of tiles, ny = pixels per tile
    n, ny = (math.prod(tmask_shape[0:2])), math.prod(tmask_shape[2:])
    tiles_mask = tiles_mask > 0
    # A tile is "valid" only if every one of its pixels has nonzero alpha.
    tiles_mask = tiles_mask.reshape((n, ny)).all(axis=1)

    # Get RGB tiles in single array and filter by the mask
    tshape = tiles.shape
    tiles_all = tiles.reshape((math.prod(tiles.shape[0:2]), *tiles.shape[2:]))
    filtered_tiles = tiles_all[tiles_mask]

    # No fully opaque tile to sample from; nothing useful can be done.
    if len(filtered_tiles) == 0:
        return im

    # Find all invalid tiles and replace with a random valid tile
    replace_count = (tiles_mask == False).sum()  # noqa: E712
    rng = np.random.default_rng(seed=seed)
    tiles_all[np.logical_not(tiles_mask)] = filtered_tiles[rng.choice(filtered_tiles.shape[0], replace_count), :, :, :]

    # Convert back to an image
    tiles_all = tiles_all.reshape(tshape)
    # swapaxes interleaves tile rows and pixel rows so the final reshape
    # reassembles the full-resolution image in row-major order.
    tiles_all = tiles_all.swapaxes(1, 2)
    st = tiles_all.reshape(
        (
            math.prod(tiles_all.shape[0:2]),
            math.prod(tiles_all.shape[2:4]),
            tiles_all.shape[4],
        )
    )
    si = Image.fromarray(st, mode="RGBA")

    return si
|
|
|
|
|
|
|
|
|
2023-11-16 00:14:26 +00:00
|
|
|
@invocation("infill_rgba", title="Solid Color Infill", tags=["image", "inpaint"], category="inpaint", version="1.1.0")
|
2023-10-17 06:23:10 +00:00
|
|
|
class InfillColorInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
2023-05-06 09:36:51 +00:00
|
|
|
"""Infills transparent areas of an image with a solid color"""
|
2023-05-05 05:16:26 +00:00
|
|
|
|
2023-08-14 03:23:09 +00:00
|
|
|
image: ImageField = InputField(description="The image to infill")
|
|
|
|
color: ColorField = InputField(
|
2023-05-05 05:16:26 +00:00
|
|
|
default=ColorField(r=127, g=127, b=127, a=255),
|
2023-05-06 09:06:39 +00:00
|
|
|
description="The color to use to infill",
|
2023-05-05 05:16:26 +00:00
|
|
|
)
|
2023-05-06 09:06:39 +00:00
|
|
|
|
|
|
|
def invoke(self, context: InvocationContext) -> ImageOutput:
|
2023-06-14 11:40:09 +00:00
|
|
|
image = context.services.images.get_pil_image(self.image.image_name)
|
2023-05-06 09:06:39 +00:00
|
|
|
|
|
|
|
solid_bg = Image.new("RGBA", image.size, self.color.tuple())
|
2023-05-24 05:50:55 +00:00
|
|
|
infilled = Image.alpha_composite(solid_bg, image.convert("RGBA"))
|
2023-05-06 09:06:39 +00:00
|
|
|
|
|
|
|
infilled.paste(image, (0, 0), image.split()[-1])
|
|
|
|
|
2023-05-24 05:50:55 +00:00
|
|
|
image_dto = context.services.images.create(
|
|
|
|
image=infilled,
|
2023-05-27 11:39:20 +00:00
|
|
|
image_origin=ResourceOrigin.INTERNAL,
|
2023-05-24 05:50:55 +00:00
|
|
|
image_category=ImageCategory.GENERAL,
|
|
|
|
node_id=self.id,
|
|
|
|
session_id=context.graph_execution_state_id,
|
2023-05-26 01:39:19 +00:00
|
|
|
is_intermediate=self.is_intermediate,
|
2023-10-17 06:23:10 +00:00
|
|
|
metadata=self.metadata,
|
2023-08-24 11:42:32 +00:00
|
|
|
workflow=self.workflow,
|
2023-05-06 09:06:39 +00:00
|
|
|
)
|
|
|
|
|
2023-05-24 05:50:55 +00:00
|
|
|
return ImageOutput(
|
2023-06-14 11:40:09 +00:00
|
|
|
image=ImageField(image_name=image_dto.image_name),
|
2023-05-24 05:50:55 +00:00
|
|
|
width=image_dto.width,
|
|
|
|
height=image_dto.height,
|
2023-05-06 09:06:39 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
|
feat(ui): add support for custom field types
Node authors may now create their own arbitrary/custom field types. Any pydantic model is supported.
Two notes:
1. Your field type's class name must be unique.
Suggest prefixing fields with something related to the node pack as a kind of namespace.
2. Custom field types function as connection-only fields.
For example, if your custom field has string attributes, you will not get a text input for that attribute when you give a node a field with your custom type.
This is the same behaviour as other complex fields that don't have custom UIs in the workflow editor - like, say, a string collection.
feat(ui): fix tooltips for custom types
We need to hold onto the original type of the field so they don't all just show up as "Unknown".
fix(ui): fix ts error with custom fields
feat(ui): custom field types connection validation
In the initial commit, a custom field's original type was added to the *field templates* only as `originalType`. Custom fields' `type` property was `"Custom"`*. This allowed for type safety throughout the UI logic.
*Actually, it was `"Unknown"`, but I changed it to custom for clarity.
Connection validation logic, however, uses the *field instance* of the node/field. Like the templates, *field instances* with custom types have their `type` set to `"Custom"`, but they didn't have an `originalType` property. As a result, all custom fields could be connected to all other custom fields.
To resolve this, we need to add `originalType` to the *field instances*, then switch the validation logic to use this instead of `type`.
This ended up needing a bit of fanagling:
- If we make `originalType` a required property on field instances, existing workflows will break during connection validation, because they won't have this property. We'd need a new layer of logic to migrate the workflows, adding the new `originalType` property.
While this layer is probably needed anyways, typing `originalType` as optional is much simpler. Workflow migration logic can come later.
(Technically, we could remove all references to field types from the workflow files, and let the templates hold all this information. This feels like a significant change and I'm reluctant to do it now.)
- Because `originalType` is optional, anywhere we care about the type of a field, we need to use it over `type`. So there are a number of `field.originalType ?? field.type` expressions. This is a bit of a gotcha, we'll need to remember this in the future.
- We use `Array.prototype.includes()` often in the workflow editor, e.g. `COLLECTION_TYPES.includes(type)`. In these cases, the const array is of type `FieldType[]`, and `type` is is `FieldType`.
Because we now support custom types, the arg `type` is now widened from `FieldType` to `string`.
This causes a TS error. This behaviour is somewhat controversial (see https://github.com/microsoft/TypeScript/issues/14520). These expressions are now rewritten as `COLLECTION_TYPES.some((t) => t === type)` to satisfy TS. It's logically equivalent.
fix(ui): typo
feat(ui): add CustomCollection and CustomPolymorphic field types
feat(ui): add validation for CustomCollection & CustomPolymorphic types
- Update connection validation for custom types
- Use simple string parsing to determine if a field is a collection or polymorphic type.
- No longer need to keep a list of collection and polymorphic types.
- Added runtime checks in `baseinvocation.py` to ensure no fields are named in such a way that it could mess up the new parsing
chore(ui): remove errant console.log
fix(ui): rename 'nodes.currentConnectionFieldType' -> 'nodes.connectionStartFieldType'
This was confusingly named and kept tripping me up. Renamed to be consistent with the `reactflow` `ConnectionStartParams` type.
fix(ui): fix ts error
feat(nodes): add runtime check for custom field names
"Custom", "CustomCollection" and "CustomPolymorphic" are reserved field names.
chore(ui): add TODO for revising field type names
wip refactor fieldtype structured
wip refactor field types
wip refactor types
wip refactor types
fix node layout
refactor field types
chore: mypy
organisation
organisation
organisation
fix(nodes): fix field orig_required, field_kind and input statuses
feat(nodes): remove broken implementation of default_factory on InputField
Use of this could break connection validation due to the difference in node schemas required fields and invoke() required args.
Removed entirely for now. It wasn't ever actually used by the system, because all graphs always had values provided for fields where default_factory was used.
Also, pydantic is smart enough to not reuse the same object when specifying a default value - it clones the object first. So, the common pattern of `default_factory=list` is extraneous. It can just be `default=[]`.
fix(nodes): fix InputField name validation
workflow validation
validation
chore: ruff
feat(nodes): fix up baseinvocation comments
fix(ui): improve typing & logic of buildFieldInputTemplate
improved error handling in parseFieldType
fix: back compat for deprecated default_factory and UIType
feat(nodes): do not show node packs loaded log if none loaded
chore(ui): typegen
2023-11-17 00:32:35 +00:00
|
|
|
@invocation("infill_tile", title="Tile Infill", tags=["image", "inpaint"], category="inpaint", version="1.1.1")
|
2023-10-17 06:23:10 +00:00
|
|
|
class InfillTileInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
2023-05-06 09:06:39 +00:00
|
|
|
"""Infills transparent areas of an image with tiles of the image"""
|
|
|
|
|
2023-08-14 03:23:09 +00:00
|
|
|
image: ImageField = InputField(description="The image to infill")
|
|
|
|
tile_size: int = InputField(default=32, ge=1, description="The tile size (px)")
|
|
|
|
seed: int = InputField(
|
feat(ui): add support for custom field types
Node authors may now create their own arbitrary/custom field types. Any pydantic model is supported.
Two notes:
1. Your field type's class name must be unique.
Suggest prefixing fields with something related to the node pack as a kind of namespace.
2. Custom field types function as connection-only fields.
For example, if your custom field has string attributes, you will not get a text input for that attribute when you give a node a field with your custom type.
This is the same behaviour as other complex fields that don't have custom UIs in the workflow editor - like, say, a string collection.
feat(ui): fix tooltips for custom types
We need to hold onto the original type of the field so they don't all just show up as "Unknown".
fix(ui): fix ts error with custom fields
feat(ui): custom field types connection validation
In the initial commit, a custom field's original type was added to the *field templates* only as `originalType`. Custom fields' `type` property was `"Custom"`*. This allowed for type safety throughout the UI logic.
*Actually, it was `"Unknown"`, but I changed it to custom for clarity.
Connection validation logic, however, uses the *field instance* of the node/field. Like the templates, *field instances* with custom types have their `type` set to `"Custom"`, but they didn't have an `originalType` property. As a result, all custom fields could be connected to all other custom fields.
To resolve this, we need to add `originalType` to the *field instances*, then switch the validation logic to use this instead of `type`.
This ended up needing a bit of fanagling:
- If we make `originalType` a required property on field instances, existing workflows will break during connection validation, because they won't have this property. We'd need a new layer of logic to migrate the workflows, adding the new `originalType` property.
While this layer is probably needed anyways, typing `originalType` as optional is much simpler. Workflow migration logic can come layer.
(Technically, we could remove all references to field types from the workflow files, and let the templates hold all this information. This feels like a significant change and I'm reluctant to do it now.)
- Because `originalType` is optional, anywhere we care about the type of a field, we need to use it over `type`. So there are a number of `field.originalType ?? field.type` expressions. This is a bit of a gotcha, we'll need to remember this in the future.
- We use `Array.prototype.includes()` often in the workflow editor, e.g. `COLLECTION_TYPES.includes(type)`. In these cases, the const array is of type `FieldType[]`, and `type` is is `FieldType`.
Because we now support custom types, the arg `type` is now widened from `FieldType` to `string`.
This causes a TS error. This behaviour is somewhat controversial (see https://github.com/microsoft/TypeScript/issues/14520). These expressions are now rewritten as `COLLECTION_TYPES.some((t) => t === type)` to satisfy TS. It's logically equivalent.
fix(ui): typo
feat(ui): add CustomCollection and CustomPolymorphic field types
feat(ui): add validation for CustomCollection & CustomPolymorphic types
- Update connection validation for custom types
- Use simple string parsing to determine if a field is a collection or polymorphic type.
- No longer need to keep a list of collection and polymorphic types.
- Added runtime checks in `baseinvocation.py` to ensure no fields are named in such a way that it could mess up the new parsing
chore(ui): remove errant console.log
fix(ui): rename 'nodes.currentConnectionFieldType' -> 'nodes.connectionStartFieldType'
This was confusingly named and kept tripping me up. Renamed to be consistent with the `reactflow` `ConnectionStartParams` type.
fix(ui): fix ts error
feat(nodes): add runtime check for custom field names
"Custom", "CustomCollection" and "CustomPolymorphic" are reserved field names.
chore(ui): add TODO for revising field type names
wip refactor fieldtype structured
wip refactor field types
wip refactor types
wip refactor types
fix node layout
refactor field types
chore: mypy
organisation
organisation
organisation
fix(nodes): fix field orig_required, field_kind and input statuses
feat(nodes): remove broken implementation of default_factory on InputField
Use of this could break connection validation due to the difference in node schemas required fields and invoke() required args.
Removed entirely for now. It wasn't ever actually used by the system, because all graphs always had values provided for fields where default_factory was used.
Also, pydantic is smart enough to not reuse the same object when specifying a default value - it clones the object first. So, the common pattern of `default_factory=list` is extraneous. It can just be `default=[]`.
fix(nodes): fix InputField name validation
workflow validation
validation
chore: ruff
feat(nodes): fix up baseinvocation comments
fix(ui): improve typing & logic of buildFieldInputTemplate
improved error handling in parseFieldType
fix: back compat for deprecated default_factory and UIType
feat(nodes): do not show node packs loaded log if none loaded
chore(ui): typegen
2023-11-17 00:32:35 +00:00
|
|
|
default=0,
|
2023-05-06 09:06:39 +00:00
|
|
|
ge=0,
|
|
|
|
le=SEED_MAX,
|
|
|
|
description="The seed to use for tile generation (omit for random)",
|
2023-05-05 05:16:26 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
def invoke(self, context: InvocationContext) -> ImageOutput:
|
2023-06-14 11:40:09 +00:00
|
|
|
image = context.services.images.get_pil_image(self.image.image_name)
|
2023-05-05 05:16:26 +00:00
|
|
|
|
2023-05-06 09:06:39 +00:00
|
|
|
infilled = tile_fill_missing(image.copy(), seed=self.seed, tile_size=self.tile_size)
|
|
|
|
infilled.paste(image, (0, 0), image.split()[-1])
|
|
|
|
|
2023-05-24 05:50:55 +00:00
|
|
|
image_dto = context.services.images.create(
|
|
|
|
image=infilled,
|
2023-05-27 11:39:20 +00:00
|
|
|
image_origin=ResourceOrigin.INTERNAL,
|
2023-05-24 05:50:55 +00:00
|
|
|
image_category=ImageCategory.GENERAL,
|
|
|
|
node_id=self.id,
|
|
|
|
session_id=context.graph_execution_state_id,
|
2023-05-26 01:39:19 +00:00
|
|
|
is_intermediate=self.is_intermediate,
|
2023-10-17 06:23:10 +00:00
|
|
|
metadata=self.metadata,
|
2023-08-24 11:42:32 +00:00
|
|
|
workflow=self.workflow,
|
2023-05-06 09:06:39 +00:00
|
|
|
)
|
|
|
|
|
2023-05-24 05:50:55 +00:00
|
|
|
return ImageOutput(
|
2023-06-14 11:40:09 +00:00
|
|
|
image=ImageField(image_name=image_dto.image_name),
|
2023-05-24 05:50:55 +00:00
|
|
|
width=image_dto.width,
|
|
|
|
height=image_dto.height,
|
2023-05-06 09:06:39 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
|
2023-09-04 08:11:56 +00:00
|
|
|
@invocation(
    "infill_patchmatch", title="PatchMatch Infill", tags=["image", "inpaint"], category="inpaint", version="1.1.0"
)
class InfillPatchMatchInvocation(BaseInvocation, WithWorkflow, WithMetadata):
    """Infills transparent areas of an image using the PatchMatch algorithm"""

    image: ImageField = InputField(description="The image to infill")
    downscale: float = InputField(default=2.0, gt=0, description="Run patchmatch on downscaled image to speedup infill")
    resample_mode: PIL_RESAMPLING_MODES = InputField(default="bicubic", description="The resampling mode")

    def invoke(self, context: InvocationContext) -> ImageOutput:
        source = context.services.images.get_pil_image(self.image.image_name).convert("RGBA")

        resample = PIL_RESAMPLING_MAP[self.resample_mode]

        # PatchMatch is expensive; run it on a downscaled copy for speed.
        small_size = (int(source.width / self.downscale), int(source.height / self.downscale))
        small = source.copy().resize(small_size, resample=resample)

        if not PatchMatch.patchmatch_available():
            raise ValueError("PatchMatch is not available on this system")
        infilled = small if False else infill_patchmatch(small)

        # Scale the infilled result back up to the source resolution.
        infilled = infilled.resize(
            (source.width, source.height),
            resample=resample,
        )

        # Keep the original pixels wherever they were opaque.
        infilled.paste(source, (0, 0), mask=source.split()[-1])

        image_dto = context.services.images.create(
            image=infilled,
            image_origin=ResourceOrigin.INTERNAL,
            image_category=ImageCategory.GENERAL,
            node_id=self.id,
            session_id=context.graph_execution_state_id,
            is_intermediate=self.is_intermediate,
            metadata=self.metadata,
            workflow=self.workflow,
        )

        return ImageOutput(
            image=ImageField(image_name=image_dto.image_name),
            width=image_dto.width,
            height=image_dto.height,
        )
|
2023-08-23 19:25:24 +00:00
|
|
|
|
|
|
|
|
2023-11-16 00:14:26 +00:00
|
|
|
@invocation("infill_lama", title="LaMa Infill", tags=["image", "inpaint"], category="inpaint", version="1.1.0")
|
2023-10-17 06:23:10 +00:00
|
|
|
class LaMaInfillInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
2023-08-23 19:25:24 +00:00
|
|
|
"""Infills transparent areas of an image using the LaMa model"""
|
|
|
|
|
|
|
|
image: ImageField = InputField(description="The image to infill")
|
|
|
|
|
|
|
|
def invoke(self, context: InvocationContext) -> ImageOutput:
|
|
|
|
image = context.services.images.get_pil_image(self.image.image_name)
|
|
|
|
|
|
|
|
infilled = infill_lama(image.copy())
|
|
|
|
|
|
|
|
image_dto = context.services.images.create(
|
|
|
|
image=infilled,
|
|
|
|
image_origin=ResourceOrigin.INTERNAL,
|
|
|
|
image_category=ImageCategory.GENERAL,
|
|
|
|
node_id=self.id,
|
|
|
|
session_id=context.graph_execution_state_id,
|
|
|
|
is_intermediate=self.is_intermediate,
|
2023-10-17 06:23:10 +00:00
|
|
|
metadata=self.metadata,
|
|
|
|
workflow=self.workflow,
|
2023-08-23 19:25:24 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
return ImageOutput(
|
|
|
|
image=ImageField(image_name=image_dto.image_name),
|
|
|
|
width=image_dto.width,
|
|
|
|
height=image_dto.height,
|
|
|
|
)
|
2023-09-01 16:48:18 +00:00
|
|
|
|
|
|
|
|
2023-11-16 00:14:26 +00:00
|
|
|
@invocation("infill_cv2", title="CV2 Infill", tags=["image", "inpaint"], category="inpaint", version="1.1.0")
|
2023-10-17 06:23:10 +00:00
|
|
|
class CV2InfillInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
2023-09-01 16:48:18 +00:00
|
|
|
"""Infills transparent areas of an image using OpenCV Inpainting"""
|
|
|
|
|
|
|
|
image: ImageField = InputField(description="The image to infill")
|
|
|
|
|
|
|
|
def invoke(self, context: InvocationContext) -> ImageOutput:
|
|
|
|
image = context.services.images.get_pil_image(self.image.image_name)
|
|
|
|
|
|
|
|
infilled = infill_cv2(image.copy())
|
|
|
|
|
|
|
|
image_dto = context.services.images.create(
|
|
|
|
image=infilled,
|
|
|
|
image_origin=ResourceOrigin.INTERNAL,
|
|
|
|
image_category=ImageCategory.GENERAL,
|
|
|
|
node_id=self.id,
|
|
|
|
session_id=context.graph_execution_state_id,
|
|
|
|
is_intermediate=self.is_intermediate,
|
2023-10-17 06:23:10 +00:00
|
|
|
metadata=self.metadata,
|
|
|
|
workflow=self.workflow,
|
2023-09-01 16:48:18 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
return ImageOutput(
|
|
|
|
image=ImageField(image_name=image_dto.image_name),
|
|
|
|
width=image_dto.width,
|
|
|
|
height=image_dto.height,
|
|
|
|
)
|