Mirror of https://github.com/invoke-ai/InvokeAI (synced 2024-08-30 20:32:17 +00:00)
Refactored ControlNet nodes so they subclass from PreprocessedControlInvocation, and only need to override run_processor(image) (instead of reimplementing invoke())
commit f2f4c37f19 (parent 0864fca641)
@@ -30,7 +30,7 @@ class ControlField(BaseModel):
 
 
 class ControlOutput(BaseInvocationOutput):
-    """Base class for invocations that output ControlNet info"""
+    """node output for ControlNet info"""
 
     # fmt: off
     type: Literal["control_output"] = "control_output"
@@ -38,12 +38,11 @@ class ControlOutput(BaseInvocationOutput):
     # image: ImageField = Field(default=None, description="outputs just them image info (which is also included in control output)")
     # fmt: on
 
-
-class CannyControlInvocation(BaseInvocation, PILInvocationConfig):
-    """Canny edge detection for ControlNet"""
+class PreprocessedControlInvocation(BaseInvocation, PILInvocationConfig):
+    """Base class for invocations that preprocess images for ControlNet"""
 
     # fmt: off
-    type: Literal["cannycontrol"] = "cannycontrol"
+    type: Literal["preprocessed_control"] = "preprocessed_control"
 
     # Inputs
     image: ImageField = Field(default=None, description="image to process")
@@ -54,17 +53,20 @@ class CannyControlInvocation(BaseInvocation, PILInvocationConfig):
     # end_step_percent: float = Field(default=1, ge=0, le=1,
     #                                  description="% of total steps at which controlnet is last applied")
     # guess_mode: bool = Field(default=False, description="use guess mode (controlnet ignores prompt)")
 
-    low_threshold: float = Field(default=100, ge=0, description="low threshold of Canny pixel gradient")
-    high_threshold: float = Field(default=200, ge=0, description="high threshold of Canny pixel gradient")
     # fmt: on
 
+    # This super class handles invoke() call, which in turn calls run_processor(image)
+    # subclasses override run_processor instead of implementing their own invoke()
+    def run_processor(self, image):
+        # super class pass through of image
+        return image
 
     def invoke(self, context: InvocationContext) -> ControlOutput:
         image = context.services.images.get(
             self.image.image_type, self.image.image_name
         )
-        canny_processor = CannyDetector()
-        processed_image = canny_processor(image, self.low_threshold, self.high_threshold)
+        # image type should be PIL.PngImagePlugin.PngImageFile ?
+        processed_image = self.run_processor(image)
         image_type = ImageType.INTERMEDIATE
         image_name = context.services.images.create_name(
             context.graph_execution_state_id, self.id
@@ -87,3 +89,23 @@ class CannyControlInvocation(BaseInvocation, PILInvocationConfig):
             )
         )
 
+
+class CannyControlInvocation(PreprocessedControlInvocation, PILInvocationConfig):
+    """Canny edge detection for ControlNet"""
+
+    # fmt: off
+    type: Literal["cannycontrol"] = "cannycontrol"
+    # Inputs
+    low_threshold: float = Field(default=100, ge=0, description="low threshold of Canny pixel gradient")
+    high_threshold: float = Field(default=200, ge=0, description="high threshold of Canny pixel gradient")
+    # fmt: on
+
+    def run_processor(self, image):
+        print("**** running Canny processor ****")
+        print("image type: ", type(image))
+        canny_processor = CannyDetector()
+        processed_image = canny_processor(image, self.low_threshold, self.high_threshold)
+        print("processed image type: ", type(image))
+        return processed_image
+
+
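
As a usage sketch of the pattern this commit introduces (hypothetical, not part of the diff above): a further preprocessor node only has to subclass PreprocessedControlInvocation and override run_processor(); the inherited invoke() handles fetching the input image, calling the processor, and building the ControlOutput. The BlurControlInvocation name, the "blurcontrol" type literal, and the radius field below are illustrative assumptions; PreprocessedControlInvocation, PILInvocationConfig, and Field are the names defined or imported in the module being changed, and ImageFilter is standard PIL.

from typing import Literal

from PIL import ImageFilter
from pydantic import Field


class BlurControlInvocation(PreprocessedControlInvocation, PILInvocationConfig):
    """Gaussian blur preprocessing for ControlNet (illustrative example only)"""

    # fmt: off
    type: Literal["blurcontrol"] = "blurcontrol"
    # Inputs
    radius: float = Field(default=4.0, ge=0, description="blur radius in pixels")
    # fmt: on

    def run_processor(self, image):
        # image is the PIL image that the base class invoke() loaded from context
        return image.filter(ImageFilter.GaussianBlur(self.radius))

Because invoke() now lives only in the base class, fixes to image loading, naming, or ControlOutput construction apply to every processor node at once.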