import warnings
import math
from PIL import Image, ImageFilter

class Outcrop(object):
    def __init__(
            self,
            image,
            generate,          # current generate object
    ):
        self.image = image
        self.generate = generate
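        # note: process() relies on `generate` providing sampler,
        # sampler_name, _set_sampler(), ddim_eta and prompt2image()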

    def process (
            self,
            extents:dict,
            opt,               # current options
            orig_opt,          # ones originally used to generate the image
            image_callback = None,
            prefix = None
    ):
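        '''
        Extend self.image in the directions given by `extents`, a dict
        mapping 'top'/'bottom'/'left'/'right' to a pixel count, e.g.
        {'top': 64, 'right': 64}, then run an img2img pass over the
        enlarged canvas so the newly added (transparent) border is
        filled in. Returns the result of generate.prompt2image().
        '''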
        # grow and mask the image
        extended_image = self._extend_all(extents)

        # switch samplers temporarily
        curr_sampler = self.generate.sampler
        self.generate.sampler_name = opt.sampler_name
        self.generate._set_sampler()
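
        # wrap the caller's image_callback so results are reported under the
        # original image's seed and filename prefix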
        def wrapped_callback(img,seed,**kwargs):
            # guard: process() may be called without an image_callback
            if image_callback is not None:
                image_callback(img,orig_opt.seed,use_prefix=prefix,**kwargs)

        result = self.generate.prompt2image(
            orig_opt.prompt,
            # seed = orig_opt.seed,   # uncomment to make it deterministic
            sampler = self.generate.sampler,
            steps = opt.steps,
            cfg_scale = opt.cfg_scale,
            ddim_eta = self.generate.ddim_eta,
            width = extended_image.width,
            height = extended_image.height,
            init_img = extended_image,
            strength = opt.strength,
            image_callback = wrapped_callback,
            seam_size = 32,
            seam_blur = 10,
            force_outpaint = True,   # this just stops the warning about erased regions
        )
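        # note: seam_size/seam_blur above ask prompt2image to blend the new
        # border back into the original pixels (part of its outpainting
        # options); the blending itself lives in the generate object, not here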

        # swap sampler back
        self.generate.sampler = curr_sampler

        return result

    def _extend_all(
            self,
            extents:dict,
    ) -> Image:
        '''
        Extend the image in direction ('top','bottom','left','right') by
        the indicated number of pixels (rounded up to a multiple of 64).
        The image canvas is enlarged, and the new rectangular section is
        left transparent so that the outpainting pass in process() can
        fill it in.
        '''
        image = self.image
        for direction in extents:
            assert direction in ['top', 'left', 'bottom', 'right'], 'Direction must be one of "top", "left", "bottom", "right"'
            pixels = extents[direction]
            # round pixels up to the nearest 64
            pixels = math.ceil(pixels/64) * 64
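            # e.g. a request of 70 pixels becomes math.ceil(70/64) * 64 == 128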
            print(f'>> extending image {direction}ward by {pixels} pixels')
            image = self._rotate(image,direction)
            image = self._extend(image,pixels)
            image = self._rotate(image,direction,reverse=True)
        return image

    def _rotate(self,image:Image,direction:str,reverse=False) -> Image:
        '''
        Rotates image so that the area to extend is always at the top.
        Simplifies logic later. The reverse argument, if true, will undo the
        previous transpose.
        '''
        transposes = {
            'right': ['ROTATE_90','ROTATE_270'],
            'bottom': ['ROTATE_180','ROTATE_180'],
            'left': ['ROTATE_270','ROTATE_90']
        }
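        # PIL's ROTATE_* transposes are counter-clockwise, so a 'right'
        # extension is first turned into a 'top' extension with ROTATE_90
        # and undone afterwards with ROTATE_270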
        if direction not in transposes:
            return image
        transpose = transposes[direction][1 if reverse else 0]
        return image.transpose(Image.Transpose.__dict__[transpose])

    def _extend(self,image:Image,pixels:int) -> Image:
        extended_img = Image.new('RGBA',(image.width,image.height+pixels))
        extended_img.paste((0,0,0),[0,0,image.width,image.height+pixels])
        extended_img.paste(image,box=(0,pixels))

        # now make the top part transparent to use as a mask
        alpha = extended_img.getchannel('A')
        alpha.paste(0,(0,0,extended_img.width,pixels))
        extended_img.putalpha(alpha)

        return extended_img
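
# A minimal usage sketch (illustrative only -- the Generate import/constructor
# and the Namespace fields below are assumptions about the surrounding
# application, not a tested invocation):
#
#   from argparse import Namespace
#   from PIL import Image
#   from ldm.generate import Generate
#
#   gr       = Generate()
#   source   = Image.open('test-pictures/curly.png')
#   opt      = Namespace(sampler_name='k_lms', steps=50, cfg_scale=7.5, strength=0.83)
#   orig_opt = Namespace(prompt='pretty curly-haired redhead woman', seed=942491079)
#
#   # grow the canvas by 64 pixels (rounded up to a multiple of 64) on two sides
#   results = Outcrop(source, gr).process({'top': 64, 'right': 64}, opt, orig_opt)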