Merge branch 'main' into development

- this syncs documentation and code
Lincoln Stein
2022-10-09 14:47:27 -04:00
100 changed files with 1976 additions and 1107 deletions


@@ -2,7 +2,7 @@
 import k_diffusion as K
 import torch
 import torch.nn as nn
-from ldm.dream.devices import choose_torch_device
+from ldm.invoke.devices import choose_torch_device
 from ldm.models.diffusion.sampler import Sampler
 from ldm.util import rand_perlin_2d
 from ldm.modules.diffusionmodules.util import (
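The only functional change in this first hunk is the package rename: choose_torch_device now lives under ldm.invoke rather than the old ldm.dream namespace. As a rough, hedged sketch (not the actual InvokeAI implementation), a device-selection helper of this kind typically just probes the available backends in order of preference:

import torch

def choose_torch_device() -> str:
    # Sketch only: prefer CUDA, then Apple-Silicon MPS, then CPU (assumed ordering).
    if torch.cuda.is_available():
        return 'cuda'
    if getattr(torch.backends, 'mps', None) is not None and torch.backends.mps.is_available():
        return 'mps'
    return 'cpu'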
@@ -57,8 +57,9 @@ class KSampler(Sampler):
 schedule,
 steps=model.num_timesteps,
 )
-self.ds = None
-self.s_in = None
+self.sigmas = None
+self.ds = None
+self.s_in = None
 
 def forward(self, x, sigma, uncond, cond, cond_scale):
 x_in = torch.cat([x] * 2)
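The second hunk adds a self.sigmas placeholder next to the existing ds and s_in attributes; the log message introduced in the next hunk reads len(self.sigmas), so the noise schedule is evidently cached on the sampler instance. As an illustrative sketch only (the helper name make_sigma_schedule is assumed, not a line from this file), a k-diffusion sampler typically obtains that schedule from the wrapped model:

import k_diffusion as K
import torch.nn as nn

def make_sigma_schedule(model: nn.Module, steps: int):
    # Wrap the latent-diffusion model for k-diffusion and ask it for a
    # noise schedule; get_sigmas(n) returns n+1 values descending to zero.
    model_wrap = K.external.CompVisDenoiser(model)
    return model_wrap.get_sigmas(steps)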
@@ -190,7 +191,7 @@ class KSampler(Sampler):
 'uncond': unconditional_conditioning,
 'cond_scale': unconditional_guidance_scale,
 }
-print(f'>> Sampling with k_{self.schedule}')
+print(f'>> Sampling with k_{self.schedule} starting at step {len(self.sigmas)-S-1} of {len(self.sigmas)-1} ({S} new sampling steps)')
 return (
 K.sampling.__dict__[f'sample_{self.schedule}'](
 model_wrap_cfg, x, sigmas, extra_args=extra_args,
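The reworded log line spells out where sampling starts inside the cached schedule: self.sigmas holds the full list of noise levels, the last index is len(self.sigmas)-1, and only the final S of those steps are actually sampled. A worked example with assumed numbers (a 50-step schedule stored as 51 sigmas and S=30 new steps, as in an img2img-style partial run):

# Worked example of the new message (all values assumed for illustration).
num_sigmas = 51                    # a 50-step schedule plus the terminal sigma
S = 30                             # denoising steps actually run
start_step = num_sigmas - S - 1    # 20
total_steps = num_sigmas - 1       # 50
print(f'>> Sampling with k_lms starting at step {start_step} of {total_steps} ({S} new sampling steps)')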
@@ -199,6 +200,8 @@ class KSampler(Sampler):
 None,
 )
 
+# this code will support inpainting if and when ksampler API modified or
+# a workaround is found.
 @torch.no_grad()
 def p_sample(
 self,