Mirror of https://github.com/invoke-ai/InvokeAI (synced 2024-08-30 20:32:17 +00:00)
Merge branch 'development' into development
This commit is contained in: commit 5d911b43c0
@@ -32,7 +32,7 @@ class Txt2Img(Generator):
             if self.free_gpu_mem and self.model.model.device != self.model.device:
                 self.model.model.to(self.model.device)
 
-            sampler.make_schedule(ddim_num_steps=steps, ddim_eta=ddim_eta, verbose=True)
+            sampler.make_schedule(ddim_num_steps=steps, ddim_eta=ddim_eta, verbose=False)
 
             samples, _ = sampler.sample(
                 batch_size = 1,
@@ -79,17 +79,9 @@ class KSampler(Sampler):
             ddim_eta=0.0,
             verbose=False,
         )
         self.model = outer_model
         self.ddim_num_steps = ddim_num_steps
-        sigmas = K.sampling.get_sigmas_karras(
-            n=ddim_num_steps,
-            sigma_min=self.model.sigmas[0].item(),
-            sigma_max=self.model.sigmas[-1].item(),
-            rho=7.,
-            device=self.device,
-            # Birch-san recommends this, but it doesn't match the call signature in his branch of k-diffusion
-            # concat_zero=False
-        )
+        sigmas = self.model.get_sigmas(ddim_num_steps)
         self.sigmas = sigmas
 
     # ALERT: We are completely overriding the sample() method in the base class, which
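The substantive change in this hunk replaces the hand-built Karras noise schedule with the schedule exposed by the k-diffusion model wrapper. Below is a minimal sketch of the difference; it is not taken from InvokeAI itself, assumes the `k_diffusion` package and `torch` are installed, and uses a made-up sigma table in place of a real CompVisDenoiser's sigmas.

```python
import torch
import k_diffusion as K

steps = 10
model_sigmas = torch.linspace(0.03, 14.6, 1000)  # hypothetical stand-in for an LDM sigma table

# Removed behaviour: build a Karras (rho=7) schedule spanning the model's sigma range.
karras = K.sampling.get_sigmas_karras(
    n=steps,
    sigma_min=model_sigmas[0].item(),
    sigma_max=model_sigmas[-1].item(),
    rho=7.0,
    device='cpu',
)

# New behaviour: ask the model's DiscreteSchedule for its native discretized schedule.
schedule = K.external.DiscreteSchedule(model_sigmas, quantize=False)
native = schedule.get_sigmas(steps)

# Both calls return steps + 1 descending sigmas that end in 0.
print(karras.shape, native.shape)            # torch.Size([11]) torch.Size([11])
print(karras[-1].item(), native[-1].item())  # 0.0 0.0
```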
@@ -133,7 +125,8 @@ class KSampler(Sampler):
 
         # sigmas = self.model.get_sigmas(S)
         # sigmas are now set up in make_schedule - we take the last steps items
-        sigmas = self.sigmas[-S:]
+        sigmas = self.sigmas[-S-1:]
 
         if x_T is not None:
             x = x_T * sigmas[0]
         else:
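One reading of the slice change: `get_sigmas(steps)` from the previous hunk returns `steps + 1` values ending in a terminal 0, and the k-diffusion samplers run `len(sigmas) - 1` steps, so taking the last `S + 1` entries yields the requested `S` steps rather than `S - 1`. A small illustration, with a made-up schedule standing in for `self.sigmas`:

```python
import torch

# Hypothetical self.sigmas as produced by get_sigmas(10): 10 sigmas plus a trailing 0.
sigmas = torch.tensor([14.6, 9.9, 6.5, 4.1, 2.5, 1.4, 0.8, 0.4, 0.2, 0.03, 0.0])

S = 5
old_slice = sigmas[-S:]      # 5 values -> a k-diffusion sampler runs len - 1 = 4 steps
new_slice = sigmas[-S - 1:]  # 6 values -> 5 steps, as requested

print(len(old_slice) - 1, len(new_slice) - 1)  # 4 5
```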
@@ -147,7 +140,7 @@ class KSampler(Sampler):
             'uncond': unconditional_conditioning,
             'cond_scale': unconditional_guidance_scale,
         }
-        print(f'>> Sampling with k__{self.schedule}')
+        print(f'>> Sampling with k_{self.schedule}')
         return (
             K.sampling.__dict__[f'sample_{self.schedule}'](
                 model_wrap_cfg, x, sigmas, extra_args=extra_args,
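For context on the unchanged lines around the fixed log message: `K.sampling.__dict__[f'sample_{self.schedule}']` resolves the schedule name to one of k-diffusion's sampler functions (`sample_lms`, `sample_euler`, and so on), and the corrected message now matches that naming with a single underscore. A hedged sketch of the lookup, using a hypothetical `schedule` value:

```python
import k_diffusion as K

schedule = 'lms'  # hypothetical value of self.schedule
sampler_fn = K.sampling.__dict__[f'sample_{schedule}']
print(sampler_fn.__name__)               # sample_lms
print(f'>> Sampling with k_{schedule}')  # >> Sampling with k_lms
```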