@@ -7,7 +7,7 @@ import torchsde
 from tqdm.auto import trange, tqdm
 
 from . import utils
-
+import comfy.model_patcher
 
 def append_zero(x):
     return torch.cat([x, x.new_zeros([1])])
@@ -945,3 +945,56 @@ def sample_ipndm_v(model, x, sigmas, extra_args=None, callback=None, disable=Non
             buffer_model.append(d_cur.detach())
 
     return x_next
+
+
+@torch.no_grad()
+def sample_euler_pp(model, x, sigmas, extra_args=None, callback=None, disable=None):
+    extra_args = {} if extra_args is None else extra_args
+
+    temp = [0]
+    def post_cfg_function(args):
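+        # Runs after CFG combines cond/uncond: stash the uncond prediction and pass the CFG result through unchanged.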
+        temp[0] = args["uncond_denoised"]
+        return args["denoised"]
+
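+    # Register the hook on a copy of model_options so the caller's options are not mutated;
+    # disable_cfg1_optimization keeps the uncond pass running even when cfg == 1.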
+    model_options = extra_args.get("model_options", {}).copy()
extra_args["model_options"] = comfy.model_patcher.set_model_options_post_cfg_function(model_options, post_cfg_function, disable_cfg1_optimization=True)
|
|
|
|
|
|
|
|
|
|
+    s_in = x.new_ones([x.shape[0]])
+    for i in trange(len(sigmas) - 1, disable=disable):
+        sigma_hat = sigmas[i]
+        denoised = model(x, sigma_hat * s_in, **extra_args)
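+        # Step direction is built from both the CFG output (denoised) and the uncond prediction captured in temp[0].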
+        d = to_d(x - denoised + temp[0], sigma_hat, denoised)
+        if callback is not None:
+            callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised})
+        dt = sigmas[i + 1] - sigma_hat
+        # Euler method
+        x = x + d * dt
+    return x
+
+@torch.no_grad()
+def sample_euler_ancestral_pp(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None):
"""Ancestral sampling with Euler method steps."""
|
|
|
|
|
+    extra_args = {} if extra_args is None else extra_args
+    noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler
+
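+    # Same post-CFG hook as sample_euler_pp: capture the uncond prediction on every model call.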
+    temp = [0]
+    def post_cfg_function(args):
+        temp[0] = args["uncond_denoised"]
+        return args["denoised"]
+
+    model_options = extra_args.get("model_options", {}).copy()
extra_args["model_options"] = comfy.model_patcher.set_model_options_post_cfg_function(model_options, post_cfg_function, disable_cfg1_optimization=True)
|
|
|
|
|
|
|
|
|
|
+    s_in = x.new_ones([x.shape[0]])
+    for i in trange(len(sigmas) - 1, disable=disable):
+        denoised = model(x, sigmas[i] * s_in, **extra_args)
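+        # Split the step into a deterministic part (down to sigma_down) and fresh noise (sigma_up).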
+        sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta)
+        if callback is not None:
+            callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised})
+        d = to_d(x - denoised + temp[0], sigmas[i], denoised)
+        # Euler method
+        dt = sigma_down - sigmas[i]
+        x = x + d * dt
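+        # Re-inject noise scaled by sigma_up; skipped when the next sigma is 0.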
+        if sigmas[i + 1] > 0:
+            x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up
+    return x