prevent replacing torch_randn globally (instead replacing k_diffusion.sampling.torch) and add a setting to disable this all

This commit is contained in:
AUTOMATIC
2022-09-16 09:47:03 +03:00
parent 9d40212485
commit 87e8b9a2ab
3 changed files with 23 additions and 7 deletions

View File

@@ -122,7 +122,7 @@ def slerp(val, low, high):
 def create_random_tensors(shape, seeds, subseeds=None, subseed_strength=0.0, seed_resize_from_h=0, seed_resize_from_w=0, p=None):
     xs = []
-    if p is not None and p.sampler is not None and len(seeds) > 1:
+    if p is not None and p.sampler is not None and len(seeds) > 1 and opts.enable_batch_seeds:
         sampler_noises = [[] for _ in range(p.sampler.number_of_needed_noises(p))]
     else:
         sampler_noises = None