fix autograd which i broke for no good reason when implementing SDXL

AUTOMATIC1111 2023-07-26 13:03:52 +03:00
parent 6b877c35da
commit 5c8f91b229


@@ -273,9 +273,9 @@ class FrozenCLIPEmbedderWithCustomWordsBase(torch.nn.Module):
         # restoring original mean is likely not correct, but it seems to work well to prevent artifacts that happen otherwise
         batch_multipliers = torch.asarray(batch_multipliers).to(devices.device)
         original_mean = z.mean()
-        z *= batch_multipliers.reshape(batch_multipliers.shape + (1,)).expand(z.shape)
+        z = z * batch_multipliers.reshape(batch_multipliers.shape + (1,)).expand(z.shape)
         new_mean = z.mean()
-        z *= (original_mean / new_mean)
+        z = z * (original_mean / new_mean)
         return z
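
For context, here is a minimal standalone sketch (not the webui code; the names and the sigmoid op are illustrative) of the failure mode the commit message refers to: an in-place multiply such as "z *= ..." can overwrite a tensor that autograd saved for the backward pass, while the out-of-place "z = z * ..." form creates a new tensor and leaves the saved one intact.

import torch

# Illustrative only: w stands in for a trainable parameter, sigmoid for any
# op that saves its output for the backward pass.
w = torch.ones(3, requires_grad=True)

# Out-of-place multiply: a new tensor is created, the output saved by
# sigmoid stays intact, and gradients flow back to w.
z = torch.sigmoid(w)
z = z * 2.0
z.sum().backward()
print(w.grad)

# In-place multiply: overwrites the tensor that sigmoid's backward still
# needs, so backward() raises "one of the variables needed for gradient
# computation has been modified by an inplace operation".
w.grad = None
z = torch.sigmoid(w)
z *= 2.0
try:
    z.sum().backward()
except RuntimeError as e:
    print(e)

The hunk above makes exactly this swap from the in-place to the out-of-place form, so code that needs gradients through z can backpropagate again.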