fix autograd which i broke for no good reason when implementing SDXL
parent 6b877c35da
commit 5c8f91b229
@@ -273,9 +273,9 @@ class FrozenCLIPEmbedderWithCustomWordsBase(torch.nn.Module):
         # restoring original mean is likely not correct, but it seems to work well to prevent artifacts that happen otherwise
         batch_multipliers = torch.asarray(batch_multipliers).to(devices.device)
         original_mean = z.mean()
-        z *= batch_multipliers.reshape(batch_multipliers.shape + (1,)).expand(z.shape)
+        z = z * batch_multipliers.reshape(batch_multipliers.shape + (1,)).expand(z.shape)
         new_mean = z.mean()
-        z *= (original_mean / new_mean)
+        z = z * (original_mean / new_mean)

         return z

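For context, a minimal sketch (not the repository's code) of why the in-place `z *= ...` form can break autograd while the out-of-place `z = z * ...` form used in this commit does not. The tensor names below are illustrative stand-ins for the embedding `z` and the prompt-weight multipliers, assuming `z` is an intermediate activation whose value a backward pass still needs.

import torch

# `a` stands in for upstream parameters, `multipliers` for the prompt weights.
a = torch.ones(2, 3, requires_grad=True)
multipliers = torch.full((2, 3), 0.5)

# Broken variant: exp() saves its output for the backward pass, and the
# in-place multiplication overwrites that saved tensor, so backward fails.
z = a.exp()
z *= multipliers
try:
    z.sum().backward()
except RuntimeError as e:
    print("in-place op broke autograd:", e)

# Fixed variant, mirroring the commit: the out-of-place multiplication
# creates a new tensor and leaves the saved activation intact.
z = a.exp()
z = z * multipliers
z.sum().backward()
print(a.grad)  # gradients now flow back through the multiplication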