UNet from Scratch

The backend rewrite is now about 50% finished. The estimated finish is in 72 hours. After that, many new features will land.
Author: layerdiffusion
Date:   2024-08-01 21:19:41 -07:00
parent e3522c8919
commit bc9977a305

20 changed files with 1393 additions and 56 deletions

backend/nn/unet.py (normal file, 1008 additions): diff suppressed because it is too large.

@@ -381,7 +381,7 @@ class IntegratedAutoencoderKL(nn.Module, ConfigMixin):
         norm_num_groups: int = 32,
         sample_size: int = 32,
         scaling_factor: float = 0.18215,
-        shift_factor: Optional[float] = None,
+        shift_factor: Optional[float] = 0.0,
         latents_mean: Optional[Tuple[float]] = None,
         latents_std: Optional[Tuple[float]] = None,
         force_upcast: float = True,
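
A quick sanity check on the new default: with shift_factor at 0.0, the subtraction in the latent pre-processing (added further down in this diff) is a no-op, so SD-style VAEs that only define scaling_factor behave exactly as before. A minimal sketch with hypothetical values, not part of the commit:

    scaling_factor = 0.18215  # SD-style scaling, value taken from the diff context above
    shift_factor = 0.0        # new default: the shift term vanishes

    latent = 2.0  # hypothetical latent value
    assert (latent - shift_factor) * scaling_factor == latent * scaling_factor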
@@ -403,6 +403,9 @@ class IntegratedAutoencoderKL(nn.Module, ConfigMixin):
         self.scaling_factor = scaling_factor
         self.shift_factor = shift_factor
 
+        if not isinstance(self.shift_factor, float):
+            self.shift_factor = 0.0
+
     def encode(self, x, regulation=None):
         z = self.encoder(x)
         z = self.quant_conv(z)
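
The isinstance guard is what keeps a config-supplied shift_factor of None out of the latent arithmetic; without it, the subtraction in process_in (added in the next hunk) would raise. A standalone illustration, hypothetical and not from the commit:

    shift_factor = None  # what a config without a shift would pass
    try:
        _ = (1.5 - shift_factor) * 0.18215  # same form as process_in
    except TypeError as err:
        print(err)  # unsupported operand type(s) for -: 'float' and 'NoneType'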
@@ -416,3 +419,9 @@ class IntegratedAutoencoderKL(nn.Module, ConfigMixin):
         z = self.post_quant_conv(z)
         x = self.decoder(z)
         return x
+
+    def process_in(self, latent):
+        return (latent - self.shift_factor) * self.scaling_factor
+
+    def process_out(self, latent):
+        return (latent / self.scaling_factor) + self.shift_factor
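
process_in and process_out are exact algebraic inverses, so a latent round-trips unchanged for any non-zero scaling_factor and float shift_factor. A minimal standalone sketch; the values are hypothetical stand-ins for a shifted-VAE config, not taken from this commit:

    scaling_factor = 0.3611  # hypothetical value for a VAE that uses a shift
    shift_factor = 0.1159    # hypothetical value

    def process_in(latent):
        return (latent - shift_factor) * scaling_factor

    def process_out(latent):
        return (latent / scaling_factor) + shift_factor

    latent = 1.5
    assert abs(process_out(process_in(latent)) - latent) < 1e-9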