From a5436a3ac0d0048a36f0652bde56ec2bc9aeb2ca Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Tue, 20 Feb 2024 17:20:14 +0800
Subject: [PATCH 1/8] Update network_oft.py

---
 extensions-builtin/Lora/network_oft.py | 40 ++++++++++++--------------
 1 file changed, 19 insertions(+), 21 deletions(-)

diff --git a/extensions-builtin/Lora/network_oft.py b/extensions-builtin/Lora/network_oft.py
index d658ad10..5b899bd6 100644
--- a/extensions-builtin/Lora/network_oft.py
+++ b/extensions-builtin/Lora/network_oft.py
@@ -22,24 +22,24 @@ class NetworkModuleOFT(network.NetworkModule):
         self.org_module: list[torch.Module] = [self.sd_module]

         self.scale = 1.0
-        self.is_kohya = False
+        self.is_R = False
         self.is_boft = False

-        # kohya-ss
+        # kohya-ss/New LyCORIS OFT/BOFT
         if "oft_blocks" in weights.w.keys():
-            self.is_kohya = True
             self.oft_blocks = weights.w["oft_blocks"]  # (num_blocks, block_size, block_size)
-            self.alpha = weights.w["alpha"]  # alpha is constraint
+            self.alpha = weights.w.get("alpha", self.alpha)  # alpha is constraint
             self.dim = self.oft_blocks.shape[0]  # lora dim
-        # LyCORIS OFT
+        # Old LyCORIS OFT
         elif "oft_diag" in weights.w.keys():
+            self.is_R = True
             self.oft_blocks = weights.w["oft_diag"]
             # self.alpha is unused
             self.dim = self.oft_blocks.shape[1]  # (num_blocks, block_size, block_size)

-            # LyCORIS BOFT
-            if weights.w["oft_diag"].dim() == 4:
-                self.is_boft = True
+        # LyCORIS BOFT
+        if self.oft_blocks.dim() == 4:
+            self.is_boft = True
         self.rescale = weights.w.get('rescale', None)
         if self.rescale is not None:
             self.rescale = self.rescale.reshape(-1, *[1]*(self.org_module[0].weight.dim() - 1))
@@ -55,26 +55,24 @@ class NetworkModuleOFT(network.NetworkModule):
         elif is_other_linear:
             self.out_dim = self.sd_module.embed_dim

-        if self.is_kohya:
-            self.constraint = self.alpha * self.out_dim
-            self.num_blocks = self.dim
-            self.block_size = self.out_dim // self.dim
+        self.num_blocks = self.dim
+        self.block_size = self.out_dim // self.dim
+        self.constraint = (1 if self.alpha is None else self.alpha) * self.out_dim
+        if self.is_R:
+            self.constraint = None
+            self.block_size = self.dim
+            self.num_blocks = self.out_dim // self.dim
         elif self.is_boft:
-            self.constraint = None
-            self.boft_m = weights.w["oft_diag"].shape[0]
-            self.block_num = weights.w["oft_diag"].shape[1]
-            self.block_size = weights.w["oft_diag"].shape[2]
+            self.boft_m = self.oft_blocks.shape[0]
+            self.num_blocks = self.oft_blocks.shape[1]
+            self.block_size = self.oft_blocks.shape[2]
             self.boft_b = self.block_size
-            #self.block_size, self.block_num = butterfly_factor(self.out_dim, self.dim)
-        else:
-            self.constraint = None
-            self.block_size, self.num_blocks = factorization(self.out_dim, self.dim)

     def calc_updown(self, orig_weight):
         oft_blocks = self.oft_blocks.to(orig_weight.device)
         eye = torch.eye(self.block_size, device=oft_blocks.device)

-        if self.is_kohya:
+        if not self.is_R:
             block_Q = oft_blocks - oft_blocks.transpose(1, 2)  # ensure skew-symmetric orthogonal matrix
             norm_Q = torch.norm(block_Q.flatten())
             new_norm_Q = torch.clamp(norm_Q, max=self.constraint.to(oft_blocks.device))
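A note on the block layout the patch above unifies: for kohya-ss and new-style LyCORIS weights, `dim` counts the orthogonal blocks and the output dimension is split into blocks of size `out_dim // dim`, while the old LyCORIS "R" format stores blocks of size `dim` instead; BOFT weights carry their own (m, num_blocks, block_size, block_size) shape and are read straight from the tensor. The snippet below is an illustrative sketch only (the helper name and signature are not part of the codebase) and assumes `out_dim` is divisible by `dim`.

def oft_block_layout(out_dim, dim, alpha, is_R):
    """Illustrative: how the patched __init__ derives (num_blocks, block_size, constraint)."""
    if is_R:
        # Old LyCORIS OFT ("R" format): dim is the block size; no COFT constraint.
        return out_dim // dim, dim, None
    # kohya-ss / new LyCORIS OFT and BOFT: dim is the number of blocks.
    num_blocks = dim
    block_size = out_dim // dim
    constraint = (1 if alpha is None else alpha) * out_dim  # COFT norm budget
    return num_blocks, block_size, constraint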
From 591470d86d565559d79d14a66ff14ecea2bd7706 Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Tue, 20 Feb 2024 17:21:34 +0800
Subject: [PATCH 2/8] linting

---
 extensions-builtin/Lora/network_oft.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/extensions-builtin/Lora/network_oft.py b/extensions-builtin/Lora/network_oft.py
index 5b899bd6..f14c183a 100644
--- a/extensions-builtin/Lora/network_oft.py
+++ b/extensions-builtin/Lora/network_oft.py
@@ -1,6 +1,5 @@
 import torch
 import network
-from lyco_helpers import factorization
 from einops import rearrange



From 64179c32213f986d1378b2f414be6ef86af1a82f Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Wed, 21 Feb 2024 22:50:43 +0800
Subject: [PATCH 3/8] Update network_oft.py

---
 extensions-builtin/Lora/network_oft.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/extensions-builtin/Lora/network_oft.py b/extensions-builtin/Lora/network_oft.py
index f14c183a..ce931c62 100644
--- a/extensions-builtin/Lora/network_oft.py
+++ b/extensions-builtin/Lora/network_oft.py
@@ -72,7 +72,7 @@ class NetworkModuleOFT(network.NetworkModule):
         eye = torch.eye(self.block_size, device=oft_blocks.device)

         if not self.is_R:
-            block_Q = oft_blocks - oft_blocks.transpose(1, 2)  # ensure skew-symmetric orthogonal matrix
+            block_Q = oft_blocks - oft_blocks.transpose(-1, -2)  # ensure skew-symmetric orthogonal matrix
             norm_Q = torch.norm(block_Q.flatten())
             new_norm_Q = torch.clamp(norm_Q, max=self.constraint.to(oft_blocks.device))
             block_Q = block_Q * ((new_norm_Q + 1e-8) / (norm_Q + 1e-8))

From c4afdb7895a5a5224915b3c6f27f8e800e18ef41 Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Thu, 22 Feb 2024 00:43:32 +0800
Subject: [PATCH 4/8] For no constraint

---
 extensions-builtin/Lora/network_oft.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/extensions-builtin/Lora/network_oft.py b/extensions-builtin/Lora/network_oft.py
index ce931c62..7821a8a7 100644
--- a/extensions-builtin/Lora/network_oft.py
+++ b/extensions-builtin/Lora/network_oft.py
@@ -27,7 +27,7 @@ class NetworkModuleOFT(network.NetworkModule):
         # kohya-ss/New LyCORIS OFT/BOFT
         if "oft_blocks" in weights.w.keys():
             self.oft_blocks = weights.w["oft_blocks"]  # (num_blocks, block_size, block_size)
-            self.alpha = weights.w.get("alpha", self.alpha)  # alpha is constraint
+            self.alpha = weights.w.get("alpha", None)  # alpha is constraint
             self.dim = self.oft_blocks.shape[0]  # lora dim
         # Old LyCORIS OFT
         elif "oft_diag" in weights.w.keys():
@@ -56,7 +56,7 @@ class NetworkModuleOFT(network.NetworkModule):
         self.num_blocks = self.dim
         self.block_size = self.out_dim // self.dim
-        self.constraint = (1 if self.alpha is None else self.alpha) * self.out_dim
+        self.constraint = (0 if self.alpha is None else self.alpha) * self.out_dim
         if self.is_R:
             self.constraint = None
             self.block_size = self.dim
             self.num_blocks = self.out_dim // self.dim
@@ -73,9 +73,10 @@ class NetworkModuleOFT(network.NetworkModule):

         if not self.is_R:
             block_Q = oft_blocks - oft_blocks.transpose(-1, -2)  # ensure skew-symmetric orthogonal matrix
-            norm_Q = torch.norm(block_Q.flatten())
-            new_norm_Q = torch.clamp(norm_Q, max=self.constraint.to(oft_blocks.device))
-            block_Q = block_Q * ((new_norm_Q + 1e-8) / (norm_Q + 1e-8))
+            if self.constraint != 0:
+                norm_Q = torch.norm(block_Q.flatten())
+                new_norm_Q = torch.clamp(norm_Q, max=self.constraint.to(oft_blocks.device))
+                block_Q = block_Q * ((new_norm_Q + 1e-8) / (norm_Q + 1e-8))
             oft_blocks = torch.matmul(eye + block_Q, (eye - block_Q).float().inverse())

         R = oft_blocks.to(orig_weight.device)
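For readers following the constraint changes in patches 3 and 4: calc_updown turns each stored block into an orthogonal matrix with a Cayley transform, and the COFT constraint (when alpha is present and non-zero) rescales the skew-symmetric part before the inversion. A minimal standalone sketch of that construction, using a plain float `constraint` instead of the tensor the module stores; the function name is illustrative, not part of the codebase.

import torch

def cayley_orthogonalize(blocks: torch.Tensor, constraint: float = 0.0) -> torch.Tensor:
    """Map raw OFT blocks of shape (..., b, b) to orthogonal matrices via the Cayley transform."""
    eye = torch.eye(blocks.shape[-1], device=blocks.device)
    Q = blocks - blocks.transpose(-1, -2)  # skew-symmetric part
    if constraint != 0:
        # COFT: clamp the overall norm of Q to the constraint (alpha * out_dim)
        norm_Q = torch.norm(Q.flatten())
        new_norm_Q = torch.clamp(norm_Q, max=constraint)
        Q = Q * ((new_norm_Q + 1e-8) / (norm_Q + 1e-8))
    return torch.matmul(eye + Q, (eye - Q).float().inverse())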
From f537e5a519d080fd2b16d94d91e7fed8dd3fd680 Mon Sep 17 00:00:00 2001
From: dtlnor
Date: Thu, 22 Feb 2024 12:26:57 +0900
Subject: [PATCH 5/8] fix #14591 - use translated content to do category mapping

---
 javascript/settings.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/javascript/settings.js b/javascript/settings.js
index e6009290..b2d981c2 100644
--- a/javascript/settings.js
+++ b/javascript/settings.js
@@ -55,8 +55,8 @@ onOptionsChanged(function() {
     });

     opts._categories.forEach(function(x) {
-        var section = x[0];
-        var category = x[1];
+        var section = localization[x[0]] ?? x[0];
+        var category = localization[x[1]] ?? x[1];

         var span = document.createElement('SPAN');
         span.textContent = category;

From 9211febbfc9ce45bdd2dc33e73939d67924c3f1e Mon Sep 17 00:00:00 2001
From: Andray
Date: Fri, 23 Feb 2024 02:20:42 +0400
Subject: [PATCH 6/8] ResizeHandleRow - allow overridden column scale parameter

---
 javascript/resizeHandle.js | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/javascript/resizeHandle.js b/javascript/resizeHandle.js
index f22aa51d..cd3e68c6 100644
--- a/javascript/resizeHandle.js
+++ b/javascript/resizeHandle.js
@@ -1,6 +1,5 @@
 (function() {
     const GRADIO_MIN_WIDTH = 320;
-    const GRID_TEMPLATE_COLUMNS = '1fr 16px 1fr';
     const PAD = 16;
     const DEBOUNCE_TIME = 100;

@@ -37,7 +36,7 @@
     }

     function afterResize(parent) {
-        if (displayResizeHandle(parent) && parent.style.gridTemplateColumns != GRID_TEMPLATE_COLUMNS) {
+        if (displayResizeHandle(parent) && parent.style.gridTemplateColumns != parent.style.originalGridTemplateColumns) {
             const oldParentWidth = R.parentWidth;
             const newParentWidth = parent.offsetWidth;
             const widthL = parseInt(parent.style.gridTemplateColumns.split(' ')[0]);
@@ -59,7 +58,9 @@

         parent.style.display = 'grid';
         parent.style.gap = '0';
-        parent.style.gridTemplateColumns = GRID_TEMPLATE_COLUMNS;
+        const gridTemplateColumns = `${parent.children[0].style.flexGrow}fr ${PAD}px ${parent.children[1].style.flexGrow}fr`;
+        parent.style.gridTemplateColumns = gridTemplateColumns;
+        parent.style.originalGridTemplateColumns = gridTemplateColumns;

         const resizeHandle = document.createElement('div');
         resizeHandle.classList.add('resize-handle');
@@ -96,7 +97,7 @@
             evt.preventDefault();
             evt.stopPropagation();

-            parent.style.gridTemplateColumns = GRID_TEMPLATE_COLUMNS;
+            parent.style.gridTemplateColumns = parent.style.originalGridTemplateColumns;
         });

         afterResize(parent);

From ed594d7ba69cf065222348f5aabc0374525d8ad5 Mon Sep 17 00:00:00 2001
From: DB Eriospermum
Date: Fri, 23 Feb 2024 13:37:37 +0800
Subject: [PATCH 7/8] fix: the `split_threshold` parameter does not work when running Split oversized images

---
 scripts/postprocessing_split_oversized.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/postprocessing_split_oversized.py b/scripts/postprocessing_split_oversized.py
index c4a03160..133e199b 100644
--- a/scripts/postprocessing_split_oversized.py
+++ b/scripts/postprocessing_split_oversized.py
@@ -61,7 +61,7 @@ class ScriptPostprocessingSplitOversized(scripts_postprocessing.ScriptPostproces
             ratio = (pp.image.height * width) / (pp.image.width * height)
             inverse_xy = True

-        if ratio >= 1.0 and ratio > split_threshold:
+        if ratio >= 1.0 or ratio > split_threshold:
             return

         result, *others = split_pic(pp.image, inverse_xy, width, height, overlap_ratio)
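The `and` to `or` change in the patch above makes the early return trigger as soon as either condition holds, so images whose aspect ratio is within the threshold are no longer split. A rough standalone restatement of the decision follows; the helper name and parameters are illustrative and not part of the script.

def should_skip_split(img_w, img_h, tile_w, tile_h, split_threshold):
    """Return True when the image is close enough to the target tile that splitting is skipped."""
    if img_h > img_w:
        ratio = (img_w * tile_h) / (img_h * tile_w)
    else:
        ratio = (img_h * tile_w) / (img_w * tile_h)
    # Mirrors the patched check: skip when the relative aspect ratio is already >= 1.0
    # or when it exceeds the split threshold.
    return ratio >= 1.0 or ratio > split_threshold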
From 1546fa8b896ed78d8d43220df0503a4a1c6cf6d2 Mon Sep 17 00:00:00 2001
From: lllyasviel
Date: Sun, 25 Feb 2024 20:49:04 -0800
Subject: [PATCH 8/8] upstream sync

---
 extensions-builtin/Lora/network_oft.py | 118 -------------------------
 1 file changed, 118 deletions(-)
 delete mode 100644 extensions-builtin/Lora/network_oft.py

diff --git a/extensions-builtin/Lora/network_oft.py b/extensions-builtin/Lora/network_oft.py
deleted file mode 100644
index 7821a8a7..00000000
--- a/extensions-builtin/Lora/network_oft.py
+++ /dev/null
@@ -1,118 +0,0 @@
-import torch
-import network
-from einops import rearrange
-
-
-class ModuleTypeOFT(network.ModuleType):
-    def create_module(self, net: network.Network, weights: network.NetworkWeights):
-        if all(x in weights.w for x in ["oft_blocks"]) or all(x in weights.w for x in ["oft_diag"]):
-            return NetworkModuleOFT(net, weights)
-
-        return None
-
-# Supports both kohya-ss' implementation of COFT https://github.com/kohya-ss/sd-scripts/blob/main/networks/oft.py
-# and KohakuBlueleaf's implementation of OFT/COFT https://github.com/KohakuBlueleaf/LyCORIS/blob/dev/lycoris/modules/diag_oft.py
-class NetworkModuleOFT(network.NetworkModule):
-    def __init__(self, net: network.Network, weights: network.NetworkWeights):
-
-        super().__init__(net, weights)
-
-        self.lin_module = None
-        self.org_module: list[torch.Module] = [self.sd_module]
-
-        self.scale = 1.0
-        self.is_R = False
-        self.is_boft = False
-
-        # kohya-ss/New LyCORIS OFT/BOFT
-        if "oft_blocks" in weights.w.keys():
-            self.oft_blocks = weights.w["oft_blocks"]  # (num_blocks, block_size, block_size)
-            self.alpha = weights.w.get("alpha", None)  # alpha is constraint
-            self.dim = self.oft_blocks.shape[0]  # lora dim
-        # Old LyCORIS OFT
-        elif "oft_diag" in weights.w.keys():
-            self.is_R = True
-            self.oft_blocks = weights.w["oft_diag"]
-            # self.alpha is unused
-            self.dim = self.oft_blocks.shape[1]  # (num_blocks, block_size, block_size)
-
-        # LyCORIS BOFT
-        if self.oft_blocks.dim() == 4:
-            self.is_boft = True
-        self.rescale = weights.w.get('rescale', None)
-        if self.rescale is not None:
-            self.rescale = self.rescale.reshape(-1, *[1]*(self.org_module[0].weight.dim() - 1))
-
-        is_linear = type(self.sd_module) in [torch.nn.Linear, torch.nn.modules.linear.NonDynamicallyQuantizableLinear]
-        is_conv = type(self.sd_module) in [torch.nn.Conv2d]
-        is_other_linear = type(self.sd_module) in [torch.nn.MultiheadAttention]  # unsupported
-
-        if is_linear:
-            self.out_dim = self.sd_module.out_features
-        elif is_conv:
-            self.out_dim = self.sd_module.out_channels
-        elif is_other_linear:
-            self.out_dim = self.sd_module.embed_dim
-
-        self.num_blocks = self.dim
-        self.block_size = self.out_dim // self.dim
-        self.constraint = (0 if self.alpha is None else self.alpha) * self.out_dim
-        if self.is_R:
-            self.constraint = None
-            self.block_size = self.dim
-            self.num_blocks = self.out_dim // self.dim
-        elif self.is_boft:
-            self.boft_m = self.oft_blocks.shape[0]
-            self.num_blocks = self.oft_blocks.shape[1]
-            self.block_size = self.oft_blocks.shape[2]
-            self.boft_b = self.block_size
-
-    def calc_updown(self, orig_weight):
-        oft_blocks = self.oft_blocks.to(orig_weight.device)
-        eye = torch.eye(self.block_size, device=oft_blocks.device)
-
-        if not self.is_R:
-            block_Q = oft_blocks - oft_blocks.transpose(-1, -2)  # ensure skew-symmetric orthogonal matrix
-            if self.constraint != 0:
-                norm_Q = torch.norm(block_Q.flatten())
-                new_norm_Q = torch.clamp(norm_Q, max=self.constraint.to(oft_blocks.device))
-                block_Q = block_Q * ((new_norm_Q + 1e-8) / (norm_Q + 1e-8))
-            oft_blocks = torch.matmul(eye + block_Q, (eye - block_Q).float().inverse())
-
-        R = oft_blocks.to(orig_weight.device)
-
-        if not self.is_boft:
-            # This errors out for MultiheadAttention, might need to be handled up-stream
-            merged_weight = rearrange(orig_weight, '(k n) ... -> k n ...', k=self.num_blocks, n=self.block_size)
-            merged_weight = torch.einsum(
-                'k n m, k n ... -> k m ...',
-                R,
-                merged_weight
-            )
-            merged_weight = rearrange(merged_weight, 'k m ... -> (k m) ...')
-        else:
-            # TODO: determine correct value for scale
-            scale = 1.0
-            m = self.boft_m
-            b = self.boft_b
-            r_b = b // 2
-            inp = orig_weight
-            for i in range(m):
-                bi = R[i]  # b_num, b_size, b_size
-                if i == 0:
-                    # Apply multiplier/scale and rescale into first weight
-                    bi = bi * scale + (1 - scale) * eye
-                inp = rearrange(inp, "(c g k) ... -> (c k g) ...", g=2, k=2**i * r_b)
-                inp = rearrange(inp, "(d b) ... -> d b ...", b=b)
-                inp = torch.einsum("b i j, b j ... -> b i ...", bi, inp)
-                inp = rearrange(inp, "d b ... -> (d b) ...")
-                inp = rearrange(inp, "(c k g) ... -> (c g k) ...", g=2, k=2**i * r_b)
-            merged_weight = inp
-
-        # Rescale mechanism
-        if self.rescale is not None:
-            merged_weight = self.rescale.to(merged_weight) * merged_weight
-
-        updown = merged_weight.to(orig_weight.device) - orig_weight.to(merged_weight.dtype)
-        output_shape = orig_weight.shape
-        return self.finalize_updown(updown, orig_weight, output_shape)
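Although the module is removed here in favour of the upstream copy, the core of its non-BOFT merge path is compact enough to restate. The sketch below mirrors the deleted calc_updown merge step (the helper name is illustrative): it rotates groups of output channels of a weight by per-block orthogonal matrices.

import torch
from einops import rearrange

def apply_block_oft(weight: torch.Tensor, R: torch.Tensor) -> torch.Tensor:
    """Apply block-diagonal orthogonal matrices R to the output channels of `weight`.

    weight: (out_dim, ...) layer weight
    R:      (num_blocks, block_size, block_size), with num_blocks * block_size == out_dim
    """
    k, n = R.shape[0], R.shape[1]
    w = rearrange(weight, '(k n) ... -> k n ...', k=k, n=n)
    w = torch.einsum('k n m, k n ... -> k m ...', R, w)  # rotate each block of channels
    return rearrange(w, 'k m ... -> (k m) ...')

The update the network then applies is apply_block_oft(w, R) - w, which matches the `updown` tensor computed at the end of the deleted calc_updown.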