From 204e65b8dcb2db2014f40e1b4c8def3a00150cde Mon Sep 17 00:00:00 2001
From: comfyanonymous <121283862+comfyanonymous@users.noreply.github.com>
Date: Fri, 6 Feb 2026 16:48:20 -0800
Subject: [PATCH] Fix bug with last pr (#12338)

---
 comfy/text_encoders/anima.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/comfy/text_encoders/anima.py b/comfy/text_encoders/anima.py
index fcba097cb..d8c5a6f92 100644
--- a/comfy/text_encoders/anima.py
+++ b/comfy/text_encoders/anima.py
@@ -23,7 +23,7 @@ class AnimaTokenizer:
     def tokenize_with_weights(self, text:str, return_word_ids=False, **kwargs):
         out = {}
         qwen_ids = self.qwen3_06b.tokenize_with_weights(text, return_word_ids, **kwargs)
-        out["qwen3_06b"] = [[(token, 1.0, id) if return_word_ids else (token, 1.0) for token, _, id in inner_list] for inner_list in qwen_ids] # Set weights to 1.0
+        out["qwen3_06b"] = [[(k[0], 1.0, k[2]) if return_word_ids else (k[0], 1.0) for k in inner_list] for inner_list in qwen_ids] # Set weights to 1.0
         out["t5xxl"] = self.t5xxl.tokenize_with_weights(text, return_word_ids, **kwargs)
         return out
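
For context, a minimal sketch of the failure the one-line change addresses, assuming (as with other ComfyUI tokenizers) that tokenize_with_weights yields inner lists of (token, weight) pairs when return_word_ids is False and (token, weight, word_id) triples when it is True; the sample tokens below are illustrative only:

    # Inner list as produced with return_word_ids=False: (token, weight) pairs.
    inner_list = [("a", 1.0), ("b", 1.0)]

    # Old comprehension: "for token, _, id in inner_list" unpacks every tuple
    # into three names, so a 2-tuple raises
    # ValueError: not enough values to unpack (expected 3, got 2).
    try:
        [(token, 1.0) for token, _, id in inner_list]
    except ValueError as e:
        print(e)

    # Fixed comprehension: keep the tuple whole and only index k[2] in the
    # branch taken when return_word_ids is True, so 2-tuples pass through.
    return_word_ids = False
    out = [(k[0], 1.0, k[2]) if return_word_ids else (k[0], 1.0) for k in inner_list]
    print(out)  # [('a', 1.0), ('b', 1.0)]

Indexing k[2] only inside the return_word_ids branch means the comprehension never assumes a third element that may not exist, while still forcing every weight to 1.0 in both cases.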