Force min length 1 when tokenizing for text generation. (#12538)

This commit is contained in:
comfyanonymous
2026-02-19 19:57:44 -08:00
committed by GitHub
parent 0301ccf745
commit 5f2117528a
2 changed files with 3 additions and 1 deletion

View File

@@ -573,6 +573,8 @@ class SDTokenizer:
min_length = tokenizer_options.get("{}_min_length".format(self.embedding_key), self.min_length)
min_padding = tokenizer_options.get("{}_min_padding".format(self.embedding_key), self.min_padding)
min_length = kwargs.get("min_length", min_length)
text = escape_important(text)
if kwargs.get("disable_weights", self.disable_weights):
parsed_weights = [(text, 1.0)]

View File

@@ -42,7 +42,7 @@ class TextGenerate(io.ComfyNode):
@classmethod
def execute(cls, clip, prompt, max_length, sampling_mode, image=None) -> io.NodeOutput:
-        tokens = clip.tokenize(prompt, image=image, skip_template=False)
+        tokens = clip.tokenize(prompt, image=image, skip_template=False, min_length=1)
# Get sampling parameters from dynamic combo
do_sample = sampling_mode.get("sampling_mode") == "on"