Handle reasoning start token

This commit is contained in:
Orion-zhen
2025-03-06 11:42:56 +08:00
parent f0888437b1
commit 9efb7aab39

View File

@@ -392,7 +392,19 @@ async def stream_generate_chat_completion(
if isinstance(generation, Exception):
raise generation
if unwrap(generation.get("text"), "") == config.network.reasoning_end_token:
if (
unwrap(generation.get("text"), "")
== config.network.reasoning_start_token
and config.network.reasoning_parser
):
# Update reasoning chunk flag
is_reasoning_chunk = True
# And skip this token
continue
if (
unwrap(generation.get("text"), "") == config.network.reasoning_end_token
and config.network.reasoning_parser
):
# Update reasoning chunk flag
is_reasoning_chunk = False
# And skip this token