Mirror of https://github.com/theroyallab/tabbyAPI.git, synced 2026-03-15 00:07:28 +00:00.
Model: Fix tokenizer bugs
Some tokenizer variables don't get cleaned up on init, so these can persist. Clean these up manually before creating a new tokenizer for now. Signed-off-by: kingbri <bdashore3@proton.me>
This commit is contained in:
@@ -390,7 +390,12 @@ class ExllamaV2Container:
|
||||
# Notify that the model is being loaded
|
||||
self.model_is_loading = True
|
||||
|
||||
# Load tokenizer
|
||||
# Reset tokenizer namespace vars and create a tokenizer
|
||||
ExLlamaV2Tokenizer.unspecial_piece_to_id = {}
|
||||
ExLlamaV2Tokenizer.unspecial_id_to_piece = {}
|
||||
ExLlamaV2Tokenizer.extended_id_to_piece = {}
|
||||
ExLlamaV2Tokenizer.extended_piece_to_id = {}
|
||||
|
||||
self.tokenizer = ExLlamaV2Tokenizer(self.config)
|
||||
|
||||
# Calculate autosplit reserve for all GPUs
|
||||
|
||||
Reference in New Issue
Block a user