Dependencies: Require tokenizers

This is used for some models and isn't too large (compared to
other Hugging Face dependencies), so include it by default.

Signed-off-by: kingbri <bdashore3@proton.me>
This commit is contained in:
kingbri
2024-03-23 01:12:21 -04:00
parent 1755f284cf
commit 26496c4db2
2 changed files with 2 additions and 14 deletions

View File

@@ -399,19 +399,7 @@ class ExllamaV2Container:
ExLlamaV2Tokenizer.extended_id_to_piece = {}
ExLlamaV2Tokenizer.extended_piece_to_id = {}
try:
self.tokenizer = ExLlamaV2Tokenizer(self.config)
except AssertionError as exc:
if "HF tokenizer" in str(exc):
raise ImportError(
"Could not create ExllamaV2's tokenizer for this model "
"because tokenizers is not installed.\n"
"Please run the following command in your environment "
"to install extra packages:\n"
"pip install -U .[extras]"
) from exc
else:
raise exc
self.tokenizer = ExLlamaV2Tokenizer(self.config)
# Calculate autosplit reserve for all GPUs
gpu_count = torch.cuda.device_count()

View File

@@ -25,6 +25,7 @@ dependencies = [
"loguru",
"sse-starlette",
"packaging",
"tokenizers",
]
[project.urls]
@@ -32,7 +33,6 @@ dependencies = [
[project.optional-dependencies]
extras = [
"tokenizers",
"outlines",
"lm-format-enforcer",
]