Model: Account for tokenizer lazy init

This commit is contained in:
turboderp
2024-08-23 23:51:53 +02:00
parent a676c4bf38
commit fe3253f3a9

View File

@@ -871,7 +871,7 @@ class ExllamaV2Container:
 def get_logprobs(self, token_ids: torch.Tensor, token_probs: torch.Tensor):
     top_tokens = [
         self.tokenizer.extended_id_to_piece.get(
-            index, self.tokenizer.id_to_piece[index]
+            index, self.tokenizer.get_id_to_piece_list(True)[index]
         )
         for index in token_ids.flatten().tolist()
     ]
@@ -1146,7 +1146,7 @@ class ExllamaV2Container:
         # Map logits to the tensor with their biases
         for token_id, bias in logit_bias.items():
-            if 0 <= token_id < len(self.tokenizer.id_to_piece):
+            if 0 <= token_id < len(self.tokenizer.get_id_to_piece_list(True)):
                 gen_settings.token_bias[token_id] = bias
             else:
                 logger.warning(