Files
tabbyAPI/backends/exllamav3/utils.py
2026-03-30 00:19:07 +02:00

13 lines
373 B
Python

import platform
import torch
from common.logger import xlogger
def exllama_supports_nccl() -> bool:
    """Return True if the NCCL tensor-parallel backend can be used.

    NCCL requires a non-Windows platform, a CUDA-capable torch build with
    a visible GPU, and distributed support compiled into torch. Logs a
    warning and returns False on Windows, where NCCL is unavailable.
    """
    if platform.system() == "Windows":
        unsupported_message = "The NCCL tensor parallel backend is not supported on Windows."
        xlogger.warning(unsupported_message)
        return False

    # Guard with is_available(): torch builds without distributed support
    # do not expose is_nccl_available(), and calling it would raise.
    return (
        torch.cuda.is_available()
        and torch.distributed.is_available()
        and torch.distributed.is_nccl_available()
    )