Fix is_ccl_available & is_xpu_available imports
This commit is contained in:
parent
778a010df8
commit
839a87bac8
4 changed files with 5 additions and 9 deletions
|
|
@@ -5,7 +5,7 @@ from pathlib import Path
 import accelerate
 import torch
 import transformers
-from accelerate import is_xpu_available
+from accelerate.utils import is_xpu_available
 from gptq_for_llama import llama_inference_offload
 from gptq_for_llama.modelutils import find_layers
 from gptq_for_llama.quant import make_quant
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue