transformers: Add a flag to force load from safetensors (#4450)

This commit is contained in:
Julien Chaumond 2023-11-02 20:20:54 +01:00 committed by GitHub
parent c0655475ae
commit fdcaa955e3
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3 changed files with 4 additions and 1 deletion

View file

@@ -123,7 +123,8 @@ def huggingface_loader(model_name):
     params = {
         'low_cpu_mem_usage': True,
         'trust_remote_code': shared.args.trust_remote_code,
-        'torch_dtype': torch.bfloat16 if shared.args.bf16 else torch.float16
+        'torch_dtype': torch.bfloat16 if shared.args.bf16 else torch.float16,
+        'use_safetensors': True if shared.args.force_safetensors else None
     }
     config = AutoConfig.from_pretrained(path_to_model, trust_remote_code=params['trust_remote_code'])