transformers: Add a flag to force load from safetensors (#4450)
This commit is contained in:
parent
c0655475ae
commit
fdcaa955e3
3 changed files with 4 additions and 1 deletion
|
@@ -91,6 +91,7 @@ parser.add_argument('--no-cache', action='store_true', help='Set use_cache to Fa
|
|||
parser.add_argument('--xformers', action='store_true', help='Use xformer\'s memory efficient attention. This is really old and probably doesn\'t do anything.')
|
||||
parser.add_argument('--sdp-attention', action='store_true', help='Use PyTorch 2.0\'s SDP attention. Same as above.')
|
||||
parser.add_argument('--trust-remote-code', action='store_true', help='Set trust_remote_code=True while loading the model. Necessary for some models.')
|
||||
parser.add_argument('--force-safetensors', action='store_true', help='Set use_safetensors=True while loading the model. This prevents arbitrary code execution.')
|
||||
parser.add_argument('--use_fast', action='store_true', help='Set use_fast=True while loading the tokenizer.')
|
||||
|
||||
# Accelerate 4-bit
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue