Add disable_exllama to Transformers loader (for GPTQ LoRA training)

This commit is contained in:
oobabooga 2023-09-24 20:03:11 -07:00
parent c0fca23cb9
commit 36c38d7561
3 changed files with 22 additions and 10 deletions

View file

@@ -23,6 +23,7 @@ loaders_and_params = OrderedDict({
'alpha_value',
'rope_freq_base',
'compress_pos_emb',
'disable_exllama',
'transformers_info'
],
'ExLlama_HF': [