Add RoPE scaling support for transformers (including dynamic NTK)

https://github.com/huggingface/transformers/pull/24653
This commit is contained in:
oobabooga 2023-08-08 21:24:28 -07:00
parent f4caaf337a
commit d8fb506aff
5 changed files with 16 additions and 9 deletions

View file

@@ -39,8 +39,8 @@ loaders_and_params = {
'low_vram',
'mlock',
'llama_cpp_seed',
'compress_pos_emb',
'alpha_value',
'compress_pos_emb',
'cpu',
],
'llamacpp_HF': [
@@ -54,8 +54,8 @@ loaders_and_params = {
'low_vram',
'mlock',
'llama_cpp_seed',
'compress_pos_emb',
'alpha_value',
'compress_pos_emb',
'cpu',
'llamacpp_HF_info',
],
@@ -73,20 +73,22 @@ loaders_and_params = {
'quant_type',
'compute_dtype',
'trust_remote_code',
'alpha_value',
'compress_pos_emb',
'transformers_info'
],
'ExLlama': [
'gpu_split',
'max_seq_len',
'compress_pos_emb',
'alpha_value',
'compress_pos_emb',
'exllama_info',
],
'ExLlama_HF': [
'gpu_split',
'max_seq_len',
'compress_pos_emb',
'alpha_value',
'compress_pos_emb',
'exllama_HF_info',
]
}