Add no_flash_attn option

oobabooga 2023-11-02 08:19:42 -07:00
parent aaf726dbfb
commit 77abd9b69b
4 changed files with 4 additions and 0 deletions


@@ -152,5 +152,6 @@ class Exllamav2HF(PreTrainedModel):
         config.max_seq_len = shared.args.max_seq_len
         config.scale_pos_emb = shared.args.compress_pos_emb
         config.scale_alpha_value = shared.args.alpha_value
+        config.no_flash_attn = shared.args.no_flash_attn
         return Exllamav2HF(config)
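
Only one of the four changed files is shown above; the hunk reads the new setting from shared.args, which implies a matching command-line flag elsewhere in the commit. A minimal sketch of how such a flag could be registered, assuming argparse-based option parsing; the parser variable and help text are assumptions, not taken from this diff:

    import argparse

    # Sketch: register a --no_flash_attn flag so that args.no_flash_attn
    # exists as a boolean (names and help text are assumed, not from the commit).
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no_flash_attn',
        action='store_true',
        default=False,
        help='Do not use flash attention with ExLlamaV2.',
    )

    args = parser.parse_args([])            # defaults: no_flash_attn is False
    print(args.no_flash_attn)               # -> False
    args = parser.parse_args(['--no_flash_attn'])
    print(args.no_flash_attn)               # -> True

With store_true, the option defaults to False and flips to True only when the flag is passed, which matches how the hunk copies it straight onto the ExLlamaV2 config object.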