Update llama_attn_hijack.py (#4231)
This commit is contained in:
parent
2e8b5f7c80
commit
2e471071af
1 changed files with 1 additions and 0 deletions
|
@@ -17,6 +17,7 @@ if shared.args.xformers:
|
|||
|
||||
|
||||
def hijack_llama_attention():
    """Monkey-patch HF transformers' LlamaAttention to use an alternative kernel.

    When the ``--xformers`` CLI flag is set (``shared.args.xformers``),
    replaces ``LlamaAttention.forward`` with ``xformers_forward`` (defined
    elsewhere in this module) and logs the substitution. The import is kept
    local so ``transformers`` is only touched when the hijack is invoked.

    NOTE(review): the visible diff hunk ends here; the upstream function may
    contain further branches (e.g. for ``--sdp-attention``) not shown in this
    view — confirm against the full file before relying on this as complete.
    """
    import transformers.models.llama.modeling_llama

    if shared.args.xformers:
        transformers.models.llama.modeling_llama.LlamaAttention.forward = xformers_forward
        logger.info("Replaced attention with xformers_attention")
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue