Remove --sdp-attention, --xformers flags (#5126)

oobabooga 2023-12-31 01:36:51 -03:00 committed by GitHub
parent b7dd1f9542
commit 8e397915c9
4 changed files with 1 addition and 180 deletions


@@ -21,7 +21,7 @@ from transformers import (
 )
 
 import modules.shared as shared
-from modules import RoPE, llama_attn_hijack, sampler_hijack
+from modules import RoPE, sampler_hijack
 from modules.logging_colors import logger
 from modules.models_settings import get_model_metadata
 from modules.relative_imports import RelativeImport
@@ -97,10 +97,6 @@ def load_model(model_name, loader=None):
     else:
         tokenizer = load_tokenizer(model_name, model)
 
-    # Hijack attention with xformers
-    if any((shared.args.xformers, shared.args.sdp_attention)):
-        llama_attn_hijack.hijack_llama_attention()
-
     shared.settings.update({k: v for k, v in metadata.items() if k in shared.settings})
     if loader.lower().startswith('exllama'):
         shared.settings['truncation_length'] = shared.args.max_seq_len