Increase the sequence length, decrease batch size
I have no idea what I am doing
This commit is contained in:
parent
e62b9b1074
commit
5a79863df3
1 changed file with 1 addition and 1 deletion
|
@ -70,7 +70,7 @@ class LLaMAModel:
|
|||
pass
|
||||
|
||||
@classmethod
|
||||
def from_pretrained(self, path, max_seq_len=512, max_batch_size=32):
|
||||
def from_pretrained(self, path, max_seq_len=2048, max_batch_size=1):
|
||||
tokenizer_path = path / "tokenizer.model"
|
||||
path = os.path.abspath(path)
|
||||
tokenizer_path = os.path.abspath(tokenizer_path)
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue