Add decode functions to llama.cpp/exllama
This commit is contained in:
parent
1ba2e88551
commit
b6643e5039
2 changed files with 6 additions and 0 deletions
|
@ -120,3 +120,6 @@ class ExllamaModel:
|
|||
|
||||
def encode(self, string, **kwargs):
    """Tokenize *string* via the wrapped tokenizer.

    Extra keyword arguments are accepted for interface compatibility
    with other model backends but are not forwarded.
    """
    tokenizer = self.tokenizer
    return tokenizer.encode(string)
|
||||
|
||||
def decode(self, string, **kwargs):
    """Detokenize *string* via the wrapped tokenizer.

    The tokenizer's ``decode`` returns a sequence; only its first
    element is returned. Extra keyword arguments are accepted for
    interface compatibility but are not forwarded.
    """
    decoded = self.tokenizer.decode(string)
    return decoded[0]
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue