Add types to the encode/decode/token-count endpoints
parent f6ca9cfcdc
commit 1b69694fe9
5 changed files with 47 additions and 36 deletions
@@ -145,7 +145,7 @@ def decode(output_ids, skip_special_tokens=True):
     if shared.tokenizer is None:
         raise ValueError('No tokenizer is loaded')
 
-    return shared.tokenizer.decode(output_ids, skip_special_tokens)
+    return shared.tokenizer.decode(output_ids, skip_special_tokens=skip_special_tokens)
 
 
 def get_encoded_length(prompt):
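The hunk above replaces a positional argument with an explicit keyword: passing skip_special_tokens by keyword keeps the call correct across tokenizer implementations whose positional parameter order may differ from the Hugging Face default. As a rough illustration of the typed endpoints the commit title describes, here is a minimal sketch assuming Pydantic-style request/response models; the names DecodeRequest, DecodeResponse, and decode_endpoint are hypothetical and not necessarily those used in this commit.

    # Minimal, hypothetical sketch of a typed decode endpoint.
    # Model names and fields are assumptions for illustration only.
    from pydantic import BaseModel

    from modules import shared  # module path assumed from the diff context


    class DecodeRequest(BaseModel):
        tokens: list[int]
        skip_special_tokens: bool = True


    class DecodeResponse(BaseModel):
        text: str


    def decode_endpoint(request: DecodeRequest) -> DecodeResponse:
        if shared.tokenizer is None:
            raise ValueError('No tokenizer is loaded')
        # Pass the flag by keyword, as in the diff above, so the call does
        # not depend on the tokenizer's positional parameter order.
        text = shared.tokenizer.decode(
            request.tokens, skip_special_tokens=request.skip_special_tokens
        )
        return DecodeResponse(text=text)

Typed request/response models like these let the API layer validate input (e.g. rejecting a non-integer token list) before the tokenizer is ever touched.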