fix(tokenizer): add special tokens to empty inputs (#13091)

This commit is contained in:
Michael Yang
2025-11-18 11:16:56 -08:00
committed by GitHub
parent 718961de68
commit 440a3823a6
5 changed files with 98 additions and 7 deletions

View File

@@ -140,7 +140,7 @@ func (wpm WordPiece) Encode(s string, addSpecial bool) ([]int32, error) {
}
}
-if addSpecial && len(ids) > 0 {
+if addSpecial {
ids = wpm.vocab.addSpecials(ids)
}