A small experiment with add_tokens and add_special_tokens
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from transformers import BertTokenizer
def show_token_info(input_str, my_tokenizer):
print("vocab_size: ", my_tokenizer.vocab_size)
encode_info = my_tokenizer([input_str])
convert_token_list = my_tokenizer.convert_ids_to_tokens(encode_info[