{"unk_token": "<|endoftext|>", "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "add_prefix_space": false, "keep_accents": true, "max_len": 50, "special_tokens_map_file": "classcat/gpt2-base-french/special_tokens_map.json", "name_or_path": "classcat/gpt2-base-french", "tokenizer_class": "GPT2Tokenizer"} |