rouabenyahia committed (verified) · Commit 90128db · 1 parent: 061ee62

Upload tokenizer

special_tokens_map.json CHANGED
@@ -13,13 +13,6 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -52,12 +52,6 @@
           "id": "A",
           "type_id": 0
         }
-      },
-      {
-        "SpecialToken": {
-          "id": "</s>",
-          "type_id": 0
-        }
       }
     ],
     "pair": [
@@ -73,12 +67,6 @@
           "type_id": 0
         }
       },
-      {
-        "SpecialToken": {
-          "id": "</s>",
-          "type_id": 0
-        }
-      },
       {
         "SpecialToken": {
           "id": "<s>",
@@ -90,24 +78,9 @@
           "id": "B",
           "type_id": 1
         }
-      },
-      {
-        "SpecialToken": {
-          "id": "</s>",
-          "type_id": 1
-        }
       }
     ],
     "special_tokens": {
-      "</s>": {
-        "id": "</s>",
-        "ids": [
-          2
-        ],
-        "tokens": [
-          "</s>"
-        ]
-      },
       "<s>": {
         "id": "<s>",
         "ids": [
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "add_bos_token": true,
-  "add_eos_token": true,
+  "add_eos_token": false,
   "add_prefix_space": null,
   "added_tokens_decoder": {
     "0": {
@@ -35,7 +35,7 @@
   "extra_special_tokens": {},
   "legacy": false,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "</s>",
+  "pad_token": null,
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",