
Commit 58022e4

#23388 Issue: Update RoBERTa configuration (#23863)
1 parent: 6fc0454

2 files changed: +4 −4 lines changed


src/transformers/models/roberta/configuration_roberta.py

Lines changed: 2 additions & 2 deletions
@@ -46,7 +46,7 @@ class RobertaConfig(PretrainedConfig):
 
 
     Args:
-        vocab_size (`int`, *optional*, defaults to 30522):
+        vocab_size (`int`, *optional*, defaults to 50265):
             Vocabulary size of the RoBERTa model. Defines the number of different tokens that can be represented by the
             `inputs_ids` passed when calling [`RobertaModel`] or [`TFRobertaModel`].
         hidden_size (`int`, *optional*, defaults to 768):
@@ -105,7 +105,7 @@ class RobertaConfig(PretrainedConfig):
 
     def __init__(
         self,
-        vocab_size=30522,
+        vocab_size=50265,
         hidden_size=768,
         num_hidden_layers=12,
         num_attention_heads=12,
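For context: 30522 is the WordPiece vocabulary size of BERT, which this default apparently carried over, while RoBERTa's byte-level BPE tokenizer has 50265 entries. Before this commit, a `RobertaConfig` built from defaults therefore disagreed with the `roberta-base` checkpoint. A minimal sketch of the corrected behavior, assuming a transformers version that includes this commit:

from transformers import RobertaConfig

# Default construction now matches the roberta-base checkpoint.
config = RobertaConfig()
assert config.vocab_size == 50265  # was 30522 before this commit

# The config shipped with the pretrained checkpoint has always carried
# the correct value; only the in-code default was wrong.
pretrained = RobertaConfig.from_pretrained("roberta-base")
assert pretrained.vocab_size == 50265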

src/transformers/models/roberta_prelayernorm/configuration_roberta_prelayernorm.py

Lines changed: 2 additions & 2 deletions
@@ -45,7 +45,7 @@ class RobertaPreLayerNormConfig(PretrainedConfig):
 
 
     Args:
-        vocab_size (`int`, *optional*, defaults to 30522):
+        vocab_size (`int`, *optional*, defaults to 50265):
             Vocabulary size of the RoBERTa-PreLayerNorm model. Defines the number of different tokens that can be
             represented by the `inputs_ids` passed when calling [`RobertaPreLayerNormModel`] or
             [`TFRobertaPreLayerNormModel`].
@@ -106,7 +106,7 @@ class RobertaPreLayerNormConfig(PretrainedConfig):
 
     def __init__(
         self,
-        vocab_size=30522,
+        vocab_size=50265,
         hidden_size=768,
         num_hidden_layers=12,
         num_attention_heads=12,
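The same correction applies to the PreLayerNorm variant, which shares RoBERTa's tokenizer. A quick check, again assuming a post-commit transformers install:

from transformers import RobertaPreLayerNormConfig

# Defaults now agree with RoBERTa's 50265-token BPE vocabulary.
config = RobertaPreLayerNormConfig()
assert config.vocab_size == 50265  # previously defaulted to 30522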
