Update config.json (#1)
Update config.json (ef9013406ce49bc78a20594e6a640d20fc236f4a)
Co-authored-by: Lucas Charpentier <lgcharpe@users.noreply.huggingface.co>
- config.json +3 -3
config.json CHANGED
@@ -11,12 +11,12 @@
   },
   "classifier_dropout": 0.2,
   "hidden_dropout_prob": 0.1,
-  "hidden_size":
-  "intermediate_size":
+  "hidden_size": 384,
+  "intermediate_size": 1024,
   "layer_norm_eps": 1e-07,
   "max_position_embeddings": 512,
   "model_type": "ltgbert",
-  "num_attention_heads":
+  "num_attention_heads": 6,
   "num_hidden_layers": 12,
   "output_all_encoded_layers": true,
   "pad_token_id": 4,
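The updated fields describe a compact LTG-BERT encoder: a hidden size of 384 split across 6 attention heads gives 64-dimensional heads, with a 1024-dimensional feed-forward layer over 12 hidden layers. Below is a minimal sketch (not part of this commit, and using only the values visible in the diff) that reproduces the updated fields and checks their consistency:

```python
import json

# Values from the updated config.json in this commit (only the fields
# visible in the diff; the full file may contain additional keys).
config = {
    "classifier_dropout": 0.2,
    "hidden_dropout_prob": 0.1,
    "hidden_size": 384,
    "intermediate_size": 1024,
    "layer_norm_eps": 1e-07,
    "max_position_embeddings": 512,
    "model_type": "ltgbert",
    "num_attention_heads": 6,
    "num_hidden_layers": 12,
    "output_all_encoded_layers": True,
    "pad_token_id": 4,
}

# hidden_size must split evenly across the attention heads.
assert config["hidden_size"] % config["num_attention_heads"] == 0
head_dim = config["hidden_size"] // config["num_attention_heads"]
print(f"per-head dimension: {head_dim}")  # 384 / 6 = 64

# Round-trip through JSON, e.g. to regenerate a config.json locally.
print(json.dumps(config, indent=2, sort_keys=True))
```

Note that if "ltgbert" is not a built-in architecture in your installed transformers version, loading the model itself would presumably require the repository's custom modeling code (e.g. passing trust_remote_code=True), which is outside the scope of this config change.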