ArunNyp7 committed · verified
Commit 8cf22cc · 1 Parent(s): 1cd8fe9

Update config.json

Files changed (1): config.json (+5 −5)
config.json CHANGED
@@ -5,15 +5,15 @@
   "attention_probs_dropout_prob": 0.1,
   "gradient_checkpointing": false,
   "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.3,
-  "hidden_size": 768,
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 1024,
   "initializer_range": 0.02,
-  "intermediate_size": 3072,
+  "intermediate_size": 4096,
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
   "model_type": "bert",
-  "num_attention_heads": 12,
-  "num_hidden_layers": 12,
+  "num_attention_heads": 16,
+  "num_hidden_layers": 24,
   "num_labels": 3,
   "pad_token_id": 0,
   "position_embedding_type": "absolute",

In summary, the commit reduces hidden_dropout_prob (0.3 → 0.1) and scales the architecture from BERT-base dimensions (hidden_size 768, intermediate_size 3072, 12 attention heads, 12 layers) up to BERT-large dimensions (1024, 4096, 16 heads, 24 layers). The inline // annotations on the added lines ("Reduced dropout probability", "Increased hidden size", and so on) are kept out of the file itself because JSON does not support comments; a config.json containing them would fail to parse.