makiart committed
Commit cff2009 · verified · 1 Parent(s): fe753f1

Upload folder using huggingface_hub

Files changed (1):
  1. config.json +5 -5
config.json CHANGED
@@ -13,9 +13,9 @@
   "decoder_bias": true,
   "deterministic_flash_attn": false,
   "embedding_dropout": 0.0,
-  "eos_token_id": 151644,
+  "eos_token_id": 151645,
   "global_attn_every_n_layers": 3,
-  "global_rope_theta": 10000.0,
+  "global_rope_theta": 160000.0,
   "gradient_checkpointing": false,
   "hidden_activation": "gelu",
   "hidden_size": 768,
@@ -24,7 +24,7 @@
   "intermediate_size": 1152,
   "layer_norm_eps": 1e-05,
   "local_attention": 128,
-  "local_rope_theta": -1,
+  "local_rope_theta": 10000.0,
   "max_position_embeddings": 8192,
   "mlp_bias": false,
   "mlp_dropout": 0.0,
@@ -33,9 +33,9 @@
   "norm_eps": 1e-05,
   "num_attention_heads": 12,
   "num_hidden_layers": 22,
-  "pad_token_id": 151644,
+  "pad_token_id": 151646,
   "position_embedding_type": "absolute",
-  "sep_token_id": 151644,
+  "sep_token_id": 151645,
   "tie_word_embeddings": true,
   "torch_dtype": "float32",
   "transformers_version": "4.48.0",