bigdefence committed
Commit 0158cfb · verified · 1 parent: 64f3d01

Update config.json

Files changed (1): config.json (+3 −3)
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_name_or_path": "./hf_hub/hcx",
   "architectures": [
-    "OmniSpeechLlamaForCausalLM"
+    "OmniSpeechHyperCLOVAXForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -20,7 +20,7 @@
   "max_position_embeddings": 8192,
   "mlp_bias": false,
   "mm_tunable_parts": "speech_projector,backbone",
-  "model_type": "omni_speech_llama",
+  "model_type": "omni_speech_HyperCLOVAX",
   "num_attention_heads": 16,
   "num_hidden_layers": 24,
   "num_key_value_heads": 8,
@@ -44,4 +44,4 @@
   "use_cache": true,
   "use_duplex": false,
   "vocab_size": 110592
-}
+}
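
Because the diff renames "model_type" and the "architectures" entry to values that are not built into transformers, loading this checkpoint presumably requires either trust_remote_code=True or explicit registration of the custom classes. Below is a minimal sketch of the registration route; the omni_speech.model import path and the OmniSpeechHyperCLOVAXConfig class name are assumptions for illustration (only OmniSpeechHyperCLOVAXForCausalLM appears in the config itself):

```python
from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical import path: assumes the repo ships custom modeling code
# defining a config class plus the model class named in "architectures".
from omni_speech.model import (
    OmniSpeechHyperCLOVAXConfig,  # hypothetical config class
    OmniSpeechHyperCLOVAXForCausalLM,
)

# Map the new "model_type" string from config.json to the config class
# so AutoConfig.from_pretrained can resolve it.
AutoConfig.register("omni_speech_HyperCLOVAX", OmniSpeechHyperCLOVAXConfig)

# Map the config class to the model class so AutoModelForCausalLM
# dispatches to the renamed architecture.
AutoModelForCausalLM.register(
    OmniSpeechHyperCLOVAXConfig, OmniSpeechHyperCLOVAXForCausalLM
)

# "./hf_hub/hcx" is the "_name_or_path" recorded in config.json.
model = AutoModelForCausalLM.from_pretrained("./hf_hub/hcx")
```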