danielhanchen committed on
Commit b45237c · verified · 1 Parent(s): e0e53d6

Delete config.json

Files changed (1)
  1. config.json +0 -52
config.json DELETED
@@ -1,52 +0,0 @@
- {
-   "architectures": [
-     "Mistral3ForConditionalGeneration"
-   ],
-   "bos_token_id": 1,
-   "eos_token_id": 2,
-   "image_token_index": 10,
-   "model_type": "mistral3",
-   "multimodal_projector_bias": false,
-   "pad_token_id": 11,
-   "projector_hidden_act": "gelu",
-   "spatial_merge_size": 2,
-   "text_config": {
-     "attention_dropout": 0.0,
-     "head_dim": 128,
-     "hidden_act": "silu",
-     "hidden_size": 5120,
-     "initializer_range": 0.02,
-     "intermediate_size": 32768,
-     "max_position_embeddings": 131072,
-     "model_type": "mistral",
-     "num_attention_heads": 32,
-     "num_hidden_layers": 40,
-     "num_key_value_heads": 8,
-     "rms_norm_eps": 1e-05,
-     "rope_theta": 1000000000.0,
-     "sliding_window": null,
-     "torch_dtype": "bfloat16",
-     "use_cache": true,
-     "vocab_size": 131072
-   },
-   "torch_dtype": "bfloat16",
-   "transformers_version": "4.52.4",
-   "unsloth_fixed": true,
-   "vision_config": {
-     "attention_dropout": 0.0,
-     "head_dim": 64,
-     "hidden_act": "silu",
-     "hidden_size": 1024,
-     "image_size": 1540,
-     "initializer_range": 0.02,
-     "intermediate_size": 4096,
-     "model_type": "pixtral",
-     "num_attention_heads": 16,
-     "num_channels": 3,
-     "num_hidden_layers": 24,
-     "patch_size": 14,
-     "rope_theta": 10000.0,
-     "torch_dtype": "bfloat16"
-   },
-   "vision_feature_layer": -1
- }