Upload folder using huggingface_hub
- README.md +1 -1
- config.json +1 -1
- model.safetensors +2 -2
README.md CHANGED
@@ -76,7 +76,7 @@ with open(hf_hub_download(source_model_id, filename='config.json', repo_type='mo
     config_json = json.load(f)
     config_json.update({
         "head_dim": 32,
-        "hidden_size":
+        "hidden_size": 32,  # required by Mxfp4GptOssExperts code
         "intermediate_size": 64,
         "layer_types": ["sliding_attention", "full_attention"],
         "num_attention_heads": 2,
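For reference, the changed line belongs to the README's config-patching snippet. A minimal sketch of the surrounding code, with source_model_id, the truncated repo_type argument, and the output path assumed rather than read from this diff:

import json

from huggingface_hub import hf_hub_download

# Assumption: a GPT-OSS checkpoint to copy the base config from.
source_model_id = "openai/gpt-oss-20b"

with open(hf_hub_download(source_model_id, filename='config.json', repo_type='model')) as f:
    config_json = json.load(f)

# Shrink the architecture to tiny-random proportions.
config_json.update({
    "head_dim": 32,
    "hidden_size": 32,  # required by Mxfp4GptOssExperts code
    "intermediate_size": 64,
    "layer_types": ["sliding_attention", "full_attention"],
    "num_attention_heads": 2,
})

# Assumption: write the patched config next to the other tiny-model files.
with open("config.json", "w") as f:
    json.dump(config_json, f, indent=2)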
config.json CHANGED
@@ -8,7 +8,7 @@
     "experts_per_token": 4,
     "head_dim": 32,
     "hidden_act": "silu",
-    "hidden_size":
+    "hidden_size": 32,
     "initial_context_length": 4096,
     "initializer_range": 0.02,
     "intermediate_size": 64,
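After this change, the patched config can be sanity-checked by loading it back with transformers. A minimal sketch; the repo id below is a placeholder for this model's own id:

from transformers import AutoConfig

# Placeholder repo id: substitute the id of this tiny-random repository.
config = AutoConfig.from_pretrained("tiny-random/gpt-oss")

assert config.hidden_size == 32  # the value patched in this commit
assert config.head_dim == 32     # set explicitly, so it need not equal hidden_size // num_attention_heads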
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:aefe8b9c4b4969f6d13c5d778760f3dce4e25134324b33677934550d9df02a7c
+size 13710176
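model.safetensors is stored as a Git LFS pointer, so the repo records only the blob's sha256 digest and its size in bytes. A minimal sketch for verifying a locally downloaded copy against this pointer (the local path is an assumption):

import hashlib
import os

EXPECTED_OID = "aefe8b9c4b4969f6d13c5d778760f3dce4e25134324b33677934550d9df02a7c"
EXPECTED_SIZE = 13710176  # bytes, from the pointer file above

path = "model.safetensors"  # assumes the weights were already downloaded here

# The pointer's `size` field is the byte length of the real file.
assert os.path.getsize(path) == EXPECTED_SIZE

# The pointer's `oid` field is the sha256 of the real file's contents.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)
assert digest.hexdigest() == EXPECTED_OID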