doublemathew committed on
Commit 9c8842d · verified · 1 Parent(s): fa98975

Training in progress, step 63

adapter_config.json CHANGED
@@ -28,10 +28,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "o_proj",
-    "v_proj",
     "k_proj",
-    "q_proj"
+    "o_proj",
+    "q_proj",
+    "v_proj"
   ],
   "target_parameters": [
     "7.mlp.experts.gate_up_proj",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8bf871883caaf9870459a423902f2423b555714acc320516a5605adfefe19a6f
+oid sha256:6b32f79cd3c80589fa50975a6cc7b55175d0fc48e8eed8973f2f518f9d4bbf9c
 size 60189176
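
Only the LFS pointer's oid changes here: the adapter weights were overwritten at step 63 with a file of the same size. The oid is the SHA-256 of the file contents, so a local download can be checked against the new pointer with a sketch like the one below (the local file path is an assumption):

import hashlib

# Hash of the new adapter_model.safetensors recorded in the LFS pointer above.
EXPECTED = "6b32f79cd3c80589fa50975a6cc7b55175d0fc48e8eed8973f2f518f9d4bbf9c"

with open("adapter_model.safetensors", "rb") as f:  # assumed local path
    actual = hashlib.sha256(f.read()).hexdigest()

assert actual == EXPECTED, "downloaded adapter does not match the LFS pointer"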
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7db5dc6ea3b27ed6372e63c172e23fab0fd55f05b17c101cd72b97f6e75a8977
+oid sha256:105b1f162b4cc4e9b0873c650a03e0dcdc35da2ee1ec6d5df58f3228bc8ed1e1
 size 6225
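
training_args.bin is the serialized TrainingArguments that transformers.Trainer stores next to its checkpoints; as with the adapter weights, only the pointer's oid changes in this commit. Assuming the file follows that standard Trainer convention, it can be inspected locally with a short sketch:

import torch

# training_args.bin is a pickled TrainingArguments object, so weights_only=False
# is required on recent PyTorch versions.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size)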