Upload folder using huggingface_hub
- .gitattributes +2 -0
- checkpoint-73000/config.json +79 -0
- checkpoint-73000/generation_config.json +6 -0
- checkpoint-73000/model.safetensors +3 -0
- checkpoint-73000/optimizer.pt +3 -0
- checkpoint-73000/rng_state_0.pth +3 -0
- checkpoint-73000/rng_state_1.pth +3 -0
- checkpoint-73000/rng_state_10.pth +3 -0
- checkpoint-73000/rng_state_11.pth +3 -0
- checkpoint-73000/rng_state_12.pth +3 -0
- checkpoint-73000/rng_state_13.pth +3 -0
- checkpoint-73000/rng_state_14.pth +3 -0
- checkpoint-73000/rng_state_15.pth +3 -0
- checkpoint-73000/rng_state_2.pth +3 -0
- checkpoint-73000/rng_state_3.pth +3 -0
- checkpoint-73000/rng_state_4.pth +3 -0
- checkpoint-73000/rng_state_5.pth +3 -0
- checkpoint-73000/rng_state_6.pth +3 -0
- checkpoint-73000/rng_state_7.pth +3 -0
- checkpoint-73000/rng_state_8.pth +3 -0
- checkpoint-73000/rng_state_9.pth +3 -0
- checkpoint-73000/scheduler.pt +3 -0
- checkpoint-73000/trainer_state.json +3 -0
- checkpoint-73000/training_args.bin +3 -0
- checkpoint-74000/config.json +79 -0
- checkpoint-74000/generation_config.json +6 -0
- checkpoint-74000/model.safetensors +3 -0
- checkpoint-74000/optimizer.pt +3 -0
- checkpoint-74000/rng_state_0.pth +3 -0
- checkpoint-74000/rng_state_1.pth +3 -0
- checkpoint-74000/rng_state_10.pth +3 -0
- checkpoint-74000/rng_state_11.pth +3 -0
- checkpoint-74000/rng_state_12.pth +3 -0
- checkpoint-74000/rng_state_13.pth +3 -0
- checkpoint-74000/rng_state_14.pth +3 -0
- checkpoint-74000/rng_state_15.pth +3 -0
- checkpoint-74000/rng_state_2.pth +3 -0
- checkpoint-74000/rng_state_3.pth +3 -0
- checkpoint-74000/rng_state_4.pth +3 -0
- checkpoint-74000/rng_state_5.pth +3 -0
- checkpoint-74000/rng_state_6.pth +3 -0
- checkpoint-74000/rng_state_7.pth +3 -0
- checkpoint-74000/rng_state_8.pth +3 -0
- checkpoint-74000/rng_state_9.pth +3 -0
- checkpoint-74000/scheduler.pt +3 -0
- checkpoint-74000/trainer_state.json +3 -0
- checkpoint-74000/training_args.bin +3 -0
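
For context, a commit titled "Upload folder using huggingface_hub" is normally produced with the `upload_folder` API of the `huggingface_hub` Python client. A minimal sketch, assuming a local training output directory and a placeholder repository ID (neither is stated in this commit):

```python
# Minimal sketch: push a folder of Trainer checkpoints to the Hub.
# "outputs" and "your-username/your-model" are placeholders, not values from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="outputs",               # local dir containing checkpoint-73000/, checkpoint-74000/, ...
    repo_id="your-username/your-model",  # target model repository on the Hub
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```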
.gitattributes
CHANGED
@@ -35,3 +35,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
 checkpoint-69000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
 checkpoint-70000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-73000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-74000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
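
The two added lines route the new trainer_state.json files through Git LFS, so the repository stores lightweight pointers instead of the multi-megabyte JSON files themselves. A small sketch, assuming a local checkout, that lists which patterns a .gitattributes file marks for LFS:

```python
# Sketch: list the path patterns that a .gitattributes file routes through Git LFS.
# Assumes it is run from the root of a local clone of the repository.
def lfs_tracked_patterns(path=".gitattributes"):
    patterns = []
    with open(path, encoding="utf-8") as f:
        for line in f:
            fields = line.split()
            if fields and "filter=lfs" in fields[1:]:
                patterns.append(fields[0])
    return patterns

print(lfs_tracked_patterns())
# e.g. [..., 'checkpoint-73000/trainer_state.json', 'checkpoint-74000/trainer_state.json']
```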
checkpoint-73000/config.json
ADDED
@@ -0,0 +1,79 @@
+{
+  "ar_steps": 1,
+  "architectures": [
+    "DiffVLMDiffusion"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "condition_layer": -1,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 1536,
+  "image_token_id": 151655,
+  "img_cross_attention_dim": 2048,
+  "img_diffuser_depth": 2,
+  "img_ffn_dim_multiplier": null,
+  "img_hidden_size": 1536,
+  "img_multiple_of": 256,
+  "img_norm_eps": 1e-05,
+  "img_num_attention_heads": 12,
+  "img_num_kv_heads": 12,
+  "img_qk_norm": true,
+  "in_channels": 32,
+  "initializer_range": 0.02,
+  "inject_img_diffuser": false,
+  "input_size": 32,
+  "intermediate_size": 8960,
+  "layer_group_size": 7,
+  "layerwise_start_idx": 0,
+  "lora_alpha": 128,
+  "lora_bias": "none",
+  "lora_dropout": 0.05,
+  "lora_enable": false,
+  "lora_r": 64,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 28,
+  "model_type": "qwen2_vl",
+  "non_linearity": 1,
+  "norm_elementwise_affine": true,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 2,
+  "patch_size": 2,
+  "repa_coeff": 0.1,
+  "repa_layers": "2",
+  "repa_shared": false,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": {
+    "mrope_section": [
+      16,
+      24,
+      24
+    ],
+    "rope_type": "default",
+    "type": "default"
+  },
+  "rope_theta": 1000000.0,
+  "sample_size": 128,
+  "sampling_steps": 28,
+  "sliding_window": null,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.47.0",
+  "use_cache": true,
+  "use_repa": false,
+  "use_residual_attn": false,
+  "use_sliding_window": false,
+  "vae_path": "mit-han-lab/dc-ae-f32c32-sana-1.1-diffusers",
+  "video_token_id": 151656,
+  "vision_config": {
+    "hidden_size": 1536,
+    "in_chans": 3,
+    "model_type": "qwen2_vl",
+    "spatial_patch_size": 14
+  },
+  "vision_end_token_id": 151653,
+  "vision_start_token_id": 151652,
+  "vision_token_id": 151654,
+  "vocab_size": 151936
+}
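
The config pairs a Qwen2-VL-style backbone (`model_type: qwen2_vl`, 28 hidden layers, hidden size 1536) with the custom `DiffVLMDiffusion` architecture and a DC-AE VAE (`vae_path`). Since that architecture class is not part of stock transformers, the following is only a hedged sketch of inspecting the config (placeholder repo ID; the project's own modeling code would be needed to instantiate the model itself):

```python
# Sketch: load and inspect the checkpoint config with transformers.
# "your-username/your-model" is a placeholder repo ID; DiffVLMDiffusion is a custom
# architecture, so instantiating the full model would additionally require the
# project's code (or trust_remote_code=True if the repo ships it).
from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    "your-username/your-model",
    subfolder="checkpoint-73000",
    trust_remote_code=True,
)
print(config.model_type, config.hidden_size, config.num_hidden_layers)  # qwen2_vl 1536 28
```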
checkpoint-73000/generation_config.json
ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "transformers_version": "4.47.0"
+}
checkpoint-73000/model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:171c85fed93b51ff86cd4364df45650411c5eef6b1bd61b777dea7e48096a817
+size 4538452374
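
The entry above is only a Git LFS pointer (version, oid, size); the roughly 4.5 GB safetensors weights live in LFS storage. A sketch of pulling the actual file for one checkpoint with `hf_hub_download`, which resolves the pointer to the real blob (placeholder repo ID):

```python
# Sketch: download the real model.safetensors that the LFS pointer above refers to.
# "your-username/your-model" is a placeholder repo ID.
from huggingface_hub import hf_hub_download

local_path = hf_hub_download(
    repo_id="your-username/your-model",
    filename="checkpoint-73000/model.safetensors",
)
print(local_path)  # cached local path of the ~4.5 GB weights file
```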
checkpoint-73000/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ef35c17079c29cc1a8450838578170cf56d1699fa96162b1857765b024c7a576
+size 6630551906
checkpoint-73000/rng_state_0.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3fc594d7a62910bfee84b8e393c65dcf86a4656f6de3e9c168f49b8883bc07e8
+size 15984

checkpoint-73000/rng_state_1.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1556639e60c76c37aa3931108c5e6839e702c4e6a7d805bdcc9b114baebf19ac
+size 15984

checkpoint-73000/rng_state_10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d30c3844dfee2c53a042cb343f0d8d1b883ba3bdb42dad3f58ee9ba3cd85944e
+size 15997

checkpoint-73000/rng_state_11.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:276a3deb5a715b7e1648e675c9d40733cbd9e0ed6b96e7c1effec2de986c4abb
+size 15997

checkpoint-73000/rng_state_12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7966d6465f8ce767b0d2a328651de2171ebfd926b40f72113f8d7a1fc9c99aa5
+size 15997

checkpoint-73000/rng_state_13.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:498e006872a4307094721f1b03a43070f9a81eaaf6edda1af6ac72027c30fcec
+size 15997

checkpoint-73000/rng_state_14.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:084ea76048191daac4d0bc67b6eb2fc78a12f801f5907818cc52eb8219fdf7ba
+size 15997

checkpoint-73000/rng_state_15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d57cab5be6e912d754ee8004b1abb673977c17ff93c15def42d3c28f2894d12a
+size 15997

checkpoint-73000/rng_state_2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:992cf70d1989114af1cf228b844f12a3b3b901ce064ad72873089293739c91e0
+size 15984

checkpoint-73000/rng_state_3.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e8e579ee4a6ba759cc7d6758244cb9ab7b36b4f1a1980786c37d20dd7c7f0506
+size 15984

checkpoint-73000/rng_state_4.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1be16f016ba7f715a8835f937157a539a7a46346fb624b6dfcd7ad2d20262a84
+size 15984

checkpoint-73000/rng_state_5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14af3d7f488edbce1500cfbaa0f4ad7bbb006431b511df2d1197aae2acd5fa16
+size 15984

checkpoint-73000/rng_state_6.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:25d01df3812cb1e4d6eaea01dc4c072e2905a01bc757c8269e49de27e1ce548a
+size 15984

checkpoint-73000/rng_state_7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:550ed5212afd509ebf9ae62a837d92bbd2bc3805334246d7727f374bda9b80c8
+size 15984

checkpoint-73000/rng_state_8.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6fca693a739f2b65568decd7ece673c7d8277c538a6859945fd9367a01fc923f
+size 15984

checkpoint-73000/rng_state_9.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2dc7911e583c19c4a67ff367dafc158dceea3eb5267cf7f7be2d6141f7d3610
+size 15984
checkpoint-73000/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:225c8b96777b56fa05aaf0a3eade66563e89ffa13886339fc7edffddb609a9ad
+size 1064

checkpoint-73000/trainer_state.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:881da467bba932a4875e3d8bb98961d84c4a0c2302faffaf7c610c5503168510
+size 11284534

checkpoint-73000/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:66dae48782f2bae2f1a9a444156fc7e4975feba99bb3a2a7f3312fbb75a4ac66
+size 6008
checkpoint-74000/config.json
ADDED
@@ -0,0 +1,79 @@
+{
+  "ar_steps": 1,
+  "architectures": [
+    "DiffVLMDiffusion"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "condition_layer": -1,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 1536,
+  "image_token_id": 151655,
+  "img_cross_attention_dim": 2048,
+  "img_diffuser_depth": 2,
+  "img_ffn_dim_multiplier": null,
+  "img_hidden_size": 1536,
+  "img_multiple_of": 256,
+  "img_norm_eps": 1e-05,
+  "img_num_attention_heads": 12,
+  "img_num_kv_heads": 12,
+  "img_qk_norm": true,
+  "in_channels": 32,
+  "initializer_range": 0.02,
+  "inject_img_diffuser": false,
+  "input_size": 32,
+  "intermediate_size": 8960,
+  "layer_group_size": 7,
+  "layerwise_start_idx": 0,
+  "lora_alpha": 128,
+  "lora_bias": "none",
+  "lora_dropout": 0.05,
+  "lora_enable": false,
+  "lora_r": 64,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 28,
+  "model_type": "qwen2_vl",
+  "non_linearity": 1,
+  "norm_elementwise_affine": true,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 2,
+  "patch_size": 2,
+  "repa_coeff": 0.1,
+  "repa_layers": "2",
+  "repa_shared": false,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": {
+    "mrope_section": [
+      16,
+      24,
+      24
+    ],
+    "rope_type": "default",
+    "type": "default"
+  },
+  "rope_theta": 1000000.0,
+  "sample_size": 128,
+  "sampling_steps": 28,
+  "sliding_window": null,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.47.0",
+  "use_cache": true,
+  "use_repa": false,
+  "use_residual_attn": false,
+  "use_sliding_window": false,
+  "vae_path": "mit-han-lab/dc-ae-f32c32-sana-1.1-diffusers",
+  "video_token_id": 151656,
+  "vision_config": {
+    "hidden_size": 1536,
+    "in_chans": 3,
+    "model_type": "qwen2_vl",
+    "spatial_patch_size": 14
+  },
+  "vision_end_token_id": 151653,
+  "vision_start_token_id": 151652,
+  "vision_token_id": 151654,
+  "vocab_size": 151936
+}
checkpoint-74000/generation_config.json
ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "transformers_version": "4.47.0"
+}
checkpoint-74000/model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5ef6725fff24ce491628d4e398b6affd95ac0fba766ae8ebcacd56696d1a287a
+size 4538452374

checkpoint-74000/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6d196b74ffe9e938aa10af34476cdcd0f889ccd9634ec458e0a2f4f57e8fc445
+size 6630551906

checkpoint-74000/rng_state_0.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cf6122ee06aff99e6a17788d05537142dd9166160696ee8ffb4227d9da2e4e9d
+size 15984

checkpoint-74000/rng_state_1.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fec0742d738ff5a459eeb76afefeff35951d1945ce20309cef1c57cfb42ecf48
+size 15984

checkpoint-74000/rng_state_10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:233e1209ea02426679634fa1721bcd620366e7a334f82e8c7e888e6857aa953a
+size 15997

checkpoint-74000/rng_state_11.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e5f29230c9e1e2d3945985a8120cbedcbd97d2bd6de2b4289b7c4274b824803c
+size 15997

checkpoint-74000/rng_state_12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cb096d628da5ed6499de1a76c88428dd530870123a357226422098c0dad26c8d
+size 15997

checkpoint-74000/rng_state_13.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f41864730976792f1e3461ed1d755d44583df7114f2c5794d524727507d4612
+size 15997

checkpoint-74000/rng_state_14.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:78fa213147dbbc78579b7381b1a529ddaeb67cd410a1bf42b805e5eb8e55b422
+size 15997

checkpoint-74000/rng_state_15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1257e2c3aa2a35fd4caf06bc2cfae4a27795cfd5b93a364796a9865d0b0e55b2
+size 15997

checkpoint-74000/rng_state_2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b15cc03b38dddce8e79339ce8e224d64748d1b0fa3517735d6481af414b45194
+size 15984

checkpoint-74000/rng_state_3.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:548965f679c78caffb509cb8b5c21b13c76a456065a27d0ca0c2fe18cb2b998c
+size 15984

checkpoint-74000/rng_state_4.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f5b347e6c322eb7a0f12a635995b87924dff5806bf7a448425388685c28f2048
+size 15984

checkpoint-74000/rng_state_5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c0c44a29d5a2667b9c5813a64f452214b78216e9bcab183096662db4652b6d7
+size 15984

checkpoint-74000/rng_state_6.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d9623ea4497cb730e611e17b41b8d9da74184eb9ecb708d197da559bd6d83de1
+size 15984

checkpoint-74000/rng_state_7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16dadfe078d645ef2b835c0f618facc0d248aae64f623e327a2d6623ac88a47c
+size 15984

checkpoint-74000/rng_state_8.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:34295588577cbb48680692156851b99b7265fadc0c600a661cfeba24148e3f63
+size 15984

checkpoint-74000/rng_state_9.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a3f395f33e81a9dffc66f8850527e7df4f5e114e4ab1f7efd296db1fba37b74b
+size 15984

checkpoint-74000/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5af2e8f9ec57165de0972ea9f30a3a97df51b25dda972be1051ca444c42b07d4
+size 1064

checkpoint-74000/trainer_state.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9f936c158ecf82a53cc0d2ec08e0c5e499057154207237bdb175f0205bf06701
+size 11439762

checkpoint-74000/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:de4a14e4f4119baeb5cf83167a3c2abe6f5e373f0d8da019d36754ae4a1098e4
+size 5944