zhiyang1 committed
Commit 1900850 · verified · 1 Parent(s): b95f13f

Upload folder using huggingface_hub
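A folder upload like this is typically produced with the hub client's upload_folder API. A minimal sketch, assuming a local training output directory; the repo id and path below are placeholders, not values taken from this commit:

```python
# Hedged sketch of the kind of call that produces an "Upload folder using
# huggingface_hub" commit. Repo id and folder path are hypothetical.
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` by default
api.upload_folder(
    folder_path="./training_output",        # placeholder local directory
    repo_id="zhiyang1/placeholder-repo",    # placeholder repo id
    commit_message="Upload folder using huggingface_hub",
)
```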

Files changed (47)
  1. .gitattributes +2 -0
  2. checkpoint-208000/config.json +79 -0
  3. checkpoint-208000/generation_config.json +6 -0
  4. checkpoint-208000/model.safetensors +3 -0
  5. checkpoint-208000/optimizer.pt +3 -0
  6. checkpoint-208000/rng_state_0.pth +3 -0
  7. checkpoint-208000/rng_state_1.pth +3 -0
  8. checkpoint-208000/rng_state_10.pth +3 -0
  9. checkpoint-208000/rng_state_11.pth +3 -0
  10. checkpoint-208000/rng_state_12.pth +3 -0
  11. checkpoint-208000/rng_state_13.pth +3 -0
  12. checkpoint-208000/rng_state_14.pth +3 -0
  13. checkpoint-208000/rng_state_15.pth +3 -0
  14. checkpoint-208000/rng_state_2.pth +3 -0
  15. checkpoint-208000/rng_state_3.pth +3 -0
  16. checkpoint-208000/rng_state_4.pth +3 -0
  17. checkpoint-208000/rng_state_5.pth +3 -0
  18. checkpoint-208000/rng_state_6.pth +3 -0
  19. checkpoint-208000/rng_state_7.pth +3 -0
  20. checkpoint-208000/rng_state_8.pth +3 -0
  21. checkpoint-208000/rng_state_9.pth +3 -0
  22. checkpoint-208000/scheduler.pt +3 -0
  23. checkpoint-208000/trainer_state.json +3 -0
  24. checkpoint-208000/training_args.bin +3 -0
  25. checkpoint-210000/config.json +79 -0
  26. checkpoint-210000/generation_config.json +6 -0
  27. checkpoint-210000/model.safetensors +3 -0
  28. checkpoint-210000/optimizer.pt +3 -0
  29. checkpoint-210000/rng_state_0.pth +3 -0
  30. checkpoint-210000/rng_state_1.pth +3 -0
  31. checkpoint-210000/rng_state_10.pth +3 -0
  32. checkpoint-210000/rng_state_11.pth +3 -0
  33. checkpoint-210000/rng_state_12.pth +3 -0
  34. checkpoint-210000/rng_state_13.pth +3 -0
  35. checkpoint-210000/rng_state_14.pth +3 -0
  36. checkpoint-210000/rng_state_15.pth +3 -0
  37. checkpoint-210000/rng_state_2.pth +3 -0
  38. checkpoint-210000/rng_state_3.pth +3 -0
  39. checkpoint-210000/rng_state_4.pth +3 -0
  40. checkpoint-210000/rng_state_5.pth +3 -0
  41. checkpoint-210000/rng_state_6.pth +3 -0
  42. checkpoint-210000/rng_state_7.pth +3 -0
  43. checkpoint-210000/rng_state_8.pth +3 -0
  44. checkpoint-210000/rng_state_9.pth +3 -0
  45. checkpoint-210000/scheduler.pt +3 -0
  46. checkpoint-210000/trainer_state.json +3 -0
  47. checkpoint-210000/training_args.bin +3 -0
.gitattributes CHANGED
@@ -80,3 +80,5 @@ checkpoint-198000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
  checkpoint-200000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
  checkpoint-202000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
  checkpoint-204000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ checkpoint-208000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ checkpoint-210000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
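Each added line routes the matching file through Git LFS, so the repository stores a small pointer instead of the multi-megabyte trainer_state.json. A minimal sketch for listing which patterns .gitattributes marks as LFS-tracked:

```python
# Sketch: collect the path patterns that .gitattributes stores via Git LFS.
# Lines carrying "filter=lfs" (like the two added above) are LFS-tracked.
with open(".gitattributes") as f:
    lfs_patterns = [line.split()[0] for line in f if "filter=lfs" in line]

print(lfs_patterns)  # ends with the two trainer_state.json paths added here
```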
checkpoint-208000/config.json ADDED
@@ -0,0 +1,79 @@
+ {
+ "ar_steps": 1,
+ "architectures": [
+ "DiffVLMDiffusion"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "condition_layer": -1,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "image_token_id": 151655,
+ "img_cross_attention_dim": 2048,
+ "img_diffuser_depth": 2,
+ "img_ffn_dim_multiplier": null,
+ "img_hidden_size": 1536,
+ "img_multiple_of": 256,
+ "img_norm_eps": 1e-05,
+ "img_num_attention_heads": 12,
+ "img_num_kv_heads": 12,
+ "img_qk_norm": true,
+ "in_channels": 32,
+ "initializer_range": 0.02,
+ "inject_img_diffuser": false,
+ "input_size": 32,
+ "intermediate_size": 8960,
+ "layer_group_size": 7,
+ "layerwise_start_idx": 0,
+ "lora_alpha": 128,
+ "lora_bias": "none",
+ "lora_dropout": 0.05,
+ "lora_enable": false,
+ "lora_r": 64,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 28,
+ "model_type": "qwen2_vl",
+ "non_linearity": 1,
+ "norm_elementwise_affine": true,
+ "num_attention_heads": 12,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 2,
+ "patch_size": 2,
+ "repa_coeff": 0.1,
+ "repa_layers": "2",
+ "repa_shared": false,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "mrope_section": [
+ 16,
+ 24,
+ 24
+ ],
+ "rope_type": "default",
+ "type": "default"
+ },
+ "rope_theta": 1000000.0,
+ "sample_size": 128,
+ "sampling_steps": 28,
+ "sliding_window": null,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.47.0",
+ "use_cache": true,
+ "use_repa": false,
+ "use_residual_attn": true,
+ "use_sliding_window": false,
+ "vae_path": "mit-han-lab/dc-ae-f32c32-sana-1.1-diffusers",
+ "video_token_id": 151656,
+ "vision_config": {
+ "hidden_size": 1536,
+ "in_chans": 3,
+ "model_type": "qwen2_vl",
+ "spatial_patch_size": 14
+ },
+ "vision_end_token_id": 151653,
+ "vision_start_token_id": 151652,
+ "vision_token_id": 151654,
+ "vocab_size": 151936
+ }
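The architectures entry (DiffVLMDiffusion) is a custom class rather than a stock transformers model, so a safe way to inspect this checkpoint's hyperparameters is to read the config as plain JSON. A minimal sketch; loading the actual weights would additionally require the repo's own modeling code:

```python
# Sketch: read checkpoint-208000/config.json directly and inspect a few fields.
import json

with open("checkpoint-208000/config.json") as f:
    cfg = json.load(f)

print(cfg["architectures"], cfg["model_type"])       # ['DiffVLMDiffusion'] qwen2_vl
print(cfg["hidden_size"], cfg["num_hidden_layers"])  # 1536 28
print(cfg["vision_config"]["spatial_patch_size"])    # 14
```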
checkpoint-208000/generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "transformers_version": "4.47.0"
+ }
checkpoint-208000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c79582e93bec653101ef8236881702158a9f1e53db7a367a9e67dd75d7ffa4d
+ size 4539487822
checkpoint-208000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4647abd302cf31474a6804d45378134196b525e8be5a474a6ce3227263a90618
+ size 6632345482
checkpoint-208000/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:777d16168b33050d0a3460cc8127b8f9c79a695b4c43cd2468801b94cf77259e
+ size 15984
checkpoint-208000/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b280bc6634d81afe5aac994d5c79e6078585bb13c349a01648f6757690042e82
+ size 15984
checkpoint-208000/rng_state_10.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:02be1d2647070e3df0c04088c193b74f9c6e65f7fb6d8095a20b6e1f9e212ba0
+ size 15997
checkpoint-208000/rng_state_11.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55332ce61dbc33c91d2d92fce4ec75d5c2a104081d836d71e2b6b97cc59adb1f
+ size 15997
checkpoint-208000/rng_state_12.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2de69416763a421eb9772dcc77b57562e34eb7489fc48a2de420c123b6c0dc74
+ size 15997
checkpoint-208000/rng_state_13.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6d29d27b2b7098c5fa49462ab2480893e2c452cfbfb607ef946c323351afd156
+ size 15997
checkpoint-208000/rng_state_14.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2010bf968c5c37c75d13bd2adc5ff652ff47de95df563835ad61ff329e229bf4
+ size 15997
checkpoint-208000/rng_state_15.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8402a8b6d54ae64f869161a77a9811c8b7cb83f97dd1e9fd7216de43c1e10acb
+ size 15997
checkpoint-208000/rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:214b328975554f330673163b4a77081bad292a874c6245bfdb7b0ab58a97cc40
+ size 15984
checkpoint-208000/rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:98727039dd8151119e3b2dcacee9f4ab3bcb30d5aca0a28cc06728ec65d3d82c
+ size 15984
checkpoint-208000/rng_state_4.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e3f3590e7d192a64497ace971b0a1aa4e4d6bf7517226b9339db2948da7ddba
+ size 15984
checkpoint-208000/rng_state_5.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f2623519f42ae0776ed03f6ac98ea89b062310332c3dee12adaaf3e48a4303f5
+ size 15984
checkpoint-208000/rng_state_6.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e59a9c48ff032e2c6bad7631522190c5fb2f19670ff3f6bb05984add5a612ca1
+ size 15984
checkpoint-208000/rng_state_7.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f924e0bb74b3a236ab352160905877efb5e6f7b48bf66aee346f4d18f86c2e46
+ size 15984
checkpoint-208000/rng_state_8.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b992035c2ae730b3aa29c8af94e9aa32727230c46f29e2d2b1a00083d3a2315
+ size 15984
checkpoint-208000/rng_state_9.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:412fbfbfd3b3ba1e86baf2b8cc050da549ecb830735d801d299ed58bab1917b2
+ size 15984
checkpoint-208000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:63df0663fc26b123b4ad06332880999a0edd752b2d6e3a329e27417b70fd3740
+ size 1064
checkpoint-208000/trainer_state.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b8ff97801de8b4910f15aa528ccc9dcec1e95c1e01bf58b09b13e232b58b3e7
+ size 32161664
checkpoint-208000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1744514a0664f5e1cf2fa7dc918d71acde90b2e4f855ea1c15336c885ed73935
+ size 6008
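Every binary file in this commit is stored as a three-line Git LFS pointer (version, oid, size) like those above; the actual weights live in LFS storage. A small sketch for reading such a pointer, assuming the repository was cloned without smudging LFS content (e.g. with GIT_LFS_SKIP_SMUDGE=1):

```python
# Sketch: parse a Git LFS pointer file into its version/oid/size fields.
def read_lfs_pointer(path):
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = read_lfs_pointer("checkpoint-208000/model.safetensors")  # pointer stub, not the real weights
print(ptr["oid"], ptr["size"])  # sha256:6c79... 4539487822
```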
checkpoint-210000/config.json ADDED
@@ -0,0 +1,79 @@
+ {
+ "ar_steps": 1,
+ "architectures": [
+ "DiffVLMDiffusion"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "condition_layer": -1,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "image_token_id": 151655,
+ "img_cross_attention_dim": 2048,
+ "img_diffuser_depth": 2,
+ "img_ffn_dim_multiplier": null,
+ "img_hidden_size": 1536,
+ "img_multiple_of": 256,
+ "img_norm_eps": 1e-05,
+ "img_num_attention_heads": 12,
+ "img_num_kv_heads": 12,
+ "img_qk_norm": true,
+ "in_channels": 32,
+ "initializer_range": 0.02,
+ "inject_img_diffuser": false,
+ "input_size": 32,
+ "intermediate_size": 8960,
+ "layer_group_size": 7,
+ "layerwise_start_idx": 0,
+ "lora_alpha": 128,
+ "lora_bias": "none",
+ "lora_dropout": 0.05,
+ "lora_enable": false,
+ "lora_r": 64,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 28,
+ "model_type": "qwen2_vl",
+ "non_linearity": 1,
+ "norm_elementwise_affine": true,
+ "num_attention_heads": 12,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 2,
+ "patch_size": 2,
+ "repa_coeff": 0.1,
+ "repa_layers": "2",
+ "repa_shared": false,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "mrope_section": [
+ 16,
+ 24,
+ 24
+ ],
+ "rope_type": "default",
+ "type": "default"
+ },
+ "rope_theta": 1000000.0,
+ "sample_size": 128,
+ "sampling_steps": 28,
+ "sliding_window": null,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.47.0",
+ "use_cache": true,
+ "use_repa": false,
+ "use_residual_attn": true,
+ "use_sliding_window": false,
+ "vae_path": "mit-han-lab/dc-ae-f32c32-sana-1.1-diffusers",
+ "video_token_id": 151656,
+ "vision_config": {
+ "hidden_size": 1536,
+ "in_chans": 3,
+ "model_type": "qwen2_vl",
+ "spatial_patch_size": 14
+ },
+ "vision_end_token_id": 151653,
+ "vision_start_token_id": 151652,
+ "vision_token_id": 151654,
+ "vocab_size": 151936
+ }
checkpoint-210000/generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "transformers_version": "4.47.0"
+ }
checkpoint-210000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:441e9ea8af9563006a19301d5fccc1bf418193f48392fd82cfadf888054f3067
+ size 4539487822
checkpoint-210000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d1e8a9f4d2ee0022566875b7ba34b418b77e56c1693c597c27505ceba1e00a0
+ size 6632345482
checkpoint-210000/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:82888134df07909a9d3114fa6f5f07d4fbc2670cee11dc6ed6ccee324e6f7224
+ size 15984
checkpoint-210000/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d0eb59f5a7d455fc8cd08b4b7bd6af39b84af7daf9d5d26c5970f47fd1a3a1ef
+ size 15984
checkpoint-210000/rng_state_10.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bcec1f17488f0f59f1c03802cedbf6e034e812ee56cae543d0628554c1140200
+ size 15997
checkpoint-210000/rng_state_11.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c4e36a17364310fbff1e91e3708ded73ddb20130b160d0f967e0791c1ed8258
+ size 15997
checkpoint-210000/rng_state_12.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3fbd5ad242185ea09b85332b624636b02512b5f95d131662ea576b62b2db0cc1
+ size 15997
checkpoint-210000/rng_state_13.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:78d488ac7d85923b6c5686444af60ef05b407550006289f2bc2fce982f6b043c
+ size 15997
checkpoint-210000/rng_state_14.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:672cba6be214fe634874c83723b99d5a961f9ba6af118381f0eb2a1de01b4389
+ size 15997
checkpoint-210000/rng_state_15.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e8d17f73792f863602f631ee1f7f9c05b664f6ffd14a88035a434c6dc61df81
+ size 15997
checkpoint-210000/rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c73b1cd04f1d1e619b19848921aded4b2b0e47fe1641a650cc8351083f67b18
+ size 15984
checkpoint-210000/rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3373db3c68a3f7ad206698db82bf13dd8414c94ac84fad878902102dea918af
+ size 15984
checkpoint-210000/rng_state_4.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:05cd6fb9133568d31d60a4beb5bf2e0a9a39e7dba1e9f9d0a24ec4027218cae9
+ size 15984
checkpoint-210000/rng_state_5.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:74bf7ae2622919f74c86acc7783e5fa095e3bca067141b3bd86ee7511610da34
+ size 15984
checkpoint-210000/rng_state_6.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ef7308027ed55875182a00f6d70687c2ad871acf54578fe6a1db225346a7811
+ size 15984
checkpoint-210000/rng_state_7.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c056bdb3b55170756fd2747b2242f06259593cc5f0a6ce23415d5ffa19ca700
+ size 15984
checkpoint-210000/rng_state_8.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d49d05ed1ed1973a173d88fe560f4fb030cbc7017eaf9b2f81adbce7be675396
+ size 15984
checkpoint-210000/rng_state_9.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42001ca1992c48bac82ce653ff130b4b76549f26a85136a61cd0ff79a75648e4
+ size 15984
checkpoint-210000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5ead708e8a92b2004d0f2a90155606e70dd9afe6eefe9cd5a9e82c083885698
+ size 1064
checkpoint-210000/trainer_state.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:73b91d89acda5ff819bce71a5f2c1e8d81e408baac7fe388fba26dee00289b5c
+ size 32470959
checkpoint-210000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1744514a0664f5e1cf2fa7dc918d71acde90b2e4f855ea1c15336c885ed73935
+ size 6008
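To pull just one of the two checkpoints added here rather than the whole repository, huggingface_hub's snapshot_download accepts allow_patterns. A sketch with a placeholder repo id:

```python
# Sketch: download only the checkpoint-210000 folder from the Hub.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="zhiyang1/placeholder-repo",     # placeholder repo id
    allow_patterns=["checkpoint-210000/*"],  # skip the other checkpoint's ~11 GB of files
)
print(local_dir)
```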