zhiyang1 committed
Commit 1981b74 · verified · 1 Parent(s): 3a17e51

Upload folder using huggingface_hub

Files changed (47)
  1. .gitattributes +2 -0
  2. checkpoint-232000/config.json +79 -0
  3. checkpoint-232000/generation_config.json +6 -0
  4. checkpoint-232000/model.safetensors +3 -0
  5. checkpoint-232000/optimizer.pt +3 -0
  6. checkpoint-232000/rng_state_0.pth +3 -0
  7. checkpoint-232000/rng_state_1.pth +3 -0
  8. checkpoint-232000/rng_state_10.pth +3 -0
  9. checkpoint-232000/rng_state_11.pth +3 -0
  10. checkpoint-232000/rng_state_12.pth +3 -0
  11. checkpoint-232000/rng_state_13.pth +3 -0
  12. checkpoint-232000/rng_state_14.pth +3 -0
  13. checkpoint-232000/rng_state_15.pth +3 -0
  14. checkpoint-232000/rng_state_2.pth +3 -0
  15. checkpoint-232000/rng_state_3.pth +3 -0
  16. checkpoint-232000/rng_state_4.pth +3 -0
  17. checkpoint-232000/rng_state_5.pth +3 -0
  18. checkpoint-232000/rng_state_6.pth +3 -0
  19. checkpoint-232000/rng_state_7.pth +3 -0
  20. checkpoint-232000/rng_state_8.pth +3 -0
  21. checkpoint-232000/rng_state_9.pth +3 -0
  22. checkpoint-232000/scheduler.pt +3 -0
  23. checkpoint-232000/trainer_state.json +3 -0
  24. checkpoint-232000/training_args.bin +3 -0
  25. checkpoint-233000/config.json +79 -0
  26. checkpoint-233000/generation_config.json +6 -0
  27. checkpoint-233000/model.safetensors +3 -0
  28. checkpoint-233000/optimizer.pt +3 -0
  29. checkpoint-233000/rng_state_0.pth +3 -0
  30. checkpoint-233000/rng_state_1.pth +3 -0
  31. checkpoint-233000/rng_state_10.pth +3 -0
  32. checkpoint-233000/rng_state_11.pth +3 -0
  33. checkpoint-233000/rng_state_12.pth +3 -0
  34. checkpoint-233000/rng_state_13.pth +3 -0
  35. checkpoint-233000/rng_state_14.pth +3 -0
  36. checkpoint-233000/rng_state_15.pth +3 -0
  37. checkpoint-233000/rng_state_2.pth +3 -0
  38. checkpoint-233000/rng_state_3.pth +3 -0
  39. checkpoint-233000/rng_state_4.pth +3 -0
  40. checkpoint-233000/rng_state_5.pth +3 -0
  41. checkpoint-233000/rng_state_6.pth +3 -0
  42. checkpoint-233000/rng_state_7.pth +3 -0
  43. checkpoint-233000/rng_state_8.pth +3 -0
  44. checkpoint-233000/rng_state_9.pth +3 -0
  45. checkpoint-233000/scheduler.pt +3 -0
  46. checkpoint-233000/trainer_state.json +3 -0
  47. checkpoint-233000/training_args.bin +3 -0
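
The commit message above says the folder was pushed with huggingface_hub. A minimal sketch of how such an upload could be reproduced (the repo id below is a placeholder, not taken from this commit, and a write token is assumed to be configured):

# Hedged sketch: push a local folder of Trainer checkpoints to the Hub.
from huggingface_hub import HfApi

api = HfApi()  # assumes `huggingface-cli login` or HF_TOKEN has been set up
api.upload_folder(
    folder_path=".",                      # local folder containing checkpoint-232000/ and checkpoint-233000/
    repo_id="your-username/your-model",   # placeholder repo id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)

Large binaries (safetensors, .pt, .pth) are stored as Git LFS objects, which is why the diffs below show only LFS pointer text (oid/size) rather than file contents.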
.gitattributes CHANGED
@@ -90,3 +90,5 @@ checkpoint-220000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
  checkpoint-221000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
  checkpoint-229000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
  checkpoint-230000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ checkpoint-232000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ checkpoint-233000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
checkpoint-232000/config.json ADDED
@@ -0,0 +1,79 @@
+ {
+ "ar_steps": 1,
+ "architectures": [
+ "DiffVLMBaseline"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "condition_layer": -1,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "image_token_id": 151655,
+ "img_cross_attention_dim": 2048,
+ "img_diffuser_depth": 2,
+ "img_ffn_dim_multiplier": null,
+ "img_hidden_size": 1536,
+ "img_multiple_of": 256,
+ "img_norm_eps": 1e-05,
+ "img_num_attention_heads": 12,
+ "img_num_kv_heads": 12,
+ "img_qk_norm": true,
+ "in_channels": 32,
+ "initializer_range": 0.02,
+ "inject_img_diffuser": false,
+ "input_size": 32,
+ "intermediate_size": 8960,
+ "layer_group_size": 7,
+ "layerwise_start_idx": 0,
+ "lora_alpha": 128,
+ "lora_bias": "none",
+ "lora_dropout": 0.05,
+ "lora_enable": false,
+ "lora_r": 64,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 28,
+ "model_type": "qwen2_vl",
+ "non_linearity": 1,
+ "norm_elementwise_affine": true,
+ "num_attention_heads": 12,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 2,
+ "patch_size": 1,
+ "repa_coeff": 0.1,
+ "repa_layers": "2",
+ "repa_shared": false,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "mrope_section": [
+ 16,
+ 24,
+ 24
+ ],
+ "rope_type": "default",
+ "type": "default"
+ },
+ "rope_theta": 1000000.0,
+ "sample_size": 128,
+ "sampling_steps": 28,
+ "sliding_window": null,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.47.0",
+ "use_cache": true,
+ "use_repa": false,
+ "use_residual_attn": false,
+ "use_sliding_window": false,
+ "vae_path": "mit-han-lab/dc-ae-f32c32-in-1.0-diffusers",
+ "video_token_id": 151656,
+ "vision_config": {
+ "hidden_size": 1536,
+ "in_chans": 3,
+ "model_type": "qwen2_vl",
+ "spatial_patch_size": 14
+ },
+ "vision_end_token_id": 151653,
+ "vision_start_token_id": 151652,
+ "vision_token_id": 151654,
+ "vocab_size": 151936
+ }
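
Once the checkpoint is downloaded, the configuration above can be inspected with the standard library; a small sketch (local path assumed):

import json

# Assumes checkpoint-232000/config.json exists locally.
with open("checkpoint-232000/config.json") as f:
    cfg = json.load(f)

print(cfg["architectures"])                          # ['DiffVLMBaseline']
print(cfg["hidden_size"], cfg["num_hidden_layers"])  # 1536 28
print(cfg["vae_path"])                               # mit-han-lab/dc-ae-f32c32-in-1.0-diffusers

Note that "architectures" names a custom class (DiffVLMBaseline), so loading the weights with plain transformers would require the accompanying modeling code; the JSON itself is inspectable without it.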
checkpoint-232000/generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "transformers_version": "4.47.0"
+ }
checkpoint-232000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6104546b334ab68112884d898c6c5282fa3c5c1af6982448222c35fc2e667ef4
+ size 4410723984
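
The three lines above are a Git LFS pointer: the ~4.4 GB safetensors blob itself lives in LFS storage and is identified by its sha256 oid. A single file can be fetched without cloning the whole repository, for example (repo id again a placeholder):

from huggingface_hub import hf_hub_download

# Placeholder repo id; substitute the actual repository.
local_path = hf_hub_download(
    repo_id="your-username/your-model",
    filename="checkpoint-232000/model.safetensors",
)
print(local_path)  # path of the file in the local HF cache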
checkpoint-232000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6efdcefb21d24aa6713d42e6a824d0b7cd653d72af0aa19ca85bcb7c92acdf29
+ size 6330255386
checkpoint-232000/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a101aa080894ae0e18ccba8996d016c64c9b0a03db432147bd6d192a0a998c13
+ size 15984
checkpoint-232000/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2d0755ac9856a7ce81b926c30445f89009c238aac600a1b504476bf90ee567b1
+ size 15984
checkpoint-232000/rng_state_10.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b594a3f6feb25413777c089b1d6ce4c3eedc961b7fa7034dd8f3bf81ef7c5d08
+ size 15997
checkpoint-232000/rng_state_11.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67d1f6d92b52f7b5b5ac96b0c49407b4b10f1e6578020a7cbce83b2731058cec
+ size 15997
checkpoint-232000/rng_state_12.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2fb46dd2e9ae141791ace1e1167514ded0620e40391dc56a27deb080ba60de59
+ size 15997
checkpoint-232000/rng_state_13.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:999b6dbf755ba7884998c1f41e6b489bcbe1c10f07e5aabbf22567ac7e81384e
+ size 15997
checkpoint-232000/rng_state_14.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5bf21d0823a73876a670476ef7a3d4941b3c749096f912950b2b360036556d78
+ size 15997
checkpoint-232000/rng_state_15.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16626a378f951a2e8820e9f80e2016930cc46df08d59251c4cda09a287db3614
+ size 15997
checkpoint-232000/rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a802d5322d98a21d988c2bfd842aba88c53f48d43b55c517383a9d56649372dd
+ size 15984
checkpoint-232000/rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ac75470fd17c3653a9e46eb8e7cf0720661bc2a572b7860c75f53b3162bd0d9
+ size 15984
checkpoint-232000/rng_state_4.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:99a58d2bd20dfd03577ea5216a8698103d9f858e1f2f2e2b6863f5bdd29c3462
+ size 15984
checkpoint-232000/rng_state_5.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d3fe9e9a76ab6812c2ee8ee2c12c91077e5d780829539c106c21125f4cf50331
+ size 15984
checkpoint-232000/rng_state_6.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:24c0784db00bbe67867877f11ffa8b47c739cb46b18e616161aa70d46cfb9823
+ size 15984
checkpoint-232000/rng_state_7.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c39e21ff5b973e90a5ce0047d80261260b798ca96c1c3bd11e45843d8468b6e7
+ size 15984
checkpoint-232000/rng_state_8.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a31d915b99098dfee79144aaa214b0517514eeff11de687b88816067bf8f01af
+ size 15984
checkpoint-232000/rng_state_9.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d9b1c491e007ccbd1dfc69150086453adae49b3e85d150bff9eeed7555436aff
+ size 15984
checkpoint-232000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c8bd2eae89027a56ad5e8538548a7ef196a3ef99873ca23154952f1eec298be4
+ size 1064
checkpoint-232000/trainer_state.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c65e39c095c3094bcd73bd7017b00510499e0e3d99496182def24f43741ba3bd
+ size 35481412
checkpoint-232000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2c4b4a1999633d2ef5d6621a0cfc44e55edf3f678936bf8213da469c951c88a3
+ size 5944
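
Taken together, checkpoint-232000/ has the standard transformers Trainer checkpoint layout: model.safetensors (weights), optimizer.pt and scheduler.pt (optimizer and LR-scheduler state), sixteen rng_state_*.pth files (one RNG snapshot per training process), trainer_state.json (step counter and log history), and training_args.bin (the serialized TrainingArguments). A hedged sketch of resuming from it, assuming the original model, arguments, and dataset objects can be re-created (they are not part of this diff):

from transformers import Trainer

# `model`, `training_args`, and `train_dataset` are assumed to be rebuilt
# exactly as in the original run; they are placeholders here.
trainer = Trainer(model=model, args=training_args, train_dataset=train_dataset)
trainer.train(resume_from_checkpoint="checkpoint-232000")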
checkpoint-233000/config.json ADDED
@@ -0,0 +1,79 @@
+ {
+ "ar_steps": 1,
+ "architectures": [
+ "DiffVLMBaseline"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "condition_layer": -1,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "image_token_id": 151655,
+ "img_cross_attention_dim": 2048,
+ "img_diffuser_depth": 2,
+ "img_ffn_dim_multiplier": null,
+ "img_hidden_size": 1536,
+ "img_multiple_of": 256,
+ "img_norm_eps": 1e-05,
+ "img_num_attention_heads": 12,
+ "img_num_kv_heads": 12,
+ "img_qk_norm": true,
+ "in_channels": 32,
+ "initializer_range": 0.02,
+ "inject_img_diffuser": false,
+ "input_size": 32,
+ "intermediate_size": 8960,
+ "layer_group_size": 7,
+ "layerwise_start_idx": 0,
+ "lora_alpha": 128,
+ "lora_bias": "none",
+ "lora_dropout": 0.05,
+ "lora_enable": false,
+ "lora_r": 64,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 28,
+ "model_type": "qwen2_vl",
+ "non_linearity": 1,
+ "norm_elementwise_affine": true,
+ "num_attention_heads": 12,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 2,
+ "patch_size": 1,
+ "repa_coeff": 0.1,
+ "repa_layers": "2",
+ "repa_shared": false,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "mrope_section": [
+ 16,
+ 24,
+ 24
+ ],
+ "rope_type": "default",
+ "type": "default"
+ },
+ "rope_theta": 1000000.0,
+ "sample_size": 128,
+ "sampling_steps": 28,
+ "sliding_window": null,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.47.0",
+ "use_cache": true,
+ "use_repa": false,
+ "use_residual_attn": false,
+ "use_sliding_window": false,
+ "vae_path": "mit-han-lab/dc-ae-f32c32-in-1.0-diffusers",
+ "video_token_id": 151656,
+ "vision_config": {
+ "hidden_size": 1536,
+ "in_chans": 3,
+ "model_type": "qwen2_vl",
+ "spatial_patch_size": 14
+ },
+ "vision_end_token_id": 151653,
+ "vision_start_token_id": 151652,
+ "vision_token_id": 151654,
+ "vocab_size": 151936
+ }
checkpoint-233000/generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "transformers_version": "4.47.0"
+ }
checkpoint-233000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:627ea6722b0e47d3e4891e1e1fb3adc368e9ff6b18560d77849f2b48c73b92f4
+ size 4410723984
checkpoint-233000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:48fce5b910b9f66f9fcf82bff0263d76e1a71ac6c11ecec1da57ad26fb0663ed
+ size 6330255386
checkpoint-233000/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:23d93b6705563fd4996e2bbe74798834aa7c85f1b444108bef1a3603f178f497
+ size 15984
checkpoint-233000/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dec0e1213cc5bcaa53f2c5171da8d1f900a82258aa960d15c5d4f8bba93188fa
+ size 15984
checkpoint-233000/rng_state_10.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:70753934ac4b33175185cd676f5b5204b59c573ff8d58171746fd8268d8bf1c0
+ size 15997
checkpoint-233000/rng_state_11.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:859d34b6bf0fe8493a405481bee550fa3ba639898d00fca9718feec97c93059c
+ size 15997
checkpoint-233000/rng_state_12.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8bd42308e3ed3e9ecb53374af25bbb3ee9dbcd1edb3fcd4fb5733eff1f737cbc
+ size 15997
checkpoint-233000/rng_state_13.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d632411c1e2c018e994bfe9ddedbd6ee299ff09195d7433f11f808ed4fe5d24d
+ size 15997
checkpoint-233000/rng_state_14.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e629a24231aeb1637e638ab223993a37eb7f3b1c25ce9cbfad234bb640732afb
+ size 15997
checkpoint-233000/rng_state_15.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:98f978591977409374d667f62170af1cfc7d3023c5d0eba99b4ad93f56ffb44e
+ size 15997
checkpoint-233000/rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:31e83732db4afd3708841bfda1baa31151cd02b9fdaaa60843810b621733a3b1
+ size 15984
checkpoint-233000/rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:07e58da555c7effd12f4734a2ac6b6e55167c9c510a626235e0a9ab470b10ef9
+ size 15984
checkpoint-233000/rng_state_4.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:46e4152e381e70b47e707ee77c16724c9c0cfb6463b28e8329ac10980a51dfab
+ size 15984
checkpoint-233000/rng_state_5.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3772b014ff7363b405e564f161fb039204a95cc747d5fb41b8bb210be5d4e6ac
+ size 15984
checkpoint-233000/rng_state_6.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f1d09daeee607bdaef9895bfab928b35679c75e20c3e9d6027b87649d7aa558
+ size 15984
checkpoint-233000/rng_state_7.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:44b12af89a1c08d43c0e5155079cd0f095cf0badec47d7d90b4675e206dda0df
+ size 15984
checkpoint-233000/rng_state_8.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:41fe90a07ef567877d0be87bb31dd6e545266e1bd85b55eb16ac9de16050adbb
+ size 15984
checkpoint-233000/rng_state_9.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cbfaf37d2ace6cff88327b596c19ea4e5fd70a0be875b6adf3ed536e85905187
+ size 15984
checkpoint-233000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:61ce08948503c2d6031d8806ffc5e45e9b20cdba60c3d79c3f4c170d7ec5ea92
+ size 1064
checkpoint-233000/trainer_state.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:541fcca19bd1d06024d9cfafc28d14e9a8a4fabcad077e49686f7aff1eaaccb1
+ size 35634783
checkpoint-233000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4725705d5011066bc1bc5225b36424cfe898b35e3656c30b704bdd1d3854be14
+ size 5944