diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..06ecf369fdcf452630b0917d95aa79c0c987de1a 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +checkpoint-250/tokenizer.json filter=lfs diff=lfs merge=lfs -text +tokenizer.json filter=lfs diff=lfs merge=lfs -text +checkpoint-500/tokenizer.json filter=lfs diff=lfs merge=lfs -text diff --git a/README.md b/README.md new file mode 100644 index 0000000000000000000000000000000000000000..76220a22ded644bf9cf8e207342317d6dd4a659f --- /dev/null +++ b/README.md @@ -0,0 +1,61 @@ +--- +library_name: transformers +license: other +base_model: meta-llama/Llama-3.1-8B-Instruct +tags: +- llama-factory +- full +- generated_from_trainer +model-index: +- name: sft + results: [] +--- + + + +# sft + +This model is a fine-tuned version of [meta-llama/Llama-3.1-8B-Instruct](https://huggingface.co/meta-llama/Llama-3.1-8B-Instruct) on the open_thoughts_indic dataset. 
+ +## Model description + +More information needed + +## Intended uses & limitations + +More information needed + +## Training and evaluation data + +More information needed + +## Training procedure + +### Training hyperparameters + +The following hyperparameters were used during training: +- learning_rate: 1e-05 +- train_batch_size: 1 +- eval_batch_size: 8 +- seed: 42 +- distributed_type: multi-GPU +- num_devices: 8 +- gradient_accumulation_steps: 2 +- total_train_batch_size: 16 +- total_eval_batch_size: 64 +- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 +- lr_scheduler_type: cosine +- lr_scheduler_warmup_ratio: 0.1 +- num_epochs: 3.0 + +### Training results + + + +### Framework versions + +- Transformers 4.45.0 +- Pytorch 2.6.0.dev20241113+rocm6.2 +- Datasets 3.1.0 +- Tokenizers 0.20.3 diff --git a/all_results.json b/all_results.json new file mode 100644 index 0000000000000000000000000000000000000000..78fb4a8fcb13b4b8db3a0ca67d3ebc6bf903e822 --- /dev/null +++ b/all_results.json @@ -0,0 +1,8 @@ +{ + "epoch": 2.9938900203665986, + "total_flos": 48064094208000.0, + "train_loss": 0.08951896556025865, + "train_runtime": 6057.9419, + "train_samples_per_second": 1.945, + "train_steps_per_second": 0.121 +} \ No newline at end of file diff --git a/checkpoint-250/config.json b/checkpoint-250/config.json new file mode 100644 index 0000000000000000000000000000000000000000..c499423bcd77384986c270517f6957e2160cf584 --- /dev/null +++ b/checkpoint-250/config.json @@ -0,0 +1,40 @@ +{ + "_name_or_path": "meta-llama/Llama-3.1-8B-Instruct", + "architectures": [ + "LlamaForCausalLM" + ], + "attention_bias": false, + "attention_dropout": 0.0, + "bos_token_id": 128000, + "eos_token_id": [ + 128001, + 128008, + 128009 + ], + "head_dim": 128, + "hidden_act": "silu", + "hidden_size": 4096, + "initializer_range": 0.02, + "intermediate_size": 14336, + "max_position_embeddings": 131072, + "mlp_bias": false, + "model_type": "llama", + "num_attention_heads": 32, + 
"num_hidden_layers": 32, + "num_key_value_heads": 8, + "pretraining_tp": 1, + "rms_norm_eps": 1e-05, + "rope_scaling": { + "factor": 8.0, + "high_freq_factor": 4.0, + "low_freq_factor": 1.0, + "original_max_position_embeddings": 8192, + "rope_type": "llama3" + }, + "rope_theta": 500000.0, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.45.0", + "use_cache": false, + "vocab_size": 128256 +} diff --git a/checkpoint-250/generation_config.json b/checkpoint-250/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..6a807a364acb034610b0c0959eb3727910a0babe --- /dev/null +++ b/checkpoint-250/generation_config.json @@ -0,0 +1,12 @@ +{ + "bos_token_id": 128000, + "do_sample": true, + "eos_token_id": [ + 128001, + 128008, + 128009 + ], + "temperature": 0.6, + "top_p": 0.9, + "transformers_version": "4.45.0" +} diff --git a/checkpoint-250/global_step250/zero_pp_rank_0_mp_rank_00_model_states.pt b/checkpoint-250/global_step250/zero_pp_rank_0_mp_rank_00_model_states.pt new file mode 100644 index 0000000000000000000000000000000000000000..27c86428fca9ea87a9552eeaeecc69979105d462 --- /dev/null +++ b/checkpoint-250/global_step250/zero_pp_rank_0_mp_rank_00_model_states.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:722629e05fdf0e6e316307f91bff9e9bf8ea02f9ec7e2100ad50dba4f69bf8d4 +size 151013 diff --git a/checkpoint-250/global_step250/zero_pp_rank_1_mp_rank_00_model_states.pt b/checkpoint-250/global_step250/zero_pp_rank_1_mp_rank_00_model_states.pt new file mode 100644 index 0000000000000000000000000000000000000000..3dd7953cf73667d6f42e19b6c7ccf6eefda32bc4 --- /dev/null +++ b/checkpoint-250/global_step250/zero_pp_rank_1_mp_rank_00_model_states.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4309d7320150779dbb433c3218be10e29ac613ce5a1b6ed3a478de7fb5e08994 +size 151013 diff --git a/checkpoint-250/global_step250/zero_pp_rank_2_mp_rank_00_model_states.pt 
b/checkpoint-250/global_step250/zero_pp_rank_2_mp_rank_00_model_states.pt new file mode 100644 index 0000000000000000000000000000000000000000..9b115fc101996749167af8de35d0c1a5df072085 --- /dev/null +++ b/checkpoint-250/global_step250/zero_pp_rank_2_mp_rank_00_model_states.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4a1d08ec23174f78945635f23b5818fed32952ad1ce47020aae7fd068f2056f0 +size 151013 diff --git a/checkpoint-250/global_step250/zero_pp_rank_3_mp_rank_00_model_states.pt b/checkpoint-250/global_step250/zero_pp_rank_3_mp_rank_00_model_states.pt new file mode 100644 index 0000000000000000000000000000000000000000..1a5e4c0b8048d4c813acf2d2be67a5ea1fc82036 --- /dev/null +++ b/checkpoint-250/global_step250/zero_pp_rank_3_mp_rank_00_model_states.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c56ba0eba5b7d18a95937d9e51129968d07a335530af84dfb4e5c890dfc24668 +size 151013 diff --git a/checkpoint-250/global_step250/zero_pp_rank_4_mp_rank_00_model_states.pt b/checkpoint-250/global_step250/zero_pp_rank_4_mp_rank_00_model_states.pt new file mode 100644 index 0000000000000000000000000000000000000000..b8b7706eda99b2975dca8d645984f7ef69cc402e --- /dev/null +++ b/checkpoint-250/global_step250/zero_pp_rank_4_mp_rank_00_model_states.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6e0a6bc5a7b28409207fd4a17751905f6958207bce4dfe80ad218bf9b6b6e5d0 +size 151013 diff --git a/checkpoint-250/global_step250/zero_pp_rank_5_mp_rank_00_model_states.pt b/checkpoint-250/global_step250/zero_pp_rank_5_mp_rank_00_model_states.pt new file mode 100644 index 0000000000000000000000000000000000000000..7773da7bfc6db828da6582bbe335466b4e9b50ef --- /dev/null +++ b/checkpoint-250/global_step250/zero_pp_rank_5_mp_rank_00_model_states.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5b0104954ca3acd9f9194c7264621e2f2b4b0aa03224d8d02b02ffc7276b7f68 +size 151013 diff --git 
a/checkpoint-250/global_step250/zero_pp_rank_6_mp_rank_00_model_states.pt b/checkpoint-250/global_step250/zero_pp_rank_6_mp_rank_00_model_states.pt new file mode 100644 index 0000000000000000000000000000000000000000..0b030ae4e69e34c6295094ff7f1ecc40a81d0919 --- /dev/null +++ b/checkpoint-250/global_step250/zero_pp_rank_6_mp_rank_00_model_states.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cbe22acbaeb233d7b6f1a8409fea6a67edc5f3f025172c03fcc67bd2d2241317 +size 151013 diff --git a/checkpoint-250/global_step250/zero_pp_rank_7_mp_rank_00_model_states.pt b/checkpoint-250/global_step250/zero_pp_rank_7_mp_rank_00_model_states.pt new file mode 100644 index 0000000000000000000000000000000000000000..19fe210e92390488dd8b694b2f0e941b3cac3ce5 --- /dev/null +++ b/checkpoint-250/global_step250/zero_pp_rank_7_mp_rank_00_model_states.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2c15330e78d00776aa90aa965d946af8632b25190965d1fa149eb1e0edf8d3d2 +size 151013 diff --git a/checkpoint-250/latest b/checkpoint-250/latest new file mode 100644 index 0000000000000000000000000000000000000000..87449ff1a854ba4a77ea33fbc24adaed3311d6b1 --- /dev/null +++ b/checkpoint-250/latest @@ -0,0 +1 @@ +global_step250 \ No newline at end of file diff --git a/checkpoint-250/model.safetensors.index.json b/checkpoint-250/model.safetensors.index.json new file mode 100644 index 0000000000000000000000000000000000000000..0fd8120f1c6acddc268ebc2583058efaf699a771 --- /dev/null +++ b/checkpoint-250/model.safetensors.index.json @@ -0,0 +1,298 @@ +{ + "metadata": { + "total_size": 16060522496 + }, + "weight_map": { + "lm_head.weight": "model-00004-of-00004.safetensors", + "model.embed_tokens.weight": "model-00001-of-00004.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.o_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.k_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.input_layernorm.weight": 
"model-00003-of-00004.safetensors", + "model.layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.31.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + 
"model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.norm.weight": "model-00004-of-00004.safetensors" + } +} diff --git a/checkpoint-250/rng_state_0.pth b/checkpoint-250/rng_state_0.pth new file mode 100644 index 0000000000000000000000000000000000000000..b346349ce12dd5a17d4b91ed2a5722bb52550950 --- /dev/null +++ b/checkpoint-250/rng_state_0.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ad8a35afd8967cbb748405387e44426e43ad127028e826eddc9b67d2ca873c85 +size 15984 diff --git a/checkpoint-250/rng_state_1.pth b/checkpoint-250/rng_state_1.pth new file mode 100644 index 0000000000000000000000000000000000000000..68f3c6994456cb8d0592a5375d99503c8924b1c4 --- /dev/null +++ b/checkpoint-250/rng_state_1.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:f338ce80d7c441076bfc8c53b84067a0181f5a14e80c13d5acb8150b659f4d73 +size 15984 diff --git a/checkpoint-250/rng_state_2.pth b/checkpoint-250/rng_state_2.pth new file mode 100644 index 0000000000000000000000000000000000000000..be044f6ceeed587d30e80c2f72d5aa19fdc9947b --- /dev/null +++ b/checkpoint-250/rng_state_2.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c9fbc9fa428939be10b46779f0eb5cd833e0da426b1cbdee77b3a55b6952235b +size 15984 diff --git a/checkpoint-250/rng_state_3.pth b/checkpoint-250/rng_state_3.pth new file mode 100644 index 0000000000000000000000000000000000000000..fc825249656a9b858782542bd3f4386250f1dfe0 --- /dev/null +++ b/checkpoint-250/rng_state_3.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ac55dba0b79d5fa4699d239da2f966d52040d576d31234ac8d4632e6956481bc +size 15984 diff --git a/checkpoint-250/rng_state_4.pth b/checkpoint-250/rng_state_4.pth new file mode 100644 index 0000000000000000000000000000000000000000..d30f52a44be563c152ae09db6ae934da6da0d3ed --- /dev/null +++ b/checkpoint-250/rng_state_4.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:af2d0c015100768ffa23faf3b6c2d54ea89eb045603e30e55cd211e06ff34972 +size 15984 diff --git a/checkpoint-250/rng_state_5.pth b/checkpoint-250/rng_state_5.pth new file mode 100644 index 0000000000000000000000000000000000000000..c8715d27ab23ae545d58039cf949cc44ecc1da5e --- /dev/null +++ b/checkpoint-250/rng_state_5.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c60a1b40608e34bc801c8231f97b81c53b5290dfaed1b9cd0ccbeca29574a991 +size 15984 diff --git a/checkpoint-250/rng_state_6.pth b/checkpoint-250/rng_state_6.pth new file mode 100644 index 0000000000000000000000000000000000000000..1ed791b6ef76eadf0b0c55a5733411771e2ae027 --- /dev/null +++ b/checkpoint-250/rng_state_6.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:3ad6a142a403eb9aafc4a3a9a856bca648fe31fd22d796867baca31fb13656aa +size 15984 diff --git a/checkpoint-250/rng_state_7.pth b/checkpoint-250/rng_state_7.pth new file mode 100644 index 0000000000000000000000000000000000000000..800c3bbbc5edf7db01a8316069d439c5fb8d8c30 --- /dev/null +++ b/checkpoint-250/rng_state_7.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:38bc23a138cc800b22881742c0f3f9a71731a9a7111c6058a0077e6274d21773 +size 15984 diff --git a/checkpoint-250/scheduler.pt b/checkpoint-250/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..fb7ce6940ac3bae38a80530774ba51bda54bf44a --- /dev/null +++ b/checkpoint-250/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1021f1d7900b96b56ff8fcec8621728c5a9abde9f5bb3739261bb7beb6a5b1e6 +size 1064 diff --git a/checkpoint-250/special_tokens_map.json b/checkpoint-250/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..14daf4588e61b4e4983af0fccaba4d5500c0977c --- /dev/null +++ b/checkpoint-250/special_tokens_map.json @@ -0,0 +1,26 @@ +{ + "additional_special_tokens": [ + { + "content": "<|eom_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } + ], + "bos_token": { + "content": "<|begin_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "<|eot_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": "<|eot_id|>" +} diff --git a/checkpoint-250/tokenizer.json b/checkpoint-250/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2 --- /dev/null +++ b/checkpoint-250/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b +size 17209920 diff --git 
a/checkpoint-250/tokenizer_config.json b/checkpoint-250/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7d655d20e4224cc5f793625e97b5f1842407cbba --- /dev/null +++ b/checkpoint-250/tokenizer_config.json @@ -0,0 +1,2068 @@ +{ + "added_tokens_decoder": { + "128000": { + "content": "<|begin_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128001": { + "content": "<|end_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128002": { + "content": "<|reserved_special_token_0|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128003": { + "content": "<|reserved_special_token_1|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128004": { + "content": "<|finetune_right_pad_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128005": { + "content": "<|reserved_special_token_2|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128006": { + "content": "<|start_header_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128007": { + "content": "<|end_header_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128008": { + "content": "<|eom_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128009": { + "content": "<|eot_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128010": { + "content": "<|python_tag|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": true + }, + "128011": { + "content": "<|reserved_special_token_3|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128012": { + "content": "<|reserved_special_token_4|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128013": { + "content": "<|reserved_special_token_5|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128014": { + "content": "<|reserved_special_token_6|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128015": { + "content": "<|reserved_special_token_7|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128016": { + "content": "<|reserved_special_token_8|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128017": { + "content": "<|reserved_special_token_9|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128018": { + "content": "<|reserved_special_token_10|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128019": { + "content": "<|reserved_special_token_11|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128020": { + "content": "<|reserved_special_token_12|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128021": { + "content": "<|reserved_special_token_13|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128022": { + "content": "<|reserved_special_token_14|>", + "lstrip": false, + "normalized": false, + "rstrip": 
false, + "single_word": false, + "special": true + }, + "128023": { + "content": "<|reserved_special_token_15|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128024": { + "content": "<|reserved_special_token_16|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128025": { + "content": "<|reserved_special_token_17|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128026": { + "content": "<|reserved_special_token_18|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128027": { + "content": "<|reserved_special_token_19|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128028": { + "content": "<|reserved_special_token_20|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128029": { + "content": "<|reserved_special_token_21|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128030": { + "content": "<|reserved_special_token_22|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128031": { + "content": "<|reserved_special_token_23|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128032": { + "content": "<|reserved_special_token_24|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128033": { + "content": "<|reserved_special_token_25|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128034": { + "content": "<|reserved_special_token_26|>", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128035": { + "content": "<|reserved_special_token_27|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128036": { + "content": "<|reserved_special_token_28|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128037": { + "content": "<|reserved_special_token_29|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128038": { + "content": "<|reserved_special_token_30|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128039": { + "content": "<|reserved_special_token_31|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128040": { + "content": "<|reserved_special_token_32|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128041": { + "content": "<|reserved_special_token_33|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128042": { + "content": "<|reserved_special_token_34|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128043": { + "content": "<|reserved_special_token_35|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128044": { + "content": "<|reserved_special_token_36|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128045": { + "content": "<|reserved_special_token_37|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128046": { + "content": "<|reserved_special_token_38|>", + "lstrip": false, 
+ "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128047": { + "content": "<|reserved_special_token_39|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128048": { + "content": "<|reserved_special_token_40|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128049": { + "content": "<|reserved_special_token_41|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128050": { + "content": "<|reserved_special_token_42|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128051": { + "content": "<|reserved_special_token_43|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128052": { + "content": "<|reserved_special_token_44|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128053": { + "content": "<|reserved_special_token_45|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128054": { + "content": "<|reserved_special_token_46|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128055": { + "content": "<|reserved_special_token_47|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128056": { + "content": "<|reserved_special_token_48|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128057": { + "content": "<|reserved_special_token_49|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128058": { + "content": "<|reserved_special_token_50|>", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128059": { + "content": "<|reserved_special_token_51|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128060": { + "content": "<|reserved_special_token_52|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128061": { + "content": "<|reserved_special_token_53|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128062": { + "content": "<|reserved_special_token_54|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128063": { + "content": "<|reserved_special_token_55|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128064": { + "content": "<|reserved_special_token_56|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128065": { + "content": "<|reserved_special_token_57|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128066": { + "content": "<|reserved_special_token_58|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128067": { + "content": "<|reserved_special_token_59|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128068": { + "content": "<|reserved_special_token_60|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128069": { + "content": "<|reserved_special_token_61|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128070": { + "content": 
"<|reserved_special_token_62|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128071": { + "content": "<|reserved_special_token_63|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128072": { + "content": "<|reserved_special_token_64|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128073": { + "content": "<|reserved_special_token_65|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128074": { + "content": "<|reserved_special_token_66|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128075": { + "content": "<|reserved_special_token_67|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128076": { + "content": "<|reserved_special_token_68|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128077": { + "content": "<|reserved_special_token_69|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128078": { + "content": "<|reserved_special_token_70|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128079": { + "content": "<|reserved_special_token_71|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128080": { + "content": "<|reserved_special_token_72|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128081": { + "content": "<|reserved_special_token_73|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + 
"128082": { + "content": "<|reserved_special_token_74|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128083": { + "content": "<|reserved_special_token_75|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128084": { + "content": "<|reserved_special_token_76|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128085": { + "content": "<|reserved_special_token_77|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128086": { + "content": "<|reserved_special_token_78|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128087": { + "content": "<|reserved_special_token_79|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128088": { + "content": "<|reserved_special_token_80|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128089": { + "content": "<|reserved_special_token_81|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128090": { + "content": "<|reserved_special_token_82|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128091": { + "content": "<|reserved_special_token_83|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128092": { + "content": "<|reserved_special_token_84|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128093": { + "content": "<|reserved_special_token_85|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + 
"special": true + }, + "128094": { + "content": "<|reserved_special_token_86|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128095": { + "content": "<|reserved_special_token_87|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128096": { + "content": "<|reserved_special_token_88|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128097": { + "content": "<|reserved_special_token_89|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128098": { + "content": "<|reserved_special_token_90|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128099": { + "content": "<|reserved_special_token_91|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128100": { + "content": "<|reserved_special_token_92|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128101": { + "content": "<|reserved_special_token_93|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128102": { + "content": "<|reserved_special_token_94|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128103": { + "content": "<|reserved_special_token_95|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128104": { + "content": "<|reserved_special_token_96|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128105": { + "content": "<|reserved_special_token_97|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": true + }, + "128106": { + "content": "<|reserved_special_token_98|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128107": { + "content": "<|reserved_special_token_99|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128108": { + "content": "<|reserved_special_token_100|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128109": { + "content": "<|reserved_special_token_101|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128110": { + "content": "<|reserved_special_token_102|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128111": { + "content": "<|reserved_special_token_103|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128112": { + "content": "<|reserved_special_token_104|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128113": { + "content": "<|reserved_special_token_105|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128114": { + "content": "<|reserved_special_token_106|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128115": { + "content": "<|reserved_special_token_107|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128116": { + "content": "<|reserved_special_token_108|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128117": { + "content": "<|reserved_special_token_109|>", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128118": { + "content": "<|reserved_special_token_110|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128119": { + "content": "<|reserved_special_token_111|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128120": { + "content": "<|reserved_special_token_112|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128121": { + "content": "<|reserved_special_token_113|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128122": { + "content": "<|reserved_special_token_114|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128123": { + "content": "<|reserved_special_token_115|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128124": { + "content": "<|reserved_special_token_116|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128125": { + "content": "<|reserved_special_token_117|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128126": { + "content": "<|reserved_special_token_118|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128127": { + "content": "<|reserved_special_token_119|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128128": { + "content": "<|reserved_special_token_120|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128129": { + "content": "<|reserved_special_token_121|>", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128130": { + "content": "<|reserved_special_token_122|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128131": { + "content": "<|reserved_special_token_123|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128132": { + "content": "<|reserved_special_token_124|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128133": { + "content": "<|reserved_special_token_125|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128134": { + "content": "<|reserved_special_token_126|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128135": { + "content": "<|reserved_special_token_127|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128136": { + "content": "<|reserved_special_token_128|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128137": { + "content": "<|reserved_special_token_129|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128138": { + "content": "<|reserved_special_token_130|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128139": { + "content": "<|reserved_special_token_131|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128140": { + "content": "<|reserved_special_token_132|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128141": { + "content": 
"<|reserved_special_token_133|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128142": { + "content": "<|reserved_special_token_134|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128143": { + "content": "<|reserved_special_token_135|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128144": { + "content": "<|reserved_special_token_136|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128145": { + "content": "<|reserved_special_token_137|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128146": { + "content": "<|reserved_special_token_138|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128147": { + "content": "<|reserved_special_token_139|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128148": { + "content": "<|reserved_special_token_140|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128149": { + "content": "<|reserved_special_token_141|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128150": { + "content": "<|reserved_special_token_142|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128151": { + "content": "<|reserved_special_token_143|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128152": { + "content": "<|reserved_special_token_144|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + 
}, + "128153": { + "content": "<|reserved_special_token_145|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128154": { + "content": "<|reserved_special_token_146|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128155": { + "content": "<|reserved_special_token_147|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128156": { + "content": "<|reserved_special_token_148|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128157": { + "content": "<|reserved_special_token_149|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128158": { + "content": "<|reserved_special_token_150|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128159": { + "content": "<|reserved_special_token_151|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128160": { + "content": "<|reserved_special_token_152|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128161": { + "content": "<|reserved_special_token_153|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128162": { + "content": "<|reserved_special_token_154|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128163": { + "content": "<|reserved_special_token_155|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128164": { + "content": "<|reserved_special_token_156|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": true + }, + "128165": { + "content": "<|reserved_special_token_157|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128166": { + "content": "<|reserved_special_token_158|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128167": { + "content": "<|reserved_special_token_159|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128168": { + "content": "<|reserved_special_token_160|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128169": { + "content": "<|reserved_special_token_161|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128170": { + "content": "<|reserved_special_token_162|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128171": { + "content": "<|reserved_special_token_163|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128172": { + "content": "<|reserved_special_token_164|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128173": { + "content": "<|reserved_special_token_165|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128174": { + "content": "<|reserved_special_token_166|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128175": { + "content": "<|reserved_special_token_167|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128176": { + "content": "<|reserved_special_token_168|>", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128177": { + "content": "<|reserved_special_token_169|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128178": { + "content": "<|reserved_special_token_170|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128179": { + "content": "<|reserved_special_token_171|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128180": { + "content": "<|reserved_special_token_172|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128181": { + "content": "<|reserved_special_token_173|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128182": { + "content": "<|reserved_special_token_174|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128183": { + "content": "<|reserved_special_token_175|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128184": { + "content": "<|reserved_special_token_176|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128185": { + "content": "<|reserved_special_token_177|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128186": { + "content": "<|reserved_special_token_178|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128187": { + "content": "<|reserved_special_token_179|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128188": { + "content": "<|reserved_special_token_180|>", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128189": { + "content": "<|reserved_special_token_181|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128190": { + "content": "<|reserved_special_token_182|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128191": { + "content": "<|reserved_special_token_183|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128192": { + "content": "<|reserved_special_token_184|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128193": { + "content": "<|reserved_special_token_185|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128194": { + "content": "<|reserved_special_token_186|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128195": { + "content": "<|reserved_special_token_187|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128196": { + "content": "<|reserved_special_token_188|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128197": { + "content": "<|reserved_special_token_189|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128198": { + "content": "<|reserved_special_token_190|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128199": { + "content": "<|reserved_special_token_191|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128200": { + "content": 
"<|reserved_special_token_192|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128201": { + "content": "<|reserved_special_token_193|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128202": { + "content": "<|reserved_special_token_194|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128203": { + "content": "<|reserved_special_token_195|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128204": { + "content": "<|reserved_special_token_196|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128205": { + "content": "<|reserved_special_token_197|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128206": { + "content": "<|reserved_special_token_198|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128207": { + "content": "<|reserved_special_token_199|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128208": { + "content": "<|reserved_special_token_200|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128209": { + "content": "<|reserved_special_token_201|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128210": { + "content": "<|reserved_special_token_202|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128211": { + "content": "<|reserved_special_token_203|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + 
}, + "128212": { + "content": "<|reserved_special_token_204|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128213": { + "content": "<|reserved_special_token_205|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128214": { + "content": "<|reserved_special_token_206|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128215": { + "content": "<|reserved_special_token_207|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128216": { + "content": "<|reserved_special_token_208|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128217": { + "content": "<|reserved_special_token_209|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128218": { + "content": "<|reserved_special_token_210|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128219": { + "content": "<|reserved_special_token_211|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128220": { + "content": "<|reserved_special_token_212|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128221": { + "content": "<|reserved_special_token_213|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128222": { + "content": "<|reserved_special_token_214|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128223": { + "content": "<|reserved_special_token_215|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": true + }, + "128224": { + "content": "<|reserved_special_token_216|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128225": { + "content": "<|reserved_special_token_217|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128226": { + "content": "<|reserved_special_token_218|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128227": { + "content": "<|reserved_special_token_219|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128228": { + "content": "<|reserved_special_token_220|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128229": { + "content": "<|reserved_special_token_221|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128230": { + "content": "<|reserved_special_token_222|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128231": { + "content": "<|reserved_special_token_223|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128232": { + "content": "<|reserved_special_token_224|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128233": { + "content": "<|reserved_special_token_225|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128234": { + "content": "<|reserved_special_token_226|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128235": { + "content": "<|reserved_special_token_227|>", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128236": { + "content": "<|reserved_special_token_228|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128237": { + "content": "<|reserved_special_token_229|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128238": { + "content": "<|reserved_special_token_230|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128239": { + "content": "<|reserved_special_token_231|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128240": { + "content": "<|reserved_special_token_232|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128241": { + "content": "<|reserved_special_token_233|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128242": { + "content": "<|reserved_special_token_234|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128243": { + "content": "<|reserved_special_token_235|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128244": { + "content": "<|reserved_special_token_236|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128245": { + "content": "<|reserved_special_token_237|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128246": { + "content": "<|reserved_special_token_238|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128247": { + "content": "<|reserved_special_token_239|>", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128248": { + "content": "<|reserved_special_token_240|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128249": { + "content": "<|reserved_special_token_241|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128250": { + "content": "<|reserved_special_token_242|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128251": { + "content": "<|reserved_special_token_243|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128252": { + "content": "<|reserved_special_token_244|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128253": { + "content": "<|reserved_special_token_245|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128254": { + "content": "<|reserved_special_token_246|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128255": { + "content": "<|reserved_special_token_247|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + } + }, + "additional_special_tokens": [ + "<|eom_id|>" + ], + "bos_token": "<|begin_of_text|>", + "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can 
slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and 
its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n", + "clean_up_tokenization_spaces": true, + "eos_token": "<|eot_id|>", + 
"model_input_names": [ + "input_ids", + "attention_mask" + ], + "model_max_length": 131072, + "pad_token": "<|eot_id|>", + "padding_side": "right", + "split_special_tokens": false, + "tokenizer_class": "PreTrainedTokenizerFast" +} diff --git a/checkpoint-250/trainer_state.json b/checkpoint-250/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..01e55f576afc397bc33254f0f172ccc8b1969ec4 --- /dev/null +++ b/checkpoint-250/trainer_state.json @@ -0,0 +1,1783 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 1.0183299389002036, + "eval_steps": 500, + "global_step": 250, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.004073319755600814, + "grad_norm": 14.739597738910254, + "learning_rate": 1.3513513513513515e-07, + "loss": 0.3301, + "step": 1 + }, + { + "epoch": 0.008146639511201629, + "grad_norm": 12.452379475537562, + "learning_rate": 2.702702702702703e-07, + "loss": 0.3036, + "step": 2 + }, + { + "epoch": 0.012219959266802444, + "grad_norm": 12.86836901323053, + "learning_rate": 4.0540540540540546e-07, + "loss": 0.3424, + "step": 3 + }, + { + "epoch": 0.016293279022403257, + "grad_norm": 13.43090696417895, + "learning_rate": 5.405405405405406e-07, + "loss": 0.3184, + "step": 4 + }, + { + "epoch": 0.020366598778004074, + "grad_norm": 14.809270827070751, + "learning_rate": 6.756756756756758e-07, + "loss": 0.3138, + "step": 5 + }, + { + "epoch": 0.024439918533604887, + "grad_norm": 12.082295065079254, + "learning_rate": 8.108108108108109e-07, + "loss": 0.2982, + "step": 6 + }, + { + "epoch": 0.028513238289205704, + "grad_norm": 8.711027825602685, + "learning_rate": 9.459459459459461e-07, + "loss": 0.3063, + "step": 7 + }, + { + "epoch": 0.032586558044806514, + "grad_norm": 7.652261107879573, + "learning_rate": 1.0810810810810812e-06, + "loss": 0.2749, + "step": 8 + }, + { + "epoch": 0.03665987780040733, + "grad_norm": 
7.726905307446712, + "learning_rate": 1.2162162162162164e-06, + "loss": 0.2718, + "step": 9 + }, + { + "epoch": 0.04073319755600815, + "grad_norm": 3.4676017287243446, + "learning_rate": 1.3513513513513515e-06, + "loss": 0.2238, + "step": 10 + }, + { + "epoch": 0.04480651731160896, + "grad_norm": 4.29492237739491, + "learning_rate": 1.4864864864864868e-06, + "loss": 0.2513, + "step": 11 + }, + { + "epoch": 0.048879837067209775, + "grad_norm": 3.5844077505877707, + "learning_rate": 1.6216216216216219e-06, + "loss": 0.2327, + "step": 12 + }, + { + "epoch": 0.05295315682281059, + "grad_norm": 7.031113851112456, + "learning_rate": 1.756756756756757e-06, + "loss": 0.239, + "step": 13 + }, + { + "epoch": 0.05702647657841141, + "grad_norm": 7.210450292846676, + "learning_rate": 1.8918918918918922e-06, + "loss": 0.277, + "step": 14 + }, + { + "epoch": 0.06109979633401222, + "grad_norm": 6.526062178388152, + "learning_rate": 2.0270270270270273e-06, + "loss": 0.2495, + "step": 15 + }, + { + "epoch": 0.06517311608961303, + "grad_norm": 5.157530413977274, + "learning_rate": 2.1621621621621623e-06, + "loss": 0.2425, + "step": 16 + }, + { + "epoch": 0.06924643584521385, + "grad_norm": 3.242500698401516, + "learning_rate": 2.297297297297298e-06, + "loss": 0.1985, + "step": 17 + }, + { + "epoch": 0.07331975560081466, + "grad_norm": 2.554097363562634, + "learning_rate": 2.432432432432433e-06, + "loss": 0.1822, + "step": 18 + }, + { + "epoch": 0.07739307535641547, + "grad_norm": 2.6535341941648047, + "learning_rate": 2.5675675675675675e-06, + "loss": 0.2252, + "step": 19 + }, + { + "epoch": 0.0814663951120163, + "grad_norm": 2.5516965886789076, + "learning_rate": 2.702702702702703e-06, + "loss": 0.1862, + "step": 20 + }, + { + "epoch": 0.0855397148676171, + "grad_norm": 2.6017363031504264, + "learning_rate": 2.837837837837838e-06, + "loss": 0.2098, + "step": 21 + }, + { + "epoch": 0.08961303462321792, + "grad_norm": 2.0129035331912184, + "learning_rate": 2.9729729729729736e-06, + 
"loss": 0.1699, + "step": 22 + }, + { + "epoch": 0.09368635437881874, + "grad_norm": 1.849938821231628, + "learning_rate": 3.1081081081081082e-06, + "loss": 0.1868, + "step": 23 + }, + { + "epoch": 0.09775967413441955, + "grad_norm": 1.764863585345639, + "learning_rate": 3.2432432432432437e-06, + "loss": 0.1669, + "step": 24 + }, + { + "epoch": 0.10183299389002037, + "grad_norm": 1.8494325730642949, + "learning_rate": 3.3783783783783788e-06, + "loss": 0.1507, + "step": 25 + }, + { + "epoch": 0.10590631364562118, + "grad_norm": 1.6304914383856781, + "learning_rate": 3.513513513513514e-06, + "loss": 0.1534, + "step": 26 + }, + { + "epoch": 0.109979633401222, + "grad_norm": 1.4579913516119778, + "learning_rate": 3.648648648648649e-06, + "loss": 0.1387, + "step": 27 + }, + { + "epoch": 0.11405295315682282, + "grad_norm": 1.6082049119577289, + "learning_rate": 3.7837837837837844e-06, + "loss": 0.1406, + "step": 28 + }, + { + "epoch": 0.11812627291242363, + "grad_norm": 1.4819461102449234, + "learning_rate": 3.918918918918919e-06, + "loss": 0.1323, + "step": 29 + }, + { + "epoch": 0.12219959266802444, + "grad_norm": 1.7009165157092279, + "learning_rate": 4.0540540540540545e-06, + "loss": 0.1577, + "step": 30 + }, + { + "epoch": 0.12627291242362526, + "grad_norm": 1.5721048343370418, + "learning_rate": 4.189189189189189e-06, + "loss": 0.1345, + "step": 31 + }, + { + "epoch": 0.13034623217922606, + "grad_norm": 1.5868902144082508, + "learning_rate": 4.324324324324325e-06, + "loss": 0.1641, + "step": 32 + }, + { + "epoch": 0.13441955193482688, + "grad_norm": 1.367409491711825, + "learning_rate": 4.45945945945946e-06, + "loss": 0.1568, + "step": 33 + }, + { + "epoch": 0.1384928716904277, + "grad_norm": 1.2082341226617432, + "learning_rate": 4.594594594594596e-06, + "loss": 0.1158, + "step": 34 + }, + { + "epoch": 0.1425661914460285, + "grad_norm": 1.2834846670425744, + "learning_rate": 4.72972972972973e-06, + "loss": 0.1553, + "step": 35 + }, + { + "epoch": 
0.14663951120162932, + "grad_norm": 1.4278045526468992, + "learning_rate": 4.864864864864866e-06, + "loss": 0.1472, + "step": 36 + }, + { + "epoch": 0.15071283095723015, + "grad_norm": 1.1433863309324082, + "learning_rate": 5e-06, + "loss": 0.1216, + "step": 37 + }, + { + "epoch": 0.15478615071283094, + "grad_norm": 1.2556861775151085, + "learning_rate": 5.135135135135135e-06, + "loss": 0.1383, + "step": 38 + }, + { + "epoch": 0.15885947046843177, + "grad_norm": 1.1940515610624718, + "learning_rate": 5.2702702702702705e-06, + "loss": 0.1488, + "step": 39 + }, + { + "epoch": 0.1629327902240326, + "grad_norm": 1.365361196469323, + "learning_rate": 5.405405405405406e-06, + "loss": 0.1658, + "step": 40 + }, + { + "epoch": 0.1670061099796334, + "grad_norm": 1.6375597676471598, + "learning_rate": 5.540540540540541e-06, + "loss": 0.1244, + "step": 41 + }, + { + "epoch": 0.1710794297352342, + "grad_norm": 1.058410205986207, + "learning_rate": 5.675675675675676e-06, + "loss": 0.1129, + "step": 42 + }, + { + "epoch": 0.17515274949083504, + "grad_norm": 1.468616447672182, + "learning_rate": 5.810810810810811e-06, + "loss": 0.176, + "step": 43 + }, + { + "epoch": 0.17922606924643583, + "grad_norm": 1.1292066998688302, + "learning_rate": 5.945945945945947e-06, + "loss": 0.1235, + "step": 44 + }, + { + "epoch": 0.18329938900203666, + "grad_norm": 1.1790440780653373, + "learning_rate": 6.081081081081082e-06, + "loss": 0.1352, + "step": 45 + }, + { + "epoch": 0.18737270875763748, + "grad_norm": 1.144770740193701, + "learning_rate": 6.2162162162162164e-06, + "loss": 0.1375, + "step": 46 + }, + { + "epoch": 0.19144602851323828, + "grad_norm": 1.3169675540020822, + "learning_rate": 6.351351351351351e-06, + "loss": 0.1451, + "step": 47 + }, + { + "epoch": 0.1955193482688391, + "grad_norm": 1.1364743430386761, + "learning_rate": 6.486486486486487e-06, + "loss": 0.1073, + "step": 48 + }, + { + "epoch": 0.19959266802443992, + "grad_norm": 1.3532964160734307, + "learning_rate": 
6.621621621621622e-06, + "loss": 0.1502, + "step": 49 + }, + { + "epoch": 0.20366598778004075, + "grad_norm": 1.1049371458723167, + "learning_rate": 6.7567567567567575e-06, + "loss": 0.116, + "step": 50 + }, + { + "epoch": 0.20773930753564154, + "grad_norm": 1.0634720045604809, + "learning_rate": 6.891891891891892e-06, + "loss": 0.1438, + "step": 51 + }, + { + "epoch": 0.21181262729124237, + "grad_norm": 1.1677623232682453, + "learning_rate": 7.027027027027028e-06, + "loss": 0.1143, + "step": 52 + }, + { + "epoch": 0.2158859470468432, + "grad_norm": 1.2552603959178812, + "learning_rate": 7.162162162162163e-06, + "loss": 0.1443, + "step": 53 + }, + { + "epoch": 0.219959266802444, + "grad_norm": 1.1556616828254782, + "learning_rate": 7.297297297297298e-06, + "loss": 0.1341, + "step": 54 + }, + { + "epoch": 0.2240325865580448, + "grad_norm": 1.1241236805182522, + "learning_rate": 7.4324324324324324e-06, + "loss": 0.1283, + "step": 55 + }, + { + "epoch": 0.22810590631364563, + "grad_norm": 0.9867741809756463, + "learning_rate": 7.567567567567569e-06, + "loss": 0.1302, + "step": 56 + }, + { + "epoch": 0.23217922606924643, + "grad_norm": 1.0672327000495885, + "learning_rate": 7.702702702702704e-06, + "loss": 0.113, + "step": 57 + }, + { + "epoch": 0.23625254582484725, + "grad_norm": 1.0659735135074857, + "learning_rate": 7.837837837837838e-06, + "loss": 0.1293, + "step": 58 + }, + { + "epoch": 0.24032586558044808, + "grad_norm": 1.2422197356017706, + "learning_rate": 7.972972972972974e-06, + "loss": 0.164, + "step": 59 + }, + { + "epoch": 0.24439918533604887, + "grad_norm": 1.3538609671806645, + "learning_rate": 8.108108108108109e-06, + "loss": 0.1548, + "step": 60 + }, + { + "epoch": 0.2484725050916497, + "grad_norm": 1.0759558101958346, + "learning_rate": 8.243243243243245e-06, + "loss": 0.1225, + "step": 61 + }, + { + "epoch": 0.2525458248472505, + "grad_norm": 1.1244956381449198, + "learning_rate": 8.378378378378378e-06, + "loss": 0.1175, + "step": 62 + }, + { + 
"epoch": 0.25661914460285135, + "grad_norm": 1.171629685706723, + "learning_rate": 8.513513513513514e-06, + "loss": 0.1204, + "step": 63 + }, + { + "epoch": 0.2606924643584521, + "grad_norm": 1.2905585681894916, + "learning_rate": 8.64864864864865e-06, + "loss": 0.1253, + "step": 64 + }, + { + "epoch": 0.26476578411405294, + "grad_norm": 1.3979008428570314, + "learning_rate": 8.783783783783785e-06, + "loss": 0.191, + "step": 65 + }, + { + "epoch": 0.26883910386965376, + "grad_norm": 1.226756333773235, + "learning_rate": 8.91891891891892e-06, + "loss": 0.1287, + "step": 66 + }, + { + "epoch": 0.2729124236252546, + "grad_norm": 1.2835470528218054, + "learning_rate": 9.054054054054054e-06, + "loss": 0.138, + "step": 67 + }, + { + "epoch": 0.2769857433808554, + "grad_norm": 1.1622195270679896, + "learning_rate": 9.189189189189191e-06, + "loss": 0.1259, + "step": 68 + }, + { + "epoch": 0.28105906313645623, + "grad_norm": 1.1512666578576678, + "learning_rate": 9.324324324324325e-06, + "loss": 0.1292, + "step": 69 + }, + { + "epoch": 0.285132382892057, + "grad_norm": 0.9695391815507838, + "learning_rate": 9.45945945945946e-06, + "loss": 0.1142, + "step": 70 + }, + { + "epoch": 0.2892057026476578, + "grad_norm": 1.1262409828408337, + "learning_rate": 9.594594594594594e-06, + "loss": 0.1188, + "step": 71 + }, + { + "epoch": 0.29327902240325865, + "grad_norm": 0.9820966211674147, + "learning_rate": 9.729729729729732e-06, + "loss": 0.1052, + "step": 72 + }, + { + "epoch": 0.2973523421588595, + "grad_norm": 1.1058230077470572, + "learning_rate": 9.864864864864865e-06, + "loss": 0.1246, + "step": 73 + }, + { + "epoch": 0.3014256619144603, + "grad_norm": 1.3891942844370528, + "learning_rate": 1e-05, + "loss": 0.1651, + "step": 74 + }, + { + "epoch": 0.3054989816700611, + "grad_norm": 1.1373599847305171, + "learning_rate": 9.99994352762958e-06, + "loss": 0.1259, + "step": 75 + }, + { + "epoch": 0.3095723014256619, + "grad_norm": 1.0803757941511039, + "learning_rate": 
9.999774111793974e-06, + "loss": 0.1485, + "step": 76 + }, + { + "epoch": 0.3136456211812627, + "grad_norm": 1.509987566205336, + "learning_rate": 9.999491756320105e-06, + "loss": 0.1708, + "step": 77 + }, + { + "epoch": 0.31771894093686354, + "grad_norm": 1.3769318827034491, + "learning_rate": 9.99909646758609e-06, + "loss": 0.1483, + "step": 78 + }, + { + "epoch": 0.32179226069246436, + "grad_norm": 0.9995516357476201, + "learning_rate": 9.99858825452108e-06, + "loss": 0.1124, + "step": 79 + }, + { + "epoch": 0.3258655804480652, + "grad_norm": 1.4328593788226842, + "learning_rate": 9.997967128605078e-06, + "loss": 0.1849, + "step": 80 + }, + { + "epoch": 0.329938900203666, + "grad_norm": 1.0397129864144867, + "learning_rate": 9.997233103868664e-06, + "loss": 0.1199, + "step": 81 + }, + { + "epoch": 0.3340122199592668, + "grad_norm": 1.3312975796955133, + "learning_rate": 9.996386196892683e-06, + "loss": 0.1748, + "step": 82 + }, + { + "epoch": 0.3380855397148676, + "grad_norm": 1.2070448028045222, + "learning_rate": 9.995426426807875e-06, + "loss": 0.1449, + "step": 83 + }, + { + "epoch": 0.3421588594704684, + "grad_norm": 0.9786604342473315, + "learning_rate": 9.994353815294438e-06, + "loss": 0.1349, + "step": 84 + }, + { + "epoch": 0.34623217922606925, + "grad_norm": 1.16279378070579, + "learning_rate": 9.993168386581533e-06, + "loss": 0.1111, + "step": 85 + }, + { + "epoch": 0.35030549898167007, + "grad_norm": 1.0832386326974766, + "learning_rate": 9.991870167446751e-06, + "loss": 0.1271, + "step": 86 + }, + { + "epoch": 0.3543788187372709, + "grad_norm": 1.076044536856832, + "learning_rate": 9.990459187215498e-06, + "loss": 0.122, + "step": 87 + }, + { + "epoch": 0.35845213849287166, + "grad_norm": 1.1390626595350608, + "learning_rate": 9.98893547776033e-06, + "loss": 0.1429, + "step": 88 + }, + { + "epoch": 0.3625254582484725, + "grad_norm": 1.2799324833393828, + "learning_rate": 9.987299073500245e-06, + "loss": 0.1789, + "step": 89 + }, + { + "epoch": 
0.3665987780040733, + "grad_norm": 1.0088789278468007, + "learning_rate": 9.985550011399889e-06, + "loss": 0.1217, + "step": 90 + }, + { + "epoch": 0.37067209775967414, + "grad_norm": 1.0635380396962304, + "learning_rate": 9.98368833096874e-06, + "loss": 0.1517, + "step": 91 + }, + { + "epoch": 0.37474541751527496, + "grad_norm": 1.1149195586496816, + "learning_rate": 9.981714074260196e-06, + "loss": 0.1648, + "step": 92 + }, + { + "epoch": 0.3788187372708758, + "grad_norm": 0.9770064004740078, + "learning_rate": 9.979627285870644e-06, + "loss": 0.1173, + "step": 93 + }, + { + "epoch": 0.38289205702647655, + "grad_norm": 1.5786545324573935, + "learning_rate": 9.977428012938437e-06, + "loss": 0.2148, + "step": 94 + }, + { + "epoch": 0.3869653767820774, + "grad_norm": 0.9445672697637628, + "learning_rate": 9.975116305142836e-06, + "loss": 0.1272, + "step": 95 + }, + { + "epoch": 0.3910386965376782, + "grad_norm": 0.832092882135511, + "learning_rate": 9.97269221470289e-06, + "loss": 0.1149, + "step": 96 + }, + { + "epoch": 0.395112016293279, + "grad_norm": 0.8009975217381654, + "learning_rate": 9.97015579637625e-06, + "loss": 0.1081, + "step": 97 + }, + { + "epoch": 0.39918533604887985, + "grad_norm": 0.909000272396086, + "learning_rate": 9.967507107457942e-06, + "loss": 0.1249, + "step": 98 + }, + { + "epoch": 0.40325865580448067, + "grad_norm": 0.9894702747295367, + "learning_rate": 9.96474620777906e-06, + "loss": 0.1404, + "step": 99 + }, + { + "epoch": 0.4073319755600815, + "grad_norm": 1.1517905886733883, + "learning_rate": 9.961873159705426e-06, + "loss": 0.1433, + "step": 100 + }, + { + "epoch": 0.41140529531568226, + "grad_norm": 1.2806427058824508, + "learning_rate": 9.95888802813617e-06, + "loss": 0.1723, + "step": 101 + }, + { + "epoch": 0.4154786150712831, + "grad_norm": 0.919332585767889, + "learning_rate": 9.955790880502278e-06, + "loss": 0.1219, + "step": 102 + }, + { + "epoch": 0.4195519348268839, + "grad_norm": 0.8901964293186232, + "learning_rate": 
9.952581786765057e-06, + "loss": 0.1157, + "step": 103 + }, + { + "epoch": 0.42362525458248473, + "grad_norm": 1.3877972822654616, + "learning_rate": 9.949260819414557e-06, + "loss": 0.1642, + "step": 104 + }, + { + "epoch": 0.42769857433808556, + "grad_norm": 0.9602184939318458, + "learning_rate": 9.945828053467939e-06, + "loss": 0.1224, + "step": 105 + }, + { + "epoch": 0.4317718940936864, + "grad_norm": 1.230791876608231, + "learning_rate": 9.942283566467773e-06, + "loss": 0.1596, + "step": 106 + }, + { + "epoch": 0.43584521384928715, + "grad_norm": 1.1454248942159495, + "learning_rate": 9.938627438480295e-06, + "loss": 0.1541, + "step": 107 + }, + { + "epoch": 0.439918533604888, + "grad_norm": 1.0873300186194603, + "learning_rate": 9.93485975209359e-06, + "loss": 0.1533, + "step": 108 + }, + { + "epoch": 0.4439918533604888, + "grad_norm": 0.9668569607934798, + "learning_rate": 9.930980592415728e-06, + "loss": 0.1539, + "step": 109 + }, + { + "epoch": 0.4480651731160896, + "grad_norm": 1.487429443095859, + "learning_rate": 9.926990047072849e-06, + "loss": 0.2379, + "step": 110 + }, + { + "epoch": 0.45213849287169044, + "grad_norm": 1.036501582869458, + "learning_rate": 9.922888206207174e-06, + "loss": 0.1181, + "step": 111 + }, + { + "epoch": 0.45621181262729127, + "grad_norm": 0.9427386345315173, + "learning_rate": 9.918675162474974e-06, + "loss": 0.1157, + "step": 112 + }, + { + "epoch": 0.46028513238289204, + "grad_norm": 1.1671785006625848, + "learning_rate": 9.914351011044472e-06, + "loss": 0.1671, + "step": 113 + }, + { + "epoch": 0.46435845213849286, + "grad_norm": 0.8485104800209154, + "learning_rate": 9.909915849593705e-06, + "loss": 0.1094, + "step": 114 + }, + { + "epoch": 0.4684317718940937, + "grad_norm": 0.895507646361391, + "learning_rate": 9.905369778308304e-06, + "loss": 0.1205, + "step": 115 + }, + { + "epoch": 0.4725050916496945, + "grad_norm": 1.1024237478073182, + "learning_rate": 9.900712899879237e-06, + "loss": 0.1551, + "step": 116 + }, + 
{ + "epoch": 0.47657841140529533, + "grad_norm": 1.0811464118865846, + "learning_rate": 9.895945319500488e-06, + "loss": 0.1402, + "step": 117 + }, + { + "epoch": 0.48065173116089616, + "grad_norm": 0.9829410685047446, + "learning_rate": 9.891067144866687e-06, + "loss": 0.1381, + "step": 118 + }, + { + "epoch": 0.4847250509164969, + "grad_norm": 0.8855824729064482, + "learning_rate": 9.886078486170665e-06, + "loss": 0.1038, + "step": 119 + }, + { + "epoch": 0.48879837067209775, + "grad_norm": 1.1091690462920576, + "learning_rate": 9.880979456100974e-06, + "loss": 0.1372, + "step": 120 + }, + { + "epoch": 0.49287169042769857, + "grad_norm": 0.907049897730717, + "learning_rate": 9.875770169839343e-06, + "loss": 0.1322, + "step": 121 + }, + { + "epoch": 0.4969450101832994, + "grad_norm": 1.0224824312976686, + "learning_rate": 9.870450745058066e-06, + "loss": 0.1257, + "step": 122 + }, + { + "epoch": 0.5010183299389002, + "grad_norm": 1.0439109698157967, + "learning_rate": 9.865021301917358e-06, + "loss": 0.1317, + "step": 123 + }, + { + "epoch": 0.505091649694501, + "grad_norm": 0.8972366065592501, + "learning_rate": 9.859481963062623e-06, + "loss": 0.1104, + "step": 124 + }, + { + "epoch": 0.5091649694501018, + "grad_norm": 0.916952485621608, + "learning_rate": 9.853832853621703e-06, + "loss": 0.124, + "step": 125 + }, + { + "epoch": 0.5132382892057027, + "grad_norm": 0.7586835858660547, + "learning_rate": 9.848074101202037e-06, + "loss": 0.1191, + "step": 126 + }, + { + "epoch": 0.5173116089613035, + "grad_norm": 0.9149593226270635, + "learning_rate": 9.842205835887785e-06, + "loss": 0.1188, + "step": 127 + }, + { + "epoch": 0.5213849287169042, + "grad_norm": 0.9483144871900878, + "learning_rate": 9.836228190236892e-06, + "loss": 0.1392, + "step": 128 + }, + { + "epoch": 0.5254582484725051, + "grad_norm": 1.1137009286811568, + "learning_rate": 9.83014129927808e-06, + "loss": 0.1331, + "step": 129 + }, + { + "epoch": 0.5295315682281059, + "grad_norm": 
1.0049886812823983, + "learning_rate": 9.823945300507815e-06, + "loss": 0.1393, + "step": 130 + }, + { + "epoch": 0.5336048879837068, + "grad_norm": 1.0017821694016227, + "learning_rate": 9.817640333887194e-06, + "loss": 0.1376, + "step": 131 + }, + { + "epoch": 0.5376782077393075, + "grad_norm": 0.8770993451067021, + "learning_rate": 9.81122654183878e-06, + "loss": 0.1075, + "step": 132 + }, + { + "epoch": 0.5417515274949084, + "grad_norm": 0.8112662923925413, + "learning_rate": 9.804704069243389e-06, + "loss": 0.1149, + "step": 133 + }, + { + "epoch": 0.5458248472505092, + "grad_norm": 0.7783508225595258, + "learning_rate": 9.798073063436815e-06, + "loss": 0.1077, + "step": 134 + }, + { + "epoch": 0.5498981670061099, + "grad_norm": 1.6671316247114485, + "learning_rate": 9.791333674206507e-06, + "loss": 0.1892, + "step": 135 + }, + { + "epoch": 0.5539714867617108, + "grad_norm": 0.8856245620297392, + "learning_rate": 9.784486053788179e-06, + "loss": 0.1075, + "step": 136 + }, + { + "epoch": 0.5580448065173116, + "grad_norm": 2.0578900491298824, + "learning_rate": 9.77753035686237e-06, + "loss": 0.1472, + "step": 137 + }, + { + "epoch": 0.5621181262729125, + "grad_norm": 1.148525636808097, + "learning_rate": 9.770466740550963e-06, + "loss": 0.1598, + "step": 138 + }, + { + "epoch": 0.5661914460285132, + "grad_norm": 0.8665254831769179, + "learning_rate": 9.763295364413616e-06, + "loss": 0.1186, + "step": 139 + }, + { + "epoch": 0.570264765784114, + "grad_norm": 1.0970826186220186, + "learning_rate": 9.756016390444174e-06, + "loss": 0.1386, + "step": 140 + }, + { + "epoch": 0.5743380855397149, + "grad_norm": 0.9530034310899396, + "learning_rate": 9.748629983067004e-06, + "loss": 0.1282, + "step": 141 + }, + { + "epoch": 0.5784114052953157, + "grad_norm": 1.2706893271757027, + "learning_rate": 9.741136309133279e-06, + "loss": 0.1754, + "step": 142 + }, + { + "epoch": 0.5824847250509165, + "grad_norm": 0.9703463762849697, + "learning_rate": 9.733535537917211e-06, + 
"loss": 0.1194, + "step": 143 + }, + { + "epoch": 0.5865580448065173, + "grad_norm": 0.8038414888371753, + "learning_rate": 9.725827841112226e-06, + "loss": 0.1162, + "step": 144 + }, + { + "epoch": 0.5906313645621182, + "grad_norm": 0.9411283645508486, + "learning_rate": 9.718013392827087e-06, + "loss": 0.1121, + "step": 145 + }, + { + "epoch": 0.594704684317719, + "grad_norm": 1.501666156048829, + "learning_rate": 9.710092369581966e-06, + "loss": 0.16, + "step": 146 + }, + { + "epoch": 0.5987780040733197, + "grad_norm": 0.9141719119872903, + "learning_rate": 9.702064950304442e-06, + "loss": 0.1211, + "step": 147 + }, + { + "epoch": 0.6028513238289206, + "grad_norm": 0.8652675727574004, + "learning_rate": 9.693931316325473e-06, + "loss": 0.0946, + "step": 148 + }, + { + "epoch": 0.6069246435845214, + "grad_norm": 0.7377787499846402, + "learning_rate": 9.685691651375297e-06, + "loss": 0.1016, + "step": 149 + }, + { + "epoch": 0.6109979633401222, + "grad_norm": 0.7630312206018969, + "learning_rate": 9.677346141579277e-06, + "loss": 0.1014, + "step": 150 + }, + { + "epoch": 0.615071283095723, + "grad_norm": 0.9718289359974593, + "learning_rate": 9.668894975453705e-06, + "loss": 0.1562, + "step": 151 + }, + { + "epoch": 0.6191446028513238, + "grad_norm": 1.004301729468449, + "learning_rate": 9.66033834390153e-06, + "loss": 0.1372, + "step": 152 + }, + { + "epoch": 0.6232179226069247, + "grad_norm": 0.9350824611493259, + "learning_rate": 9.65167644020806e-06, + "loss": 0.1254, + "step": 153 + }, + { + "epoch": 0.6272912423625254, + "grad_norm": 0.7612329276402703, + "learning_rate": 9.64290946003659e-06, + "loss": 0.0989, + "step": 154 + }, + { + "epoch": 0.6313645621181263, + "grad_norm": 0.7706614538086551, + "learning_rate": 9.63403760142398e-06, + "loss": 0.1013, + "step": 155 + }, + { + "epoch": 0.6354378818737271, + "grad_norm": 1.0210499034582712, + "learning_rate": 9.625061064776183e-06, + "loss": 0.1134, + "step": 156 + }, + { + "epoch": 0.639511201629328, + 
"grad_norm": 0.7560805642981956, + "learning_rate": 9.61598005286372e-06, + "loss": 0.0939, + "step": 157 + }, + { + "epoch": 0.6435845213849287, + "grad_norm": 1.0834289937869723, + "learning_rate": 9.606794770817102e-06, + "loss": 0.1785, + "step": 158 + }, + { + "epoch": 0.6476578411405295, + "grad_norm": 1.0611196002268826, + "learning_rate": 9.597505426122184e-06, + "loss": 0.1571, + "step": 159 + }, + { + "epoch": 0.6517311608961304, + "grad_norm": 1.0914261737532949, + "learning_rate": 9.588112228615495e-06, + "loss": 0.1745, + "step": 160 + }, + { + "epoch": 0.6558044806517311, + "grad_norm": 0.953948451978483, + "learning_rate": 9.57861539047949e-06, + "loss": 0.1353, + "step": 161 + }, + { + "epoch": 0.659877800407332, + "grad_norm": 1.2562247665468482, + "learning_rate": 9.569015126237744e-06, + "loss": 0.1521, + "step": 162 + }, + { + "epoch": 0.6639511201629328, + "grad_norm": 0.8283783602425362, + "learning_rate": 9.559311652750135e-06, + "loss": 0.1161, + "step": 163 + }, + { + "epoch": 0.6680244399185336, + "grad_norm": 0.7823509791751794, + "learning_rate": 9.549505189207924e-06, + "loss": 0.0976, + "step": 164 + }, + { + "epoch": 0.6720977596741344, + "grad_norm": 1.118258806444578, + "learning_rate": 9.539595957128803e-06, + "loss": 0.171, + "step": 165 + }, + { + "epoch": 0.6761710794297352, + "grad_norm": 0.7563799438807557, + "learning_rate": 9.529584180351902e-06, + "loss": 0.1159, + "step": 166 + }, + { + "epoch": 0.6802443991853361, + "grad_norm": 1.0059732424782886, + "learning_rate": 9.519470085032733e-06, + "loss": 0.1278, + "step": 167 + }, + { + "epoch": 0.6843177189409368, + "grad_norm": 0.8261325503708756, + "learning_rate": 9.509253899638066e-06, + "loss": 0.104, + "step": 168 + }, + { + "epoch": 0.6883910386965377, + "grad_norm": 1.1918252125330613, + "learning_rate": 9.498935854940785e-06, + "loss": 0.1682, + "step": 169 + }, + { + "epoch": 0.6924643584521385, + "grad_norm": 0.7216709177105455, + "learning_rate": 
9.488516184014667e-06, + "loss": 0.1089, + "step": 170 + }, + { + "epoch": 0.6965376782077393, + "grad_norm": 0.8952054280934858, + "learning_rate": 9.477995122229117e-06, + "loss": 0.1521, + "step": 171 + }, + { + "epoch": 0.7006109979633401, + "grad_norm": 0.6538828419017942, + "learning_rate": 9.467372907243858e-06, + "loss": 0.1012, + "step": 172 + }, + { + "epoch": 0.7046843177189409, + "grad_norm": 0.840723056036209, + "learning_rate": 9.456649779003548e-06, + "loss": 0.117, + "step": 173 + }, + { + "epoch": 0.7087576374745418, + "grad_norm": 0.7652580794490056, + "learning_rate": 9.44582597973238e-06, + "loss": 0.1284, + "step": 174 + }, + { + "epoch": 0.7128309572301426, + "grad_norm": 0.9696904154678632, + "learning_rate": 9.434901753928593e-06, + "loss": 0.1429, + "step": 175 + }, + { + "epoch": 0.7169042769857433, + "grad_norm": 0.7509027450046076, + "learning_rate": 9.423877348358956e-06, + "loss": 0.1006, + "step": 176 + }, + { + "epoch": 0.7209775967413442, + "grad_norm": 0.6942112976471692, + "learning_rate": 9.4127530120532e-06, + "loss": 0.1042, + "step": 177 + }, + { + "epoch": 0.725050916496945, + "grad_norm": 1.4641902043350905, + "learning_rate": 9.401528996298375e-06, + "loss": 0.1676, + "step": 178 + }, + { + "epoch": 0.7291242362525459, + "grad_norm": 0.7418396518869238, + "learning_rate": 9.390205554633193e-06, + "loss": 0.1082, + "step": 179 + }, + { + "epoch": 0.7331975560081466, + "grad_norm": 1.2074617530849705, + "learning_rate": 9.378782942842292e-06, + "loss": 0.1401, + "step": 180 + }, + { + "epoch": 0.7372708757637475, + "grad_norm": 1.2938802390610347, + "learning_rate": 9.367261418950459e-06, + "loss": 0.1855, + "step": 181 + }, + { + "epoch": 0.7413441955193483, + "grad_norm": 1.225757248706894, + "learning_rate": 9.355641243216798e-06, + "loss": 0.1729, + "step": 182 + }, + { + "epoch": 0.745417515274949, + "grad_norm": 1.1483380054973364, + "learning_rate": 9.343922678128854e-06, + "loss": 0.1078, + "step": 183 + }, + { + 
"epoch": 0.7494908350305499, + "grad_norm": 0.8222440765781929, + "learning_rate": 9.332105988396692e-06, + "loss": 0.1239, + "step": 184 + }, + { + "epoch": 0.7535641547861507, + "grad_norm": 0.9655962832595171, + "learning_rate": 9.3201914409469e-06, + "loss": 0.1309, + "step": 185 + }, + { + "epoch": 0.7576374745417516, + "grad_norm": 0.8060791719318856, + "learning_rate": 9.308179304916573e-06, + "loss": 0.1159, + "step": 186 + }, + { + "epoch": 0.7617107942973523, + "grad_norm": 0.7357782726661909, + "learning_rate": 9.29606985164723e-06, + "loss": 0.1052, + "step": 187 + }, + { + "epoch": 0.7657841140529531, + "grad_norm": 0.9536045205176826, + "learning_rate": 9.283863354678683e-06, + "loss": 0.1351, + "step": 188 + }, + { + "epoch": 0.769857433808554, + "grad_norm": 0.8771938059672718, + "learning_rate": 9.27156008974286e-06, + "loss": 0.1304, + "step": 189 + }, + { + "epoch": 0.7739307535641547, + "grad_norm": 0.7232888469506753, + "learning_rate": 9.259160334757575e-06, + "loss": 0.1054, + "step": 190 + }, + { + "epoch": 0.7780040733197556, + "grad_norm": 0.8295211262810136, + "learning_rate": 9.246664369820249e-06, + "loss": 0.1323, + "step": 191 + }, + { + "epoch": 0.7820773930753564, + "grad_norm": 1.546126242212441, + "learning_rate": 9.234072477201588e-06, + "loss": 0.2385, + "step": 192 + }, + { + "epoch": 0.7861507128309573, + "grad_norm": 1.3189210288828541, + "learning_rate": 9.2213849413392e-06, + "loss": 0.1312, + "step": 193 + }, + { + "epoch": 0.790224032586558, + "grad_norm": 0.6640416710388396, + "learning_rate": 9.208602048831176e-06, + "loss": 0.1032, + "step": 194 + }, + { + "epoch": 0.7942973523421588, + "grad_norm": 0.7975892776697048, + "learning_rate": 9.195724088429611e-06, + "loss": 0.1089, + "step": 195 + }, + { + "epoch": 0.7983706720977597, + "grad_norm": 0.706905690575772, + "learning_rate": 9.18275135103409e-06, + "loss": 0.1166, + "step": 196 + }, + { + "epoch": 0.8024439918533605, + "grad_norm": 0.8769448196441653, + 
"learning_rate": 9.169684129685099e-06, + "loss": 0.1317, + "step": 197 + }, + { + "epoch": 0.8065173116089613, + "grad_norm": 1.3681899543939136, + "learning_rate": 9.156522719557428e-06, + "loss": 0.1892, + "step": 198 + }, + { + "epoch": 0.8105906313645621, + "grad_norm": 1.0165895452906009, + "learning_rate": 9.143267417953486e-06, + "loss": 0.1526, + "step": 199 + }, + { + "epoch": 0.814663951120163, + "grad_norm": 0.9252869599364745, + "learning_rate": 9.129918524296596e-06, + "loss": 0.1791, + "step": 200 + }, + { + "epoch": 0.8187372708757638, + "grad_norm": 0.7566289195807724, + "learning_rate": 9.11647634012422e-06, + "loss": 0.1018, + "step": 201 + }, + { + "epoch": 0.8228105906313645, + "grad_norm": 0.7097020344942068, + "learning_rate": 9.102941169081167e-06, + "loss": 0.1174, + "step": 202 + }, + { + "epoch": 0.8268839103869654, + "grad_norm": 0.8335131746923946, + "learning_rate": 9.089313316912708e-06, + "loss": 0.14, + "step": 203 + }, + { + "epoch": 0.8309572301425662, + "grad_norm": 0.7934600650652943, + "learning_rate": 9.075593091457692e-06, + "loss": 0.1208, + "step": 204 + }, + { + "epoch": 0.835030549898167, + "grad_norm": 0.7614374059129773, + "learning_rate": 9.061780802641582e-06, + "loss": 0.1166, + "step": 205 + }, + { + "epoch": 0.8391038696537678, + "grad_norm": 0.7158974362347166, + "learning_rate": 9.047876762469451e-06, + "loss": 0.1046, + "step": 206 + }, + { + "epoch": 0.8431771894093686, + "grad_norm": 0.676023527010282, + "learning_rate": 9.033881285018945e-06, + "loss": 0.1049, + "step": 207 + }, + { + "epoch": 0.8472505091649695, + "grad_norm": 1.0542817712970116, + "learning_rate": 9.019794686433174e-06, + "loss": 0.1605, + "step": 208 + }, + { + "epoch": 0.8513238289205702, + "grad_norm": 0.791238316768574, + "learning_rate": 9.005617284913586e-06, + "loss": 0.1008, + "step": 209 + }, + { + "epoch": 0.8553971486761711, + "grad_norm": 1.3679274286147247, + "learning_rate": 8.991349400712772e-06, + "loss": 0.1174, + "step": 
210 + }, + { + "epoch": 0.8594704684317719, + "grad_norm": 0.8904165376343479, + "learning_rate": 8.976991356127225e-06, + "loss": 0.1252, + "step": 211 + }, + { + "epoch": 0.8635437881873728, + "grad_norm": 0.6365058101639782, + "learning_rate": 8.962543475490068e-06, + "loss": 0.1054, + "step": 212 + }, + { + "epoch": 0.8676171079429735, + "grad_norm": 0.6899915324730952, + "learning_rate": 8.948006085163735e-06, + "loss": 0.1059, + "step": 213 + }, + { + "epoch": 0.8716904276985743, + "grad_norm": 0.7033665303348221, + "learning_rate": 8.933379513532575e-06, + "loss": 0.1055, + "step": 214 + }, + { + "epoch": 0.8757637474541752, + "grad_norm": 0.7051229848942461, + "learning_rate": 8.91866409099546e-06, + "loss": 0.1047, + "step": 215 + }, + { + "epoch": 0.879837067209776, + "grad_norm": 0.7365152922519815, + "learning_rate": 8.903860149958308e-06, + "loss": 0.1028, + "step": 216 + }, + { + "epoch": 0.8839103869653768, + "grad_norm": 0.8798834115379963, + "learning_rate": 8.888968024826575e-06, + "loss": 0.131, + "step": 217 + }, + { + "epoch": 0.8879837067209776, + "grad_norm": 0.8127281754244611, + "learning_rate": 8.873988051997702e-06, + "loss": 0.1014, + "step": 218 + }, + { + "epoch": 0.8920570264765784, + "grad_norm": 0.841292566312256, + "learning_rate": 8.85892056985352e-06, + "loss": 0.1335, + "step": 219 + }, + { + "epoch": 0.8961303462321792, + "grad_norm": 1.3435689868107352, + "learning_rate": 8.8437659187526e-06, + "loss": 0.2286, + "step": 220 + }, + { + "epoch": 0.90020366598778, + "grad_norm": 1.8444300521677208, + "learning_rate": 8.828524441022575e-06, + "loss": 0.1827, + "step": 221 + }, + { + "epoch": 0.9042769857433809, + "grad_norm": 0.7545922474592645, + "learning_rate": 8.813196480952393e-06, + "loss": 0.1027, + "step": 222 + }, + { + "epoch": 0.9083503054989817, + "grad_norm": 0.75537983489465, + "learning_rate": 8.797782384784549e-06, + "loss": 0.1198, + "step": 223 + }, + { + "epoch": 0.9124236252545825, + "grad_norm": 
0.8104999041705286, + "learning_rate": 8.782282500707262e-06, + "loss": 0.1029, + "step": 224 + }, + { + "epoch": 0.9164969450101833, + "grad_norm": 0.8405282400775482, + "learning_rate": 8.766697178846611e-06, + "loss": 0.1241, + "step": 225 + }, + { + "epoch": 0.9205702647657841, + "grad_norm": 1.013551552697806, + "learning_rate": 8.751026771258622e-06, + "loss": 0.1343, + "step": 226 + }, + { + "epoch": 0.924643584521385, + "grad_norm": 0.6728989996123187, + "learning_rate": 8.735271631921322e-06, + "loss": 0.1058, + "step": 227 + }, + { + "epoch": 0.9287169042769857, + "grad_norm": 0.8690442261224494, + "learning_rate": 8.719432116726738e-06, + "loss": 0.1332, + "step": 228 + }, + { + "epoch": 0.9327902240325866, + "grad_norm": 0.9449187305589617, + "learning_rate": 8.703508583472855e-06, + "loss": 0.1451, + "step": 229 + }, + { + "epoch": 0.9368635437881874, + "grad_norm": 0.8067318337898685, + "learning_rate": 8.68750139185554e-06, + "loss": 0.1248, + "step": 230 + }, + { + "epoch": 0.9409368635437881, + "grad_norm": 0.7905017587261095, + "learning_rate": 8.671410903460416e-06, + "loss": 0.119, + "step": 231 + }, + { + "epoch": 0.945010183299389, + "grad_norm": 1.1238154965476772, + "learning_rate": 8.65523748175469e-06, + "loss": 0.1559, + "step": 232 + }, + { + "epoch": 0.9490835030549898, + "grad_norm": 1.1027211644152675, + "learning_rate": 8.63898149207895e-06, + "loss": 0.1693, + "step": 233 + }, + { + "epoch": 0.9531568228105907, + "grad_norm": 0.9411765578825619, + "learning_rate": 8.622643301638902e-06, + "loss": 0.1346, + "step": 234 + }, + { + "epoch": 0.9572301425661914, + "grad_norm": 0.6884466751221227, + "learning_rate": 8.606223279497081e-06, + "loss": 0.0968, + "step": 235 + }, + { + "epoch": 0.9613034623217923, + "grad_norm": 0.7219918781543078, + "learning_rate": 8.589721796564521e-06, + "loss": 0.0966, + "step": 236 + }, + { + "epoch": 0.9653767820773931, + "grad_norm": 0.7967809896092082, + "learning_rate": 8.57313922559236e-06, + 
"loss": 0.1201, + "step": 237 + }, + { + "epoch": 0.9694501018329938, + "grad_norm": 0.8113807921190012, + "learning_rate": 8.556475941163436e-06, + "loss": 0.1097, + "step": 238 + }, + { + "epoch": 0.9735234215885947, + "grad_norm": 1.0943551126152973, + "learning_rate": 8.539732319683817e-06, + "loss": 0.1552, + "step": 239 + }, + { + "epoch": 0.9775967413441955, + "grad_norm": 0.7854046329247982, + "learning_rate": 8.5229087393743e-06, + "loss": 0.1138, + "step": 240 + }, + { + "epoch": 0.9816700610997964, + "grad_norm": 1.1720562073286809, + "learning_rate": 8.506005580261872e-06, + "loss": 0.1525, + "step": 241 + }, + { + "epoch": 0.9857433808553971, + "grad_norm": 0.718895289386658, + "learning_rate": 8.489023224171114e-06, + "loss": 0.1082, + "step": 242 + }, + { + "epoch": 0.9898167006109979, + "grad_norm": 0.613834884154541, + "learning_rate": 8.47196205471559e-06, + "loss": 0.0877, + "step": 243 + }, + { + "epoch": 0.9938900203665988, + "grad_norm": 0.9789990123927295, + "learning_rate": 8.45482245728917e-06, + "loss": 0.1675, + "step": 244 + }, + { + "epoch": 0.9979633401221996, + "grad_norm": 1.5580291175140415, + "learning_rate": 8.437604819057336e-06, + "loss": 0.15, + "step": 245 + }, + { + "epoch": 1.0020366598778003, + "grad_norm": 0.7685763736473359, + "learning_rate": 8.420309528948422e-06, + "loss": 0.1072, + "step": 246 + }, + { + "epoch": 1.0061099796334012, + "grad_norm": 0.6434124354999965, + "learning_rate": 8.40293697764484e-06, + "loss": 0.0844, + "step": 247 + }, + { + "epoch": 1.010183299389002, + "grad_norm": 0.5841852692369695, + "learning_rate": 8.385487557574253e-06, + "loss": 0.0859, + "step": 248 + }, + { + "epoch": 1.0142566191446027, + "grad_norm": 0.6061435282600086, + "learning_rate": 8.367961662900704e-06, + "loss": 0.0809, + "step": 249 + }, + { + "epoch": 1.0183299389002036, + "grad_norm": 0.8866327026089017, + "learning_rate": 8.35035968951572e-06, + "loss": 0.0996, + "step": 250 + } + ], + "logging_steps": 1, + 
"max_steps": 735, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 250, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 16335919841280.0, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-250/training_args.bin b/checkpoint-250/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..d80d80f91915a307660dad4b2c4bb70a1f1629ab --- /dev/null +++ b/checkpoint-250/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:266201b4dbff74ad87f1a11f3b724a4866069747c79f60058f5aae5f6e7c094d +size 7416 diff --git a/checkpoint-250/zero_to_fp32.py b/checkpoint-250/zero_to_fp32.py new file mode 100644 index 0000000000000000000000000000000000000000..e69ecd9acb5a235ffbf927091051106d902b3d39 --- /dev/null +++ b/checkpoint-250/zero_to_fp32.py @@ -0,0 +1,674 @@ +#!/usr/bin/env python + +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: Apache-2.0 + +# DeepSpeed Team + +# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets +# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in +# the future. Once extracted, the weights don't require DeepSpeed and can be used in any +# application. +# +# example: +# python zero_to_fp32.py . output_dir/ +# or +# python zero_to_fp32.py . 
output_dir/ --safe_serialization + + import argparse + import torch + import glob + import math + import os + import re + import json + from tqdm import tqdm + from collections import OrderedDict + from dataclasses import dataclass + + # while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with + # DeepSpeed data structures it has to be available in the current python environment. + from deepspeed.utils import logger + from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS, + FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES, + FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS) + + + # Per-rank container for everything recovered from one *_model_states.pt file. + # NOTE(review): the dict()/list annotations are value expressions, not types - kept exactly as upstream wrote them. + @dataclass + class zero_model_state: + buffers: dict() + param_shapes: dict() + shared_params: list + ds_version: int + frozen_param_shapes: dict() + frozen_param_fragments: dict() + + + # nonzero enables the verbose `if debug:` tracing scattered through the reconstruction code + debug = 0 + + # load to cpu + device = torch.device('cpu') + + + def atoi(text): + # coerce all-digit runs to int so natural_keys() sorts "rank10" after "rank2" + return int(text) if text.isdigit() else text + + + def natural_keys(text): + ''' + alist.sort(key=natural_keys) sorts in human order + http://nedbatchelder.com/blog/200712/human_sorting.html + (See Toothy's implementation in the comments) + ''' + return [atoi(c) for c in re.split(r'(\d+)', text)] + + + def get_model_state_file(checkpoint_dir, zero_stage): + # Return the path of the single rank-0 model-states file for this zero stage. + if not os.path.isdir(checkpoint_dir): + raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist") + + # there should be only one file + if zero_stage <= 2: + file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt") + elif zero_stage == 3: + file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt") + + if not os.path.exists(file): + raise FileNotFoundError(f"can't find model states file at '{file}'") + + return file + + + def get_checkpoint_files(checkpoint_dir, glob_pattern): + # Glob files matching glob_pattern under checkpoint_dir, sorted in natural (human) order. + # XXX: need to test that this simple glob rule works for multi-node setup too + ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), 
key=natural_keys) + + if len(ckpt_files) == 0: + raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'") + + return ckpt_files + + + def get_optim_files(checkpoint_dir): + # per-rank optimizer shards; these hold the fp32 master weights + return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt") + + + def get_model_state_files(checkpoint_dir): + # per-rank model-states shards (buffers, param shapes, frozen/shared param info) + return get_checkpoint_files(checkpoint_dir, "*_model_states.pt") + + + def parse_model_states(files): + # Build one zero_model_state record per rank file: fp32 buffers, param shapes, + # shared-param pairs, frozen-param data and the deepspeed version that wrote it. + zero_model_states = [] + for file in files: + state_dict = torch.load(file, map_location=device) + + if BUFFER_NAMES not in state_dict: + raise ValueError(f"{file} is not a model state checkpoint") + buffer_names = state_dict[BUFFER_NAMES] + if debug: + print("Found buffers:", buffer_names) + + # recover just the buffers while restoring them to fp32 if they were saved in fp16 + buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names} + param_shapes = state_dict[PARAM_SHAPES] + + # collect parameters that are included in param_shapes + param_names = [] + for s in param_shapes: + for name in s.keys(): + param_names.append(name) + + # update with frozen parameters + frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None) + if frozen_param_shapes is not None: + if debug: + print(f"Found frozen_param_shapes: {frozen_param_shapes}") + param_names += list(frozen_param_shapes.keys()) + + # handle shared params + shared_params = [[k, v] for k, v in state_dict["shared_params"].items()] + + ds_version = state_dict.get(DS_VERSION, None) + + frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None) + + z_model_state = zero_model_state(buffers=buffers, + param_shapes=param_shapes, + shared_params=shared_params, + ds_version=ds_version, + frozen_param_shapes=frozen_param_shapes, + frozen_param_fragments=frozen_param_fragments) + zero_model_states.append(z_model_state) + + return zero_model_states + + + def parse_optim_states(files, ds_checkpoint_dir): + # Load every rank's optimizer shard and return (zero_stage, dp world_size, fp32 flat groups). + total_files = len(files) + state_dicts = [] + for f in files: 
+ state_dict = torch.load(f, map_location=device) + # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights + # and also handle the case where it was already removed by another helper script + state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None) + state_dicts.append(state_dict) + + if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]: + raise ValueError(f"{files[0]} is not a zero checkpoint") + zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE] + world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT] + + # For ZeRO-2 each param group can have different partition_count as data parallelism for expert + # parameters can be different from data parallelism for non-expert parameters. So we can just + # use the max of the partition_count to get the dp world_size. + + if type(world_size) is list: + world_size = max(world_size) + + if world_size != total_files: + raise ValueError( + f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. " + "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes." 
+ ) + + # the groups are named differently in each stage + if zero_stage <= 2: + fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS + elif zero_stage == 3: + fp32_groups_key = FP32_FLAT_GROUPS + else: + raise ValueError(f"unknown zero stage {zero_stage}") + + if zero_stage <= 2: + fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))] + elif zero_stage == 3: + # if there is more than one param group, there will be multiple flattened tensors - one + # flattened tensor per group - for simplicity merge them into a single tensor + # + # XXX: could make the script more memory efficient for when there are multiple groups - it + # will require matching the sub-lists of param_shapes for each param group flattened tensor + + fp32_flat_groups = [ + torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts)) + ] + + return zero_stage, world_size, fp32_flat_groups + + + def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters): + """ + Returns fp32 state_dict reconstructed from ds checkpoint + + Args: + - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are) + + """ + print(f"Processing zero checkpoint '{ds_checkpoint_dir}'") + + optim_files = get_optim_files(ds_checkpoint_dir) + zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir) + print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}") + + model_files = get_model_state_files(ds_checkpoint_dir) + + zero_model_states = parse_model_states(model_files) + print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}') + + # dispatch to the stage-specific reconstruction path + if zero_stage <= 2: + return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states, + exclude_frozen_parameters) + elif zero_stage == 3: + return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, 
zero_model_states, + exclude_frozen_parameters) + + + def _zero2_merge_frozen_params(state_dict, zero_model_states): + # Copy frozen (untrained) params straight from the rank-0 fragments; no cross-rank merging is done here. + if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0: + return + + frozen_param_shapes = zero_model_states[0].frozen_param_shapes + frozen_param_fragments = zero_model_states[0].frozen_param_fragments + + if debug: + num_elem = sum(s.numel() for s in frozen_param_shapes.values()) + print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}') + + wanted_params = len(frozen_param_shapes) + wanted_numel = sum(s.numel() for s in frozen_param_shapes.values()) + avail_numel = sum([p.numel() for p in frozen_param_fragments.values()]) + print(f'Frozen params: Have {avail_numel} numels to process.') + print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params') + + total_params = 0 + total_numel = 0 + for name, shape in frozen_param_shapes.items(): + total_params += 1 + unpartitioned_numel = shape.numel() + total_numel += unpartitioned_numel + + state_dict[name] = frozen_param_fragments[name] + + if debug: + print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ") + + print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements") + + + def _has_callable(obj, fn): + # True when obj exposes a callable attribute named fn (used below to pick shape.numel() vs math.prod(shape)). + attr = getattr(obj, fn, None) + return callable(attr) + + + def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states): + # Concatenate each param group's per-rank fp32 partitions into one flat vector, + # then carve out per-parameter views by walking param_shapes with a running offset. + param_shapes = zero_model_states[0].param_shapes + + # Reconstruction protocol: + # + # XXX: document this + + if debug: + for i in range(world_size): + for j in range(len(fp32_flat_groups[0])): + print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}") + + # XXX: memory usage doubles here (zero2) + num_param_groups = len(fp32_flat_groups[0]) + merged_single_partition_of_fp32_groups = [] + for i in range(num_param_groups): + merged_partitions = [sd[i] for sd in fp32_flat_groups] + 
full_single_fp32_vector = torch.cat(merged_partitions, 0) + merged_single_partition_of_fp32_groups.append(full_single_fp32_vector) + avail_numel = sum( + [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups]) + + if debug: + wanted_params = sum([len(shapes) for shapes in param_shapes]) + wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes]) + # not asserting if there is a mismatch due to possible padding + print(f"Have {avail_numel} numels to process.") + print(f"Need {wanted_numel} numels in {wanted_params} params.") + + # params + # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support + # out-of-core computing solution + total_numel = 0 + total_params = 0 + for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups): + offset = 0 + avail_numel = full_single_fp32_vector.numel() + for name, shape in shapes.items(): + + unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape) + total_numel += unpartitioned_numel + total_params += 1 + + if debug: + print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ") + state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape) + offset += unpartitioned_numel + + # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and + # avail_numel can differ by anywhere between 0..2*world_size. 
Due to two unrelated complex + # paddings performed in the code it's almost impossible to predict the exact numbers w/o the + # live optimizer object, so we are checking that the numbers are within the right range + align_to = 2 * world_size + + def zero2_align(x): + # round x up to the nearest multiple of 2*world_size + return align_to * math.ceil(x / align_to) + + if debug: + print(f"original offset={offset}, avail_numel={avail_numel}") + + offset = zero2_align(offset) + avail_numel = zero2_align(avail_numel) + + if debug: + print(f"aligned offset={offset}, avail_numel={avail_numel}") + + # Sanity check + if offset != avail_numel: + raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong") + + print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements") + + + def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states, + exclude_frozen_parameters): + # Assemble the full fp32 state_dict for a ZeRO-1/2 checkpoint: buffers first, + # then (optionally) frozen params, then merged trainable params, then shared-param aliases. + state_dict = OrderedDict() + + # buffers + buffers = zero_model_states[0].buffers + state_dict.update(buffers) + if debug: + print(f"added {len(buffers)} buffers") + + if not exclude_frozen_parameters: + _zero2_merge_frozen_params(state_dict, zero_model_states) + + _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states) + + # recover shared parameters + for pair in zero_model_states[0].shared_params: + if pair[1] in state_dict: + state_dict[pair[0]] = state_dict[pair[1]] + + return state_dict + + + def zero3_partitioned_param_info(unpartitioned_numel, world_size): + # Per-rank partition size (ceil split) plus the total padding numels added so the param divides evenly. + remainder = unpartitioned_numel % world_size + padding_numel = (world_size - remainder) if remainder else 0 + partitioned_numel = math.ceil(unpartitioned_numel / world_size) + return partitioned_numel, padding_numel + + + def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states): + if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0: + return + + if debug: + for i in range(world_size): + num_elem = sum(s.numel() for
s in zero_model_states[i].frozen_param_fragments.values()) + print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}') + + frozen_param_shapes = zero_model_states[0].frozen_param_shapes + wanted_params = len(frozen_param_shapes) + wanted_numel = sum(s.numel() for s in frozen_param_shapes.values()) + avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size + print(f'Frozen params: Have {avail_numel} numels to process.') + print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params') + + total_params = 0 + total_numel = 0 + for name, shape in zero_model_states[0].frozen_param_shapes.items(): + total_params += 1 + unpartitioned_numel = shape.numel() + total_numel += unpartitioned_numel + + param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states) + state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape) + + partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size) + + if debug: + print( + f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}" + ) + + print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements") + + +def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states): + param_shapes = zero_model_states[0].param_shapes + avail_numel = fp32_flat_groups[0].numel() * world_size + # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each + # param, re-consolidating each param, while dealing with padding if any + + # merge list of dicts, preserving order + param_shapes = {k: v for d in param_shapes for k, v in d.items()} + + if debug: + for i in range(world_size): + print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}") + + wanted_params = len(param_shapes) + 
wanted_numel = sum(shape.numel() for shape in param_shapes.values()) + # not asserting if there is a mismatch due to possible padding + avail_numel = fp32_flat_groups[0].numel() * world_size + print(f"Trainable params: Have {avail_numel} numels to process.") + print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.") + + # params + # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support + # out-of-core computing solution + offset = 0 + total_numel = 0 + total_params = 0 + for name, shape in tqdm(param_shapes.items(), desc='Gathering Sharded Weights'): + unpartitioned_numel = shape.numel() + total_numel += unpartitioned_numel + total_params += 1 + partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size) + + if debug: + print( + f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}" + ) + + # XXX: memory usage doubles here + state_dict[name] = torch.cat( + tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)), + 0).narrow(0, 0, unpartitioned_numel).view(shape) + offset += partitioned_numel + + offset *= world_size + + # Sanity check + if offset != avail_numel: + raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong") + + print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements") + + +def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states, + exclude_frozen_parameters): + state_dict = OrderedDict() + + # buffers + buffers = zero_model_states[0].buffers + state_dict.update(buffers) + if debug: + print(f"added {len(buffers)} buffers") + + if not exclude_frozen_parameters: + _zero3_merge_frozen_params(state_dict, world_size, zero_model_states) + + _zero3_merge_trainable_params(state_dict, world_size, 
fp32_flat_groups, zero_model_states) + + # recover shared parameters + for pair in zero_model_states[0].shared_params: + if pair[1] in state_dict: + state_dict[pair[0]] = state_dict[pair[1]] + + return state_dict + + +def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False): + """ + Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with + ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example + via a model hub. + + Args: + - ``checkpoint_dir``: path to the desired checkpoint folder + - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14`` + - ``exclude_frozen_parameters``: exclude frozen parameters + + Returns: + - pytorch ``state_dict`` + + Note: this approach may not work if your application doesn't have sufficient free CPU memory and + you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with + the checkpoint. + + A typical usage might be :: + + from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint + # do the training and checkpoint saving + state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu + model = model.cpu() # move to cpu + model.load_state_dict(state_dict) + # submit to model hub or save the model to share with others + + In this example the ``model`` will no longer be usable in the deepspeed context of the same + application. i.e. you will need to re-initialize the deepspeed engine, since + ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it. + + If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead. 
+ + """ + if tag is None: + latest_path = os.path.join(checkpoint_dir, 'latest') + if os.path.isfile(latest_path): + with open(latest_path, 'r') as fd: + tag = fd.read().strip() + else: + raise ValueError(f"Unable to find 'latest' file at {latest_path}") + + ds_checkpoint_dir = os.path.join(checkpoint_dir, tag) + + if not os.path.isdir(ds_checkpoint_dir): + raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist") + + return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters) + + +def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, + output_dir, + max_shard_size="5GB", + safe_serialization=False, + tag=None, + exclude_frozen_parameters=False): + """ + Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be + loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed. + + Args: + - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``) + - ``output_dir``: directory to the pytorch fp32 state_dict output files + - ``max_shard_size``: the maximum size for a checkpoint before being sharded, default value is 5GB + - ``safe_serialization``: whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`). + - ``tag``: checkpoint tag used as a unique identifier for checkpoint. 
If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14`` + - ``exclude_frozen_parameters``: exclude frozen parameters + """ + # Dependency pre-check + if safe_serialization: + try: + from safetensors.torch import save_file + except ImportError: + print('If you want to use `safe_serialization`, please `pip install safetensors`') + raise + if max_shard_size is not None: + try: + from huggingface_hub import split_torch_state_dict_into_shards + except ImportError: + print('If you want to use `max_shard_size`, please `pip install huggingface_hub`') + raise + + # Convert zero checkpoint to state_dict + state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters) + + # Shard the model if it is too big. + weights_name = "model.safetensors" if safe_serialization else "pytorch_model.bin" + if max_shard_size is not None: + filename_pattern = weights_name.replace(".bin", "{suffix}.bin").replace(".safetensors", "{suffix}.safetensors") + state_dict_split = split_torch_state_dict_into_shards(state_dict, + filename_pattern=filename_pattern, + max_shard_size=max_shard_size) + else: + from collections import namedtuple + StateDictSplit = namedtuple("StateDictSplit", ["is_sharded", "filename_to_tensors"]) + state_dict_split = StateDictSplit(is_sharded=False, + filename_to_tensors={weights_name: list(state_dict.keys())}) + + # Save the model + filename_to_tensors = state_dict_split.filename_to_tensors.items() + for shard_file, tensors in tqdm(filename_to_tensors, desc="Saving checkpoint shards"): + shard = {tensor: state_dict[tensor].contiguous() for tensor in tensors} + output_path = os.path.join(output_dir, shard_file) + if safe_serialization: + save_file(shard, output_path, metadata={"format": "pt"}) + else: + torch.save(shard, output_path) + + # Save index if sharded + if state_dict_split.is_sharded: + index = { + "metadata": state_dict_split.metadata, + "weight_map": 
state_dict_split.tensor_to_filename, + } + save_index_file = "model.safetensors.index.json" if safe_serialization else "pytorch_model.bin.index.json" + save_index_file = os.path.join(output_dir, save_index_file) + with open(save_index_file, "w", encoding="utf-8") as f: + content = json.dumps(index, indent=2, sort_keys=True) + "\n" + f.write(content) + + +def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None): + """ + 1. Put the provided model to cpu + 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` + 3. Load it into the provided model + + Args: + - ``model``: the model object to update + - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``) + - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14`` + + Returns: + - ``model`: modified model + + Make sure you have plenty of CPU memory available before you call this function. If you don't + have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it + conveniently placed for you in the checkpoint folder. + + A typical usage might be :: + + from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint + model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir) + # submit to model hub or save the model to share with others + + Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context + of the same application. i.e. you will need to re-initialize the deepspeed engine, since + ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it. 
+ + """ + logger.info(f"Extracting fp32 weights") + state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag) + + logger.info(f"Overwriting model with fp32 weights") + model = model.cpu() + model.load_state_dict(state_dict, strict=False) + + return model + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("checkpoint_dir", + type=str, + help="path to the desired checkpoint folder, e.g., path/checkpoint-12") + parser.add_argument("output_dir", + type=str, + help="directory to the pytorch fp32 state_dict output files" + "(e.g. path/checkpoint-12-output/)") + parser.add_argument( + "--max_shard_size", + type=str, + default="5GB", + help="The maximum size for a checkpoint before being sharded. Checkpoints shard will then be each of size" + "lower than this size. If expressed as a string, needs to be digits followed by a unit (like `5MB`" + "We default it to 5GB in order for models to be able to run easily on free-tier google colab instances" + "without CPU OOM issues.") + parser.add_argument( + "--safe_serialization", + default=False, + action='store_true', + help="Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).") + parser.add_argument("-t", + "--tag", + type=str, + default=None, + help="checkpoint tag used as a unique identifier for checkpoint. 
e.g., global_step1") + parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters") + parser.add_argument("-d", "--debug", action='store_true', help="enable debug") + args = parser.parse_args() + + debug = args.debug + + convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir, + args.output_dir, + max_shard_size=args.max_shard_size, + safe_serialization=args.safe_serialization, + tag=args.tag, + exclude_frozen_parameters=args.exclude_frozen_parameters) diff --git a/checkpoint-500/generation_config.json b/checkpoint-500/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..6a807a364acb034610b0c0959eb3727910a0babe --- /dev/null +++ b/checkpoint-500/generation_config.json @@ -0,0 +1,12 @@ +{ + "bos_token_id": 128000, + "do_sample": true, + "eos_token_id": [ + 128001, + 128008, + 128009 + ], + "temperature": 0.6, + "top_p": 0.9, + "transformers_version": "4.45.0" +} diff --git a/checkpoint-500/latest b/checkpoint-500/latest new file mode 100644 index 0000000000000000000000000000000000000000..f0b47ce15fff9a01b2a416a473b2148085048a50 --- /dev/null +++ b/checkpoint-500/latest @@ -0,0 +1 @@ +global_step500 \ No newline at end of file diff --git a/checkpoint-500/model.safetensors.index.json b/checkpoint-500/model.safetensors.index.json new file mode 100644 index 0000000000000000000000000000000000000000..0fd8120f1c6acddc268ebc2583058efaf699a771 --- /dev/null +++ b/checkpoint-500/model.safetensors.index.json @@ -0,0 +1,298 @@ +{ + "metadata": { + "total_size": 16060522496 + }, + "weight_map": { + "lm_head.weight": "model-00004-of-00004.safetensors", + "model.embed_tokens.weight": "model-00001-of-00004.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.q_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + 
"model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.o_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.down_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.31.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.8.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.norm.weight": "model-00004-of-00004.safetensors" + } +} diff --git a/checkpoint-500/rng_state_1.pth b/checkpoint-500/rng_state_1.pth new file mode 100644 index 0000000000000000000000000000000000000000..eeb7d8df6ed170dd98dba8737bc9dd038af61afd --- /dev/null +++ b/checkpoint-500/rng_state_1.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dbab71d98a3a9a92df82a6bba463947327c3a1bcf35cd9f4f46114641fc42dd9 +size 15984 diff --git a/checkpoint-500/rng_state_2.pth b/checkpoint-500/rng_state_2.pth new file mode 100644 index 0000000000000000000000000000000000000000..e144a445ffd57fbb5be9b5131f17149bde6c4ff5 --- /dev/null +++ b/checkpoint-500/rng_state_2.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:caac82d57d878d30219a4f9ec289a97ff90c53afc160b968f251b3fd3454b8d8 +size 15984 diff --git 
a/checkpoint-500/rng_state_6.pth b/checkpoint-500/rng_state_6.pth new file mode 100644 index 0000000000000000000000000000000000000000..d4e6b27cc99b0fa8e6bbf967892f9304b444d81d --- /dev/null +++ b/checkpoint-500/rng_state_6.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1c71152053553e6e22d670fbc4fd7550bf8a046b54cad7b71869787986a6a42c +size 15984 diff --git a/checkpoint-500/tokenizer.json b/checkpoint-500/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2 --- /dev/null +++ b/checkpoint-500/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b +size 17209920 diff --git a/checkpoint-500/trainer_state.json b/checkpoint-500/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..344cb1bf271428a258553d8b6557ea7f4f32eaab --- /dev/null +++ b/checkpoint-500/trainer_state.json @@ -0,0 +1,3533 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.0366598778004072, + "eval_steps": 500, + "global_step": 500, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.004073319755600814, + "grad_norm": 14.739597738910254, + "learning_rate": 1.3513513513513515e-07, + "loss": 0.3301, + "step": 1 + }, + { + "epoch": 0.008146639511201629, + "grad_norm": 12.452379475537562, + "learning_rate": 2.702702702702703e-07, + "loss": 0.3036, + "step": 2 + }, + { + "epoch": 0.012219959266802444, + "grad_norm": 12.86836901323053, + "learning_rate": 4.0540540540540546e-07, + "loss": 0.3424, + "step": 3 + }, + { + "epoch": 0.016293279022403257, + "grad_norm": 13.43090696417895, + "learning_rate": 5.405405405405406e-07, + "loss": 0.3184, + "step": 4 + }, + { + "epoch": 0.020366598778004074, + "grad_norm": 14.809270827070751, + "learning_rate": 6.756756756756758e-07, + 
"loss": 0.3138, + "step": 5 + }, + { + "epoch": 0.024439918533604887, + "grad_norm": 12.082295065079254, + "learning_rate": 8.108108108108109e-07, + "loss": 0.2982, + "step": 6 + }, + { + "epoch": 0.028513238289205704, + "grad_norm": 8.711027825602685, + "learning_rate": 9.459459459459461e-07, + "loss": 0.3063, + "step": 7 + }, + { + "epoch": 0.032586558044806514, + "grad_norm": 7.652261107879573, + "learning_rate": 1.0810810810810812e-06, + "loss": 0.2749, + "step": 8 + }, + { + "epoch": 0.03665987780040733, + "grad_norm": 7.726905307446712, + "learning_rate": 1.2162162162162164e-06, + "loss": 0.2718, + "step": 9 + }, + { + "epoch": 0.04073319755600815, + "grad_norm": 3.4676017287243446, + "learning_rate": 1.3513513513513515e-06, + "loss": 0.2238, + "step": 10 + }, + { + "epoch": 0.04480651731160896, + "grad_norm": 4.29492237739491, + "learning_rate": 1.4864864864864868e-06, + "loss": 0.2513, + "step": 11 + }, + { + "epoch": 0.048879837067209775, + "grad_norm": 3.5844077505877707, + "learning_rate": 1.6216216216216219e-06, + "loss": 0.2327, + "step": 12 + }, + { + "epoch": 0.05295315682281059, + "grad_norm": 7.031113851112456, + "learning_rate": 1.756756756756757e-06, + "loss": 0.239, + "step": 13 + }, + { + "epoch": 0.05702647657841141, + "grad_norm": 7.210450292846676, + "learning_rate": 1.8918918918918922e-06, + "loss": 0.277, + "step": 14 + }, + { + "epoch": 0.06109979633401222, + "grad_norm": 6.526062178388152, + "learning_rate": 2.0270270270270273e-06, + "loss": 0.2495, + "step": 15 + }, + { + "epoch": 0.06517311608961303, + "grad_norm": 5.157530413977274, + "learning_rate": 2.1621621621621623e-06, + "loss": 0.2425, + "step": 16 + }, + { + "epoch": 0.06924643584521385, + "grad_norm": 3.242500698401516, + "learning_rate": 2.297297297297298e-06, + "loss": 0.1985, + "step": 17 + }, + { + "epoch": 0.07331975560081466, + "grad_norm": 2.554097363562634, + "learning_rate": 2.432432432432433e-06, + "loss": 0.1822, + "step": 18 + }, + { + "epoch": 
0.07739307535641547, + "grad_norm": 2.6535341941648047, + "learning_rate": 2.5675675675675675e-06, + "loss": 0.2252, + "step": 19 + }, + { + "epoch": 0.0814663951120163, + "grad_norm": 2.5516965886789076, + "learning_rate": 2.702702702702703e-06, + "loss": 0.1862, + "step": 20 + }, + { + "epoch": 0.0855397148676171, + "grad_norm": 2.6017363031504264, + "learning_rate": 2.837837837837838e-06, + "loss": 0.2098, + "step": 21 + }, + { + "epoch": 0.08961303462321792, + "grad_norm": 2.0129035331912184, + "learning_rate": 2.9729729729729736e-06, + "loss": 0.1699, + "step": 22 + }, + { + "epoch": 0.09368635437881874, + "grad_norm": 1.849938821231628, + "learning_rate": 3.1081081081081082e-06, + "loss": 0.1868, + "step": 23 + }, + { + "epoch": 0.09775967413441955, + "grad_norm": 1.764863585345639, + "learning_rate": 3.2432432432432437e-06, + "loss": 0.1669, + "step": 24 + }, + { + "epoch": 0.10183299389002037, + "grad_norm": 1.8494325730642949, + "learning_rate": 3.3783783783783788e-06, + "loss": 0.1507, + "step": 25 + }, + { + "epoch": 0.10590631364562118, + "grad_norm": 1.6304914383856781, + "learning_rate": 3.513513513513514e-06, + "loss": 0.1534, + "step": 26 + }, + { + "epoch": 0.109979633401222, + "grad_norm": 1.4579913516119778, + "learning_rate": 3.648648648648649e-06, + "loss": 0.1387, + "step": 27 + }, + { + "epoch": 0.11405295315682282, + "grad_norm": 1.6082049119577289, + "learning_rate": 3.7837837837837844e-06, + "loss": 0.1406, + "step": 28 + }, + { + "epoch": 0.11812627291242363, + "grad_norm": 1.4819461102449234, + "learning_rate": 3.918918918918919e-06, + "loss": 0.1323, + "step": 29 + }, + { + "epoch": 0.12219959266802444, + "grad_norm": 1.7009165157092279, + "learning_rate": 4.0540540540540545e-06, + "loss": 0.1577, + "step": 30 + }, + { + "epoch": 0.12627291242362526, + "grad_norm": 1.5721048343370418, + "learning_rate": 4.189189189189189e-06, + "loss": 0.1345, + "step": 31 + }, + { + "epoch": 0.13034623217922606, + "grad_norm": 1.5868902144082508, + 
"learning_rate": 4.324324324324325e-06, + "loss": 0.1641, + "step": 32 + }, + { + "epoch": 0.13441955193482688, + "grad_norm": 1.367409491711825, + "learning_rate": 4.45945945945946e-06, + "loss": 0.1568, + "step": 33 + }, + { + "epoch": 0.1384928716904277, + "grad_norm": 1.2082341226617432, + "learning_rate": 4.594594594594596e-06, + "loss": 0.1158, + "step": 34 + }, + { + "epoch": 0.1425661914460285, + "grad_norm": 1.2834846670425744, + "learning_rate": 4.72972972972973e-06, + "loss": 0.1553, + "step": 35 + }, + { + "epoch": 0.14663951120162932, + "grad_norm": 1.4278045526468992, + "learning_rate": 4.864864864864866e-06, + "loss": 0.1472, + "step": 36 + }, + { + "epoch": 0.15071283095723015, + "grad_norm": 1.1433863309324082, + "learning_rate": 5e-06, + "loss": 0.1216, + "step": 37 + }, + { + "epoch": 0.15478615071283094, + "grad_norm": 1.2556861775151085, + "learning_rate": 5.135135135135135e-06, + "loss": 0.1383, + "step": 38 + }, + { + "epoch": 0.15885947046843177, + "grad_norm": 1.1940515610624718, + "learning_rate": 5.2702702702702705e-06, + "loss": 0.1488, + "step": 39 + }, + { + "epoch": 0.1629327902240326, + "grad_norm": 1.365361196469323, + "learning_rate": 5.405405405405406e-06, + "loss": 0.1658, + "step": 40 + }, + { + "epoch": 0.1670061099796334, + "grad_norm": 1.6375597676471598, + "learning_rate": 5.540540540540541e-06, + "loss": 0.1244, + "step": 41 + }, + { + "epoch": 0.1710794297352342, + "grad_norm": 1.058410205986207, + "learning_rate": 5.675675675675676e-06, + "loss": 0.1129, + "step": 42 + }, + { + "epoch": 0.17515274949083504, + "grad_norm": 1.468616447672182, + "learning_rate": 5.810810810810811e-06, + "loss": 0.176, + "step": 43 + }, + { + "epoch": 0.17922606924643583, + "grad_norm": 1.1292066998688302, + "learning_rate": 5.945945945945947e-06, + "loss": 0.1235, + "step": 44 + }, + { + "epoch": 0.18329938900203666, + "grad_norm": 1.1790440780653373, + "learning_rate": 6.081081081081082e-06, + "loss": 0.1352, + "step": 45 + }, + { + 
"epoch": 0.18737270875763748, + "grad_norm": 1.144770740193701, + "learning_rate": 6.2162162162162164e-06, + "loss": 0.1375, + "step": 46 + }, + { + "epoch": 0.19144602851323828, + "grad_norm": 1.3169675540020822, + "learning_rate": 6.351351351351351e-06, + "loss": 0.1451, + "step": 47 + }, + { + "epoch": 0.1955193482688391, + "grad_norm": 1.1364743430386761, + "learning_rate": 6.486486486486487e-06, + "loss": 0.1073, + "step": 48 + }, + { + "epoch": 0.19959266802443992, + "grad_norm": 1.3532964160734307, + "learning_rate": 6.621621621621622e-06, + "loss": 0.1502, + "step": 49 + }, + { + "epoch": 0.20366598778004075, + "grad_norm": 1.1049371458723167, + "learning_rate": 6.7567567567567575e-06, + "loss": 0.116, + "step": 50 + }, + { + "epoch": 0.20773930753564154, + "grad_norm": 1.0634720045604809, + "learning_rate": 6.891891891891892e-06, + "loss": 0.1438, + "step": 51 + }, + { + "epoch": 0.21181262729124237, + "grad_norm": 1.1677623232682453, + "learning_rate": 7.027027027027028e-06, + "loss": 0.1143, + "step": 52 + }, + { + "epoch": 0.2158859470468432, + "grad_norm": 1.2552603959178812, + "learning_rate": 7.162162162162163e-06, + "loss": 0.1443, + "step": 53 + }, + { + "epoch": 0.219959266802444, + "grad_norm": 1.1556616828254782, + "learning_rate": 7.297297297297298e-06, + "loss": 0.1341, + "step": 54 + }, + { + "epoch": 0.2240325865580448, + "grad_norm": 1.1241236805182522, + "learning_rate": 7.4324324324324324e-06, + "loss": 0.1283, + "step": 55 + }, + { + "epoch": 0.22810590631364563, + "grad_norm": 0.9867741809756463, + "learning_rate": 7.567567567567569e-06, + "loss": 0.1302, + "step": 56 + }, + { + "epoch": 0.23217922606924643, + "grad_norm": 1.0672327000495885, + "learning_rate": 7.702702702702704e-06, + "loss": 0.113, + "step": 57 + }, + { + "epoch": 0.23625254582484725, + "grad_norm": 1.0659735135074857, + "learning_rate": 7.837837837837838e-06, + "loss": 0.1293, + "step": 58 + }, + { + "epoch": 0.24032586558044808, + "grad_norm": 1.2422197356017706, + 
"learning_rate": 7.972972972972974e-06, + "loss": 0.164, + "step": 59 + }, + { + "epoch": 0.24439918533604887, + "grad_norm": 1.3538609671806645, + "learning_rate": 8.108108108108109e-06, + "loss": 0.1548, + "step": 60 + }, + { + "epoch": 0.2484725050916497, + "grad_norm": 1.0759558101958346, + "learning_rate": 8.243243243243245e-06, + "loss": 0.1225, + "step": 61 + }, + { + "epoch": 0.2525458248472505, + "grad_norm": 1.1244956381449198, + "learning_rate": 8.378378378378378e-06, + "loss": 0.1175, + "step": 62 + }, + { + "epoch": 0.25661914460285135, + "grad_norm": 1.171629685706723, + "learning_rate": 8.513513513513514e-06, + "loss": 0.1204, + "step": 63 + }, + { + "epoch": 0.2606924643584521, + "grad_norm": 1.2905585681894916, + "learning_rate": 8.64864864864865e-06, + "loss": 0.1253, + "step": 64 + }, + { + "epoch": 0.26476578411405294, + "grad_norm": 1.3979008428570314, + "learning_rate": 8.783783783783785e-06, + "loss": 0.191, + "step": 65 + }, + { + "epoch": 0.26883910386965376, + "grad_norm": 1.226756333773235, + "learning_rate": 8.91891891891892e-06, + "loss": 0.1287, + "step": 66 + }, + { + "epoch": 0.2729124236252546, + "grad_norm": 1.2835470528218054, + "learning_rate": 9.054054054054054e-06, + "loss": 0.138, + "step": 67 + }, + { + "epoch": 0.2769857433808554, + "grad_norm": 1.1622195270679896, + "learning_rate": 9.189189189189191e-06, + "loss": 0.1259, + "step": 68 + }, + { + "epoch": 0.28105906313645623, + "grad_norm": 1.1512666578576678, + "learning_rate": 9.324324324324325e-06, + "loss": 0.1292, + "step": 69 + }, + { + "epoch": 0.285132382892057, + "grad_norm": 0.9695391815507838, + "learning_rate": 9.45945945945946e-06, + "loss": 0.1142, + "step": 70 + }, + { + "epoch": 0.2892057026476578, + "grad_norm": 1.1262409828408337, + "learning_rate": 9.594594594594594e-06, + "loss": 0.1188, + "step": 71 + }, + { + "epoch": 0.29327902240325865, + "grad_norm": 0.9820966211674147, + "learning_rate": 9.729729729729732e-06, + "loss": 0.1052, + "step": 72 + }, + 
{ + "epoch": 0.2973523421588595, + "grad_norm": 1.1058230077470572, + "learning_rate": 9.864864864864865e-06, + "loss": 0.1246, + "step": 73 + }, + { + "epoch": 0.3014256619144603, + "grad_norm": 1.3891942844370528, + "learning_rate": 1e-05, + "loss": 0.1651, + "step": 74 + }, + { + "epoch": 0.3054989816700611, + "grad_norm": 1.1373599847305171, + "learning_rate": 9.99994352762958e-06, + "loss": 0.1259, + "step": 75 + }, + { + "epoch": 0.3095723014256619, + "grad_norm": 1.0803757941511039, + "learning_rate": 9.999774111793974e-06, + "loss": 0.1485, + "step": 76 + }, + { + "epoch": 0.3136456211812627, + "grad_norm": 1.509987566205336, + "learning_rate": 9.999491756320105e-06, + "loss": 0.1708, + "step": 77 + }, + { + "epoch": 0.31771894093686354, + "grad_norm": 1.3769318827034491, + "learning_rate": 9.99909646758609e-06, + "loss": 0.1483, + "step": 78 + }, + { + "epoch": 0.32179226069246436, + "grad_norm": 0.9995516357476201, + "learning_rate": 9.99858825452108e-06, + "loss": 0.1124, + "step": 79 + }, + { + "epoch": 0.3258655804480652, + "grad_norm": 1.4328593788226842, + "learning_rate": 9.997967128605078e-06, + "loss": 0.1849, + "step": 80 + }, + { + "epoch": 0.329938900203666, + "grad_norm": 1.0397129864144867, + "learning_rate": 9.997233103868664e-06, + "loss": 0.1199, + "step": 81 + }, + { + "epoch": 0.3340122199592668, + "grad_norm": 1.3312975796955133, + "learning_rate": 9.996386196892683e-06, + "loss": 0.1748, + "step": 82 + }, + { + "epoch": 0.3380855397148676, + "grad_norm": 1.2070448028045222, + "learning_rate": 9.995426426807875e-06, + "loss": 0.1449, + "step": 83 + }, + { + "epoch": 0.3421588594704684, + "grad_norm": 0.9786604342473315, + "learning_rate": 9.994353815294438e-06, + "loss": 0.1349, + "step": 84 + }, + { + "epoch": 0.34623217922606925, + "grad_norm": 1.16279378070579, + "learning_rate": 9.993168386581533e-06, + "loss": 0.1111, + "step": 85 + }, + { + "epoch": 0.35030549898167007, + "grad_norm": 1.0832386326974766, + "learning_rate": 
9.991870167446751e-06, + "loss": 0.1271, + "step": 86 + }, + { + "epoch": 0.3543788187372709, + "grad_norm": 1.076044536856832, + "learning_rate": 9.990459187215498e-06, + "loss": 0.122, + "step": 87 + }, + { + "epoch": 0.35845213849287166, + "grad_norm": 1.1390626595350608, + "learning_rate": 9.98893547776033e-06, + "loss": 0.1429, + "step": 88 + }, + { + "epoch": 0.3625254582484725, + "grad_norm": 1.2799324833393828, + "learning_rate": 9.987299073500245e-06, + "loss": 0.1789, + "step": 89 + }, + { + "epoch": 0.3665987780040733, + "grad_norm": 1.0088789278468007, + "learning_rate": 9.985550011399889e-06, + "loss": 0.1217, + "step": 90 + }, + { + "epoch": 0.37067209775967414, + "grad_norm": 1.0635380396962304, + "learning_rate": 9.98368833096874e-06, + "loss": 0.1517, + "step": 91 + }, + { + "epoch": 0.37474541751527496, + "grad_norm": 1.1149195586496816, + "learning_rate": 9.981714074260196e-06, + "loss": 0.1648, + "step": 92 + }, + { + "epoch": 0.3788187372708758, + "grad_norm": 0.9770064004740078, + "learning_rate": 9.979627285870644e-06, + "loss": 0.1173, + "step": 93 + }, + { + "epoch": 0.38289205702647655, + "grad_norm": 1.5786545324573935, + "learning_rate": 9.977428012938437e-06, + "loss": 0.2148, + "step": 94 + }, + { + "epoch": 0.3869653767820774, + "grad_norm": 0.9445672697637628, + "learning_rate": 9.975116305142836e-06, + "loss": 0.1272, + "step": 95 + }, + { + "epoch": 0.3910386965376782, + "grad_norm": 0.832092882135511, + "learning_rate": 9.97269221470289e-06, + "loss": 0.1149, + "step": 96 + }, + { + "epoch": 0.395112016293279, + "grad_norm": 0.8009975217381654, + "learning_rate": 9.97015579637625e-06, + "loss": 0.1081, + "step": 97 + }, + { + "epoch": 0.39918533604887985, + "grad_norm": 0.909000272396086, + "learning_rate": 9.967507107457942e-06, + "loss": 0.1249, + "step": 98 + }, + { + "epoch": 0.40325865580448067, + "grad_norm": 0.9894702747295367, + "learning_rate": 9.96474620777906e-06, + "loss": 0.1404, + "step": 99 + }, + { + "epoch": 
0.4073319755600815, + "grad_norm": 1.1517905886733883, + "learning_rate": 9.961873159705426e-06, + "loss": 0.1433, + "step": 100 + }, + { + "epoch": 0.41140529531568226, + "grad_norm": 1.2806427058824508, + "learning_rate": 9.95888802813617e-06, + "loss": 0.1723, + "step": 101 + }, + { + "epoch": 0.4154786150712831, + "grad_norm": 0.919332585767889, + "learning_rate": 9.955790880502278e-06, + "loss": 0.1219, + "step": 102 + }, + { + "epoch": 0.4195519348268839, + "grad_norm": 0.8901964293186232, + "learning_rate": 9.952581786765057e-06, + "loss": 0.1157, + "step": 103 + }, + { + "epoch": 0.42362525458248473, + "grad_norm": 1.3877972822654616, + "learning_rate": 9.949260819414557e-06, + "loss": 0.1642, + "step": 104 + }, + { + "epoch": 0.42769857433808556, + "grad_norm": 0.9602184939318458, + "learning_rate": 9.945828053467939e-06, + "loss": 0.1224, + "step": 105 + }, + { + "epoch": 0.4317718940936864, + "grad_norm": 1.230791876608231, + "learning_rate": 9.942283566467773e-06, + "loss": 0.1596, + "step": 106 + }, + { + "epoch": 0.43584521384928715, + "grad_norm": 1.1454248942159495, + "learning_rate": 9.938627438480295e-06, + "loss": 0.1541, + "step": 107 + }, + { + "epoch": 0.439918533604888, + "grad_norm": 1.0873300186194603, + "learning_rate": 9.93485975209359e-06, + "loss": 0.1533, + "step": 108 + }, + { + "epoch": 0.4439918533604888, + "grad_norm": 0.9668569607934798, + "learning_rate": 9.930980592415728e-06, + "loss": 0.1539, + "step": 109 + }, + { + "epoch": 0.4480651731160896, + "grad_norm": 1.487429443095859, + "learning_rate": 9.926990047072849e-06, + "loss": 0.2379, + "step": 110 + }, + { + "epoch": 0.45213849287169044, + "grad_norm": 1.036501582869458, + "learning_rate": 9.922888206207174e-06, + "loss": 0.1181, + "step": 111 + }, + { + "epoch": 0.45621181262729127, + "grad_norm": 0.9427386345315173, + "learning_rate": 9.918675162474974e-06, + "loss": 0.1157, + "step": 112 + }, + { + "epoch": 0.46028513238289204, + "grad_norm": 1.1671785006625848, + 
"learning_rate": 9.914351011044472e-06, + "loss": 0.1671, + "step": 113 + }, + { + "epoch": 0.46435845213849286, + "grad_norm": 0.8485104800209154, + "learning_rate": 9.909915849593705e-06, + "loss": 0.1094, + "step": 114 + }, + { + "epoch": 0.4684317718940937, + "grad_norm": 0.895507646361391, + "learning_rate": 9.905369778308304e-06, + "loss": 0.1205, + "step": 115 + }, + { + "epoch": 0.4725050916496945, + "grad_norm": 1.1024237478073182, + "learning_rate": 9.900712899879237e-06, + "loss": 0.1551, + "step": 116 + }, + { + "epoch": 0.47657841140529533, + "grad_norm": 1.0811464118865846, + "learning_rate": 9.895945319500488e-06, + "loss": 0.1402, + "step": 117 + }, + { + "epoch": 0.48065173116089616, + "grad_norm": 0.9829410685047446, + "learning_rate": 9.891067144866687e-06, + "loss": 0.1381, + "step": 118 + }, + { + "epoch": 0.4847250509164969, + "grad_norm": 0.8855824729064482, + "learning_rate": 9.886078486170665e-06, + "loss": 0.1038, + "step": 119 + }, + { + "epoch": 0.48879837067209775, + "grad_norm": 1.1091690462920576, + "learning_rate": 9.880979456100974e-06, + "loss": 0.1372, + "step": 120 + }, + { + "epoch": 0.49287169042769857, + "grad_norm": 0.907049897730717, + "learning_rate": 9.875770169839343e-06, + "loss": 0.1322, + "step": 121 + }, + { + "epoch": 0.4969450101832994, + "grad_norm": 1.0224824312976686, + "learning_rate": 9.870450745058066e-06, + "loss": 0.1257, + "step": 122 + }, + { + "epoch": 0.5010183299389002, + "grad_norm": 1.0439109698157967, + "learning_rate": 9.865021301917358e-06, + "loss": 0.1317, + "step": 123 + }, + { + "epoch": 0.505091649694501, + "grad_norm": 0.8972366065592501, + "learning_rate": 9.859481963062623e-06, + "loss": 0.1104, + "step": 124 + }, + { + "epoch": 0.5091649694501018, + "grad_norm": 0.916952485621608, + "learning_rate": 9.853832853621703e-06, + "loss": 0.124, + "step": 125 + }, + { + "epoch": 0.5132382892057027, + "grad_norm": 0.7586835858660547, + "learning_rate": 9.848074101202037e-06, + "loss": 0.1191, + 
"step": 126 + }, + { + "epoch": 0.5173116089613035, + "grad_norm": 0.9149593226270635, + "learning_rate": 9.842205835887785e-06, + "loss": 0.1188, + "step": 127 + }, + { + "epoch": 0.5213849287169042, + "grad_norm": 0.9483144871900878, + "learning_rate": 9.836228190236892e-06, + "loss": 0.1392, + "step": 128 + }, + { + "epoch": 0.5254582484725051, + "grad_norm": 1.1137009286811568, + "learning_rate": 9.83014129927808e-06, + "loss": 0.1331, + "step": 129 + }, + { + "epoch": 0.5295315682281059, + "grad_norm": 1.0049886812823983, + "learning_rate": 9.823945300507815e-06, + "loss": 0.1393, + "step": 130 + }, + { + "epoch": 0.5336048879837068, + "grad_norm": 1.0017821694016227, + "learning_rate": 9.817640333887194e-06, + "loss": 0.1376, + "step": 131 + }, + { + "epoch": 0.5376782077393075, + "grad_norm": 0.8770993451067021, + "learning_rate": 9.81122654183878e-06, + "loss": 0.1075, + "step": 132 + }, + { + "epoch": 0.5417515274949084, + "grad_norm": 0.8112662923925413, + "learning_rate": 9.804704069243389e-06, + "loss": 0.1149, + "step": 133 + }, + { + "epoch": 0.5458248472505092, + "grad_norm": 0.7783508225595258, + "learning_rate": 9.798073063436815e-06, + "loss": 0.1077, + "step": 134 + }, + { + "epoch": 0.5498981670061099, + "grad_norm": 1.6671316247114485, + "learning_rate": 9.791333674206507e-06, + "loss": 0.1892, + "step": 135 + }, + { + "epoch": 0.5539714867617108, + "grad_norm": 0.8856245620297392, + "learning_rate": 9.784486053788179e-06, + "loss": 0.1075, + "step": 136 + }, + { + "epoch": 0.5580448065173116, + "grad_norm": 2.0578900491298824, + "learning_rate": 9.77753035686237e-06, + "loss": 0.1472, + "step": 137 + }, + { + "epoch": 0.5621181262729125, + "grad_norm": 1.148525636808097, + "learning_rate": 9.770466740550963e-06, + "loss": 0.1598, + "step": 138 + }, + { + "epoch": 0.5661914460285132, + "grad_norm": 0.8665254831769179, + "learning_rate": 9.763295364413616e-06, + "loss": 0.1186, + "step": 139 + }, + { + "epoch": 0.570264765784114, + "grad_norm": 
1.0970826186220186, + "learning_rate": 9.756016390444174e-06, + "loss": 0.1386, + "step": 140 + }, + { + "epoch": 0.5743380855397149, + "grad_norm": 0.9530034310899396, + "learning_rate": 9.748629983067004e-06, + "loss": 0.1282, + "step": 141 + }, + { + "epoch": 0.5784114052953157, + "grad_norm": 1.2706893271757027, + "learning_rate": 9.741136309133279e-06, + "loss": 0.1754, + "step": 142 + }, + { + "epoch": 0.5824847250509165, + "grad_norm": 0.9703463762849697, + "learning_rate": 9.733535537917211e-06, + "loss": 0.1194, + "step": 143 + }, + { + "epoch": 0.5865580448065173, + "grad_norm": 0.8038414888371753, + "learning_rate": 9.725827841112226e-06, + "loss": 0.1162, + "step": 144 + }, + { + "epoch": 0.5906313645621182, + "grad_norm": 0.9411283645508486, + "learning_rate": 9.718013392827087e-06, + "loss": 0.1121, + "step": 145 + }, + { + "epoch": 0.594704684317719, + "grad_norm": 1.501666156048829, + "learning_rate": 9.710092369581966e-06, + "loss": 0.16, + "step": 146 + }, + { + "epoch": 0.5987780040733197, + "grad_norm": 0.9141719119872903, + "learning_rate": 9.702064950304442e-06, + "loss": 0.1211, + "step": 147 + }, + { + "epoch": 0.6028513238289206, + "grad_norm": 0.8652675727574004, + "learning_rate": 9.693931316325473e-06, + "loss": 0.0946, + "step": 148 + }, + { + "epoch": 0.6069246435845214, + "grad_norm": 0.7377787499846402, + "learning_rate": 9.685691651375297e-06, + "loss": 0.1016, + "step": 149 + }, + { + "epoch": 0.6109979633401222, + "grad_norm": 0.7630312206018969, + "learning_rate": 9.677346141579277e-06, + "loss": 0.1014, + "step": 150 + }, + { + "epoch": 0.615071283095723, + "grad_norm": 0.9718289359974593, + "learning_rate": 9.668894975453705e-06, + "loss": 0.1562, + "step": 151 + }, + { + "epoch": 0.6191446028513238, + "grad_norm": 1.004301729468449, + "learning_rate": 9.66033834390153e-06, + "loss": 0.1372, + "step": 152 + }, + { + "epoch": 0.6232179226069247, + "grad_norm": 0.9350824611493259, + "learning_rate": 9.65167644020806e-06, + 
"loss": 0.1254, + "step": 153 + }, + { + "epoch": 0.6272912423625254, + "grad_norm": 0.7612329276402703, + "learning_rate": 9.64290946003659e-06, + "loss": 0.0989, + "step": 154 + }, + { + "epoch": 0.6313645621181263, + "grad_norm": 0.7706614538086551, + "learning_rate": 9.63403760142398e-06, + "loss": 0.1013, + "step": 155 + }, + { + "epoch": 0.6354378818737271, + "grad_norm": 1.0210499034582712, + "learning_rate": 9.625061064776183e-06, + "loss": 0.1134, + "step": 156 + }, + { + "epoch": 0.639511201629328, + "grad_norm": 0.7560805642981956, + "learning_rate": 9.61598005286372e-06, + "loss": 0.0939, + "step": 157 + }, + { + "epoch": 0.6435845213849287, + "grad_norm": 1.0834289937869723, + "learning_rate": 9.606794770817102e-06, + "loss": 0.1785, + "step": 158 + }, + { + "epoch": 0.6476578411405295, + "grad_norm": 1.0611196002268826, + "learning_rate": 9.597505426122184e-06, + "loss": 0.1571, + "step": 159 + }, + { + "epoch": 0.6517311608961304, + "grad_norm": 1.0914261737532949, + "learning_rate": 9.588112228615495e-06, + "loss": 0.1745, + "step": 160 + }, + { + "epoch": 0.6558044806517311, + "grad_norm": 0.953948451978483, + "learning_rate": 9.57861539047949e-06, + "loss": 0.1353, + "step": 161 + }, + { + "epoch": 0.659877800407332, + "grad_norm": 1.2562247665468482, + "learning_rate": 9.569015126237744e-06, + "loss": 0.1521, + "step": 162 + }, + { + "epoch": 0.6639511201629328, + "grad_norm": 0.8283783602425362, + "learning_rate": 9.559311652750135e-06, + "loss": 0.1161, + "step": 163 + }, + { + "epoch": 0.6680244399185336, + "grad_norm": 0.7823509791751794, + "learning_rate": 9.549505189207924e-06, + "loss": 0.0976, + "step": 164 + }, + { + "epoch": 0.6720977596741344, + "grad_norm": 1.118258806444578, + "learning_rate": 9.539595957128803e-06, + "loss": 0.171, + "step": 165 + }, + { + "epoch": 0.6761710794297352, + "grad_norm": 0.7563799438807557, + "learning_rate": 9.529584180351902e-06, + "loss": 0.1159, + "step": 166 + }, + { + "epoch": 0.6802443991853361, + 
"grad_norm": 1.0059732424782886, + "learning_rate": 9.519470085032733e-06, + "loss": 0.1278, + "step": 167 + }, + { + "epoch": 0.6843177189409368, + "grad_norm": 0.8261325503708756, + "learning_rate": 9.509253899638066e-06, + "loss": 0.104, + "step": 168 + }, + { + "epoch": 0.6883910386965377, + "grad_norm": 1.1918252125330613, + "learning_rate": 9.498935854940785e-06, + "loss": 0.1682, + "step": 169 + }, + { + "epoch": 0.6924643584521385, + "grad_norm": 0.7216709177105455, + "learning_rate": 9.488516184014667e-06, + "loss": 0.1089, + "step": 170 + }, + { + "epoch": 0.6965376782077393, + "grad_norm": 0.8952054280934858, + "learning_rate": 9.477995122229117e-06, + "loss": 0.1521, + "step": 171 + }, + { + "epoch": 0.7006109979633401, + "grad_norm": 0.6538828419017942, + "learning_rate": 9.467372907243858e-06, + "loss": 0.1012, + "step": 172 + }, + { + "epoch": 0.7046843177189409, + "grad_norm": 0.840723056036209, + "learning_rate": 9.456649779003548e-06, + "loss": 0.117, + "step": 173 + }, + { + "epoch": 0.7087576374745418, + "grad_norm": 0.7652580794490056, + "learning_rate": 9.44582597973238e-06, + "loss": 0.1284, + "step": 174 + }, + { + "epoch": 0.7128309572301426, + "grad_norm": 0.9696904154678632, + "learning_rate": 9.434901753928593e-06, + "loss": 0.1429, + "step": 175 + }, + { + "epoch": 0.7169042769857433, + "grad_norm": 0.7509027450046076, + "learning_rate": 9.423877348358956e-06, + "loss": 0.1006, + "step": 176 + }, + { + "epoch": 0.7209775967413442, + "grad_norm": 0.6942112976471692, + "learning_rate": 9.4127530120532e-06, + "loss": 0.1042, + "step": 177 + }, + { + "epoch": 0.725050916496945, + "grad_norm": 1.4641902043350905, + "learning_rate": 9.401528996298375e-06, + "loss": 0.1676, + "step": 178 + }, + { + "epoch": 0.7291242362525459, + "grad_norm": 0.7418396518869238, + "learning_rate": 9.390205554633193e-06, + "loss": 0.1082, + "step": 179 + }, + { + "epoch": 0.7331975560081466, + "grad_norm": 1.2074617530849705, + "learning_rate": 
9.378782942842292e-06, + "loss": 0.1401, + "step": 180 + }, + { + "epoch": 0.7372708757637475, + "grad_norm": 1.2938802390610347, + "learning_rate": 9.367261418950459e-06, + "loss": 0.1855, + "step": 181 + }, + { + "epoch": 0.7413441955193483, + "grad_norm": 1.225757248706894, + "learning_rate": 9.355641243216798e-06, + "loss": 0.1729, + "step": 182 + }, + { + "epoch": 0.745417515274949, + "grad_norm": 1.1483380054973364, + "learning_rate": 9.343922678128854e-06, + "loss": 0.1078, + "step": 183 + }, + { + "epoch": 0.7494908350305499, + "grad_norm": 0.8222440765781929, + "learning_rate": 9.332105988396692e-06, + "loss": 0.1239, + "step": 184 + }, + { + "epoch": 0.7535641547861507, + "grad_norm": 0.9655962832595171, + "learning_rate": 9.3201914409469e-06, + "loss": 0.1309, + "step": 185 + }, + { + "epoch": 0.7576374745417516, + "grad_norm": 0.8060791719318856, + "learning_rate": 9.308179304916573e-06, + "loss": 0.1159, + "step": 186 + }, + { + "epoch": 0.7617107942973523, + "grad_norm": 0.7357782726661909, + "learning_rate": 9.29606985164723e-06, + "loss": 0.1052, + "step": 187 + }, + { + "epoch": 0.7657841140529531, + "grad_norm": 0.9536045205176826, + "learning_rate": 9.283863354678683e-06, + "loss": 0.1351, + "step": 188 + }, + { + "epoch": 0.769857433808554, + "grad_norm": 0.8771938059672718, + "learning_rate": 9.27156008974286e-06, + "loss": 0.1304, + "step": 189 + }, + { + "epoch": 0.7739307535641547, + "grad_norm": 0.7232888469506753, + "learning_rate": 9.259160334757575e-06, + "loss": 0.1054, + "step": 190 + }, + { + "epoch": 0.7780040733197556, + "grad_norm": 0.8295211262810136, + "learning_rate": 9.246664369820249e-06, + "loss": 0.1323, + "step": 191 + }, + { + "epoch": 0.7820773930753564, + "grad_norm": 1.546126242212441, + "learning_rate": 9.234072477201588e-06, + "loss": 0.2385, + "step": 192 + }, + { + "epoch": 0.7861507128309573, + "grad_norm": 1.3189210288828541, + "learning_rate": 9.2213849413392e-06, + "loss": 0.1312, + "step": 193 + }, + { + 
"epoch": 0.790224032586558, + "grad_norm": 0.6640416710388396, + "learning_rate": 9.208602048831176e-06, + "loss": 0.1032, + "step": 194 + }, + { + "epoch": 0.7942973523421588, + "grad_norm": 0.7975892776697048, + "learning_rate": 9.195724088429611e-06, + "loss": 0.1089, + "step": 195 + }, + { + "epoch": 0.7983706720977597, + "grad_norm": 0.706905690575772, + "learning_rate": 9.18275135103409e-06, + "loss": 0.1166, + "step": 196 + }, + { + "epoch": 0.8024439918533605, + "grad_norm": 0.8769448196441653, + "learning_rate": 9.169684129685099e-06, + "loss": 0.1317, + "step": 197 + }, + { + "epoch": 0.8065173116089613, + "grad_norm": 1.3681899543939136, + "learning_rate": 9.156522719557428e-06, + "loss": 0.1892, + "step": 198 + }, + { + "epoch": 0.8105906313645621, + "grad_norm": 1.0165895452906009, + "learning_rate": 9.143267417953486e-06, + "loss": 0.1526, + "step": 199 + }, + { + "epoch": 0.814663951120163, + "grad_norm": 0.9252869599364745, + "learning_rate": 9.129918524296596e-06, + "loss": 0.1791, + "step": 200 + }, + { + "epoch": 0.8187372708757638, + "grad_norm": 0.7566289195807724, + "learning_rate": 9.11647634012422e-06, + "loss": 0.1018, + "step": 201 + }, + { + "epoch": 0.8228105906313645, + "grad_norm": 0.7097020344942068, + "learning_rate": 9.102941169081167e-06, + "loss": 0.1174, + "step": 202 + }, + { + "epoch": 0.8268839103869654, + "grad_norm": 0.8335131746923946, + "learning_rate": 9.089313316912708e-06, + "loss": 0.14, + "step": 203 + }, + { + "epoch": 0.8309572301425662, + "grad_norm": 0.7934600650652943, + "learning_rate": 9.075593091457692e-06, + "loss": 0.1208, + "step": 204 + }, + { + "epoch": 0.835030549898167, + "grad_norm": 0.7614374059129773, + "learning_rate": 9.061780802641582e-06, + "loss": 0.1166, + "step": 205 + }, + { + "epoch": 0.8391038696537678, + "grad_norm": 0.7158974362347166, + "learning_rate": 9.047876762469451e-06, + "loss": 0.1046, + "step": 206 + }, + { + "epoch": 0.8431771894093686, + "grad_norm": 0.676023527010282, + 
"learning_rate": 9.033881285018945e-06, + "loss": 0.1049, + "step": 207 + }, + { + "epoch": 0.8472505091649695, + "grad_norm": 1.0542817712970116, + "learning_rate": 9.019794686433174e-06, + "loss": 0.1605, + "step": 208 + }, + { + "epoch": 0.8513238289205702, + "grad_norm": 0.791238316768574, + "learning_rate": 9.005617284913586e-06, + "loss": 0.1008, + "step": 209 + }, + { + "epoch": 0.8553971486761711, + "grad_norm": 1.3679274286147247, + "learning_rate": 8.991349400712772e-06, + "loss": 0.1174, + "step": 210 + }, + { + "epoch": 0.8594704684317719, + "grad_norm": 0.8904165376343479, + "learning_rate": 8.976991356127225e-06, + "loss": 0.1252, + "step": 211 + }, + { + "epoch": 0.8635437881873728, + "grad_norm": 0.6365058101639782, + "learning_rate": 8.962543475490068e-06, + "loss": 0.1054, + "step": 212 + }, + { + "epoch": 0.8676171079429735, + "grad_norm": 0.6899915324730952, + "learning_rate": 8.948006085163735e-06, + "loss": 0.1059, + "step": 213 + }, + { + "epoch": 0.8716904276985743, + "grad_norm": 0.7033665303348221, + "learning_rate": 8.933379513532575e-06, + "loss": 0.1055, + "step": 214 + }, + { + "epoch": 0.8757637474541752, + "grad_norm": 0.7051229848942461, + "learning_rate": 8.91866409099546e-06, + "loss": 0.1047, + "step": 215 + }, + { + "epoch": 0.879837067209776, + "grad_norm": 0.7365152922519815, + "learning_rate": 8.903860149958308e-06, + "loss": 0.1028, + "step": 216 + }, + { + "epoch": 0.8839103869653768, + "grad_norm": 0.8798834115379963, + "learning_rate": 8.888968024826575e-06, + "loss": 0.131, + "step": 217 + }, + { + "epoch": 0.8879837067209776, + "grad_norm": 0.8127281754244611, + "learning_rate": 8.873988051997702e-06, + "loss": 0.1014, + "step": 218 + }, + { + "epoch": 0.8920570264765784, + "grad_norm": 0.841292566312256, + "learning_rate": 8.85892056985352e-06, + "loss": 0.1335, + "step": 219 + }, + { + "epoch": 0.8961303462321792, + "grad_norm": 1.3435689868107352, + "learning_rate": 8.8437659187526e-06, + "loss": 0.2286, + "step": 
220 + }, + { + "epoch": 0.90020366598778, + "grad_norm": 1.8444300521677208, + "learning_rate": 8.828524441022575e-06, + "loss": 0.1827, + "step": 221 + }, + { + "epoch": 0.9042769857433809, + "grad_norm": 0.7545922474592645, + "learning_rate": 8.813196480952393e-06, + "loss": 0.1027, + "step": 222 + }, + { + "epoch": 0.9083503054989817, + "grad_norm": 0.75537983489465, + "learning_rate": 8.797782384784549e-06, + "loss": 0.1198, + "step": 223 + }, + { + "epoch": 0.9124236252545825, + "grad_norm": 0.8104999041705286, + "learning_rate": 8.782282500707262e-06, + "loss": 0.1029, + "step": 224 + }, + { + "epoch": 0.9164969450101833, + "grad_norm": 0.8405282400775482, + "learning_rate": 8.766697178846611e-06, + "loss": 0.1241, + "step": 225 + }, + { + "epoch": 0.9205702647657841, + "grad_norm": 1.013551552697806, + "learning_rate": 8.751026771258622e-06, + "loss": 0.1343, + "step": 226 + }, + { + "epoch": 0.924643584521385, + "grad_norm": 0.6728989996123187, + "learning_rate": 8.735271631921322e-06, + "loss": 0.1058, + "step": 227 + }, + { + "epoch": 0.9287169042769857, + "grad_norm": 0.8690442261224494, + "learning_rate": 8.719432116726738e-06, + "loss": 0.1332, + "step": 228 + }, + { + "epoch": 0.9327902240325866, + "grad_norm": 0.9449187305589617, + "learning_rate": 8.703508583472855e-06, + "loss": 0.1451, + "step": 229 + }, + { + "epoch": 0.9368635437881874, + "grad_norm": 0.8067318337898685, + "learning_rate": 8.68750139185554e-06, + "loss": 0.1248, + "step": 230 + }, + { + "epoch": 0.9409368635437881, + "grad_norm": 0.7905017587261095, + "learning_rate": 8.671410903460416e-06, + "loss": 0.119, + "step": 231 + }, + { + "epoch": 0.945010183299389, + "grad_norm": 1.1238154965476772, + "learning_rate": 8.65523748175469e-06, + "loss": 0.1559, + "step": 232 + }, + { + "epoch": 0.9490835030549898, + "grad_norm": 1.1027211644152675, + "learning_rate": 8.63898149207895e-06, + "loss": 0.1693, + "step": 233 + }, + { + "epoch": 0.9531568228105907, + "grad_norm": 
0.9411765578825619, + "learning_rate": 8.622643301638902e-06, + "loss": 0.1346, + "step": 234 + }, + { + "epoch": 0.9572301425661914, + "grad_norm": 0.6884466751221227, + "learning_rate": 8.606223279497081e-06, + "loss": 0.0968, + "step": 235 + }, + { + "epoch": 0.9613034623217923, + "grad_norm": 0.7219918781543078, + "learning_rate": 8.589721796564521e-06, + "loss": 0.0966, + "step": 236 + }, + { + "epoch": 0.9653767820773931, + "grad_norm": 0.7967809896092082, + "learning_rate": 8.57313922559236e-06, + "loss": 0.1201, + "step": 237 + }, + { + "epoch": 0.9694501018329938, + "grad_norm": 0.8113807921190012, + "learning_rate": 8.556475941163436e-06, + "loss": 0.1097, + "step": 238 + }, + { + "epoch": 0.9735234215885947, + "grad_norm": 1.0943551126152973, + "learning_rate": 8.539732319683817e-06, + "loss": 0.1552, + "step": 239 + }, + { + "epoch": 0.9775967413441955, + "grad_norm": 0.7854046329247982, + "learning_rate": 8.5229087393743e-06, + "loss": 0.1138, + "step": 240 + }, + { + "epoch": 0.9816700610997964, + "grad_norm": 1.1720562073286809, + "learning_rate": 8.506005580261872e-06, + "loss": 0.1525, + "step": 241 + }, + { + "epoch": 0.9857433808553971, + "grad_norm": 0.718895289386658, + "learning_rate": 8.489023224171114e-06, + "loss": 0.1082, + "step": 242 + }, + { + "epoch": 0.9898167006109979, + "grad_norm": 0.613834884154541, + "learning_rate": 8.47196205471559e-06, + "loss": 0.0877, + "step": 243 + }, + { + "epoch": 0.9938900203665988, + "grad_norm": 0.9789990123927295, + "learning_rate": 8.45482245728917e-06, + "loss": 0.1675, + "step": 244 + }, + { + "epoch": 0.9979633401221996, + "grad_norm": 1.5580291175140415, + "learning_rate": 8.437604819057336e-06, + "loss": 0.15, + "step": 245 + }, + { + "epoch": 1.0020366598778003, + "grad_norm": 0.7685763736473359, + "learning_rate": 8.420309528948422e-06, + "loss": 0.1072, + "step": 246 + }, + { + "epoch": 1.0061099796334012, + "grad_norm": 0.6434124354999965, + "learning_rate": 8.40293697764484e-06, + "loss": 
0.0844, + "step": 247 + }, + { + "epoch": 1.010183299389002, + "grad_norm": 0.5841852692369695, + "learning_rate": 8.385487557574253e-06, + "loss": 0.0859, + "step": 248 + }, + { + "epoch": 1.0142566191446027, + "grad_norm": 0.6061435282600086, + "learning_rate": 8.367961662900704e-06, + "loss": 0.0809, + "step": 249 + }, + { + "epoch": 1.0183299389002036, + "grad_norm": 0.8866327026089017, + "learning_rate": 8.35035968951572e-06, + "loss": 0.0996, + "step": 250 + }, + { + "epoch": 1.0224032586558045, + "grad_norm": 0.789311514275454, + "learning_rate": 8.33268203502937e-06, + "loss": 0.0999, + "step": 251 + }, + { + "epoch": 1.0264765784114054, + "grad_norm": 0.7470915493623619, + "learning_rate": 8.314929098761268e-06, + "loss": 0.0836, + "step": 252 + }, + { + "epoch": 1.030549898167006, + "grad_norm": 0.7275329446393578, + "learning_rate": 8.297101281731576e-06, + "loss": 0.0866, + "step": 253 + }, + { + "epoch": 1.034623217922607, + "grad_norm": 0.7227258514093932, + "learning_rate": 8.279198986651925e-06, + "loss": 0.0901, + "step": 254 + }, + { + "epoch": 1.0386965376782078, + "grad_norm": 0.6146754288814568, + "learning_rate": 8.261222617916335e-06, + "loss": 0.0789, + "step": 255 + }, + { + "epoch": 1.0427698574338085, + "grad_norm": 0.8514917967475527, + "learning_rate": 8.243172581592066e-06, + "loss": 0.1017, + "step": 256 + }, + { + "epoch": 1.0468431771894093, + "grad_norm": 0.7579530053794002, + "learning_rate": 8.22504928541045e-06, + "loss": 0.085, + "step": 257 + }, + { + "epoch": 1.0509164969450102, + "grad_norm": 0.6252945360785674, + "learning_rate": 8.206853138757687e-06, + "loss": 0.0777, + "step": 258 + }, + { + "epoch": 1.054989816700611, + "grad_norm": 0.7706416070190195, + "learning_rate": 8.188584552665592e-06, + "loss": 0.0833, + "step": 259 + }, + { + "epoch": 1.0590631364562118, + "grad_norm": 0.7980656602534597, + "learning_rate": 8.17024393980231e-06, + "loss": 0.0968, + "step": 260 + }, + { + "epoch": 1.0631364562118126, + 
"grad_norm": 0.6040157250127779, + "learning_rate": 8.15183171446299e-06, + "loss": 0.0632, + "step": 261 + }, + { + "epoch": 1.0672097759674135, + "grad_norm": 0.6247841753185668, + "learning_rate": 8.133348292560442e-06, + "loss": 0.073, + "step": 262 + }, + { + "epoch": 1.0712830957230142, + "grad_norm": 0.9705375898534241, + "learning_rate": 8.114794091615718e-06, + "loss": 0.1158, + "step": 263 + }, + { + "epoch": 1.075356415478615, + "grad_norm": 0.5868673627641846, + "learning_rate": 8.096169530748708e-06, + "loss": 0.0616, + "step": 264 + }, + { + "epoch": 1.079429735234216, + "grad_norm": 0.6511676677923491, + "learning_rate": 8.077475030668647e-06, + "loss": 0.0924, + "step": 265 + }, + { + "epoch": 1.0835030549898166, + "grad_norm": 0.8628274720451822, + "learning_rate": 8.058711013664633e-06, + "loss": 0.0841, + "step": 266 + }, + { + "epoch": 1.0875763747454175, + "grad_norm": 0.6366922861725464, + "learning_rate": 8.039877903596069e-06, + "loss": 0.0781, + "step": 267 + }, + { + "epoch": 1.0916496945010183, + "grad_norm": 0.6679013936452773, + "learning_rate": 8.020976125883105e-06, + "loss": 0.074, + "step": 268 + }, + { + "epoch": 1.0957230142566192, + "grad_norm": 0.7115218188251841, + "learning_rate": 8.002006107497018e-06, + "loss": 0.0909, + "step": 269 + }, + { + "epoch": 1.0997963340122199, + "grad_norm": 0.6044721294592456, + "learning_rate": 7.982968276950568e-06, + "loss": 0.0682, + "step": 270 + }, + { + "epoch": 1.1038696537678208, + "grad_norm": 1.1198941616515987, + "learning_rate": 7.963863064288326e-06, + "loss": 0.1067, + "step": 271 + }, + { + "epoch": 1.1079429735234216, + "grad_norm": 0.5209754431453612, + "learning_rate": 7.944690901076949e-06, + "loss": 0.0743, + "step": 272 + }, + { + "epoch": 1.1120162932790225, + "grad_norm": 0.7595155294604947, + "learning_rate": 7.925452220395436e-06, + "loss": 0.0903, + "step": 273 + }, + { + "epoch": 1.1160896130346232, + "grad_norm": 0.692986942378061, + "learning_rate": 
7.906147456825349e-06, + "loss": 0.0835, + "step": 274 + }, + { + "epoch": 1.120162932790224, + "grad_norm": 0.6090247595888676, + "learning_rate": 7.886777046440993e-06, + "loss": 0.0739, + "step": 275 + }, + { + "epoch": 1.124236252545825, + "grad_norm": 1.042879657433014, + "learning_rate": 7.867341426799562e-06, + "loss": 0.1219, + "step": 276 + }, + { + "epoch": 1.1283095723014256, + "grad_norm": 0.7996350912985092, + "learning_rate": 7.847841036931263e-06, + "loss": 0.0913, + "step": 277 + }, + { + "epoch": 1.1323828920570265, + "grad_norm": 0.5332223023774978, + "learning_rate": 7.828276317329388e-06, + "loss": 0.0757, + "step": 278 + }, + { + "epoch": 1.1364562118126273, + "grad_norm": 0.5635521535562537, + "learning_rate": 7.80864770994038e-06, + "loss": 0.0641, + "step": 279 + }, + { + "epoch": 1.140529531568228, + "grad_norm": 0.6987390220817623, + "learning_rate": 7.788955658153829e-06, + "loss": 0.0821, + "step": 280 + }, + { + "epoch": 1.1446028513238289, + "grad_norm": 0.6962375028972171, + "learning_rate": 7.769200606792476e-06, + "loss": 0.0686, + "step": 281 + }, + { + "epoch": 1.1486761710794298, + "grad_norm": 1.3654151723987282, + "learning_rate": 7.749383002102147e-06, + "loss": 0.0959, + "step": 282 + }, + { + "epoch": 1.1527494908350306, + "grad_norm": 0.8307371553295552, + "learning_rate": 7.72950329174169e-06, + "loss": 0.0859, + "step": 283 + }, + { + "epoch": 1.1568228105906313, + "grad_norm": 0.9349410963502993, + "learning_rate": 7.709561924772855e-06, + "loss": 0.0874, + "step": 284 + }, + { + "epoch": 1.1608961303462322, + "grad_norm": 1.1074351510282887, + "learning_rate": 7.689559351650142e-06, + "loss": 0.1012, + "step": 285 + }, + { + "epoch": 1.164969450101833, + "grad_norm": 0.9416925430977862, + "learning_rate": 7.66949602421064e-06, + "loss": 0.0923, + "step": 286 + }, + { + "epoch": 1.1690427698574337, + "grad_norm": 0.6905374196650557, + "learning_rate": 7.649372395663816e-06, + "loss": 0.0676, + "step": 287 + }, + { + 
"epoch": 1.1731160896130346, + "grad_norm": 0.7723265248527, + "learning_rate": 7.629188920581267e-06, + "loss": 0.0812, + "step": 288 + }, + { + "epoch": 1.1771894093686355, + "grad_norm": 0.815712697347911, + "learning_rate": 7.608946054886468e-06, + "loss": 0.0755, + "step": 289 + }, + { + "epoch": 1.1812627291242364, + "grad_norm": 0.8142685931991146, + "learning_rate": 7.588644255844464e-06, + "loss": 0.097, + "step": 290 + }, + { + "epoch": 1.185336048879837, + "grad_norm": 0.6448741497851783, + "learning_rate": 7.568283982051538e-06, + "loss": 0.0719, + "step": 291 + }, + { + "epoch": 1.189409368635438, + "grad_norm": 0.5839738850266732, + "learning_rate": 7.5478656934248626e-06, + "loss": 0.0715, + "step": 292 + }, + { + "epoch": 1.1934826883910388, + "grad_norm": 0.6469439075383099, + "learning_rate": 7.527389851192099e-06, + "loss": 0.0745, + "step": 293 + }, + { + "epoch": 1.1975560081466394, + "grad_norm": 0.7083801028247904, + "learning_rate": 7.506856917880989e-06, + "loss": 0.077, + "step": 294 + }, + { + "epoch": 1.2016293279022403, + "grad_norm": 0.5584165288788803, + "learning_rate": 7.486267357308896e-06, + "loss": 0.0757, + "step": 295 + }, + { + "epoch": 1.2057026476578412, + "grad_norm": 0.7093198997979762, + "learning_rate": 7.465621634572336e-06, + "loss": 0.0821, + "step": 296 + }, + { + "epoch": 1.2097759674134418, + "grad_norm": 0.8170300802168915, + "learning_rate": 7.444920216036473e-06, + "loss": 0.0857, + "step": 297 + }, + { + "epoch": 1.2138492871690427, + "grad_norm": 0.6052440895883001, + "learning_rate": 7.4241635693245766e-06, + "loss": 0.0706, + "step": 298 + }, + { + "epoch": 1.2179226069246436, + "grad_norm": 0.5358799947933865, + "learning_rate": 7.40335216330746e-06, + "loss": 0.0698, + "step": 299 + }, + { + "epoch": 1.2219959266802445, + "grad_norm": 0.57379599749216, + "learning_rate": 7.382486468092899e-06, + "loss": 0.0778, + "step": 300 + }, + { + "epoch": 1.2260692464358451, + "grad_norm": 1.9428087979785686, + 
"learning_rate": 7.361566955014999e-06, + "loss": 0.0881, + "step": 301 + }, + { + "epoch": 1.230142566191446, + "grad_norm": 0.8052892598402892, + "learning_rate": 7.340594096623559e-06, + "loss": 0.0807, + "step": 302 + }, + { + "epoch": 1.234215885947047, + "grad_norm": 0.5697887043869344, + "learning_rate": 7.319568366673389e-06, + "loss": 0.0761, + "step": 303 + }, + { + "epoch": 1.2382892057026478, + "grad_norm": 0.8103909158109461, + "learning_rate": 7.2984902401136115e-06, + "loss": 0.084, + "step": 304 + }, + { + "epoch": 1.2423625254582484, + "grad_norm": 0.7842393785164873, + "learning_rate": 7.277360193076936e-06, + "loss": 0.0762, + "step": 305 + }, + { + "epoch": 1.2464358452138493, + "grad_norm": 0.5722519256754022, + "learning_rate": 7.256178702868899e-06, + "loss": 0.0723, + "step": 306 + }, + { + "epoch": 1.2505091649694502, + "grad_norm": 0.5772908994522038, + "learning_rate": 7.234946247957087e-06, + "loss": 0.0809, + "step": 307 + }, + { + "epoch": 1.2545824847250509, + "grad_norm": 0.7436038482880873, + "learning_rate": 7.213663307960321e-06, + "loss": 0.0822, + "step": 308 + }, + { + "epoch": 1.2586558044806517, + "grad_norm": 0.9056276676473372, + "learning_rate": 7.192330363637832e-06, + "loss": 0.1005, + "step": 309 + }, + { + "epoch": 1.2627291242362526, + "grad_norm": 0.6358405282433213, + "learning_rate": 7.170947896878392e-06, + "loss": 0.0737, + "step": 310 + }, + { + "epoch": 1.2668024439918533, + "grad_norm": 0.5954677442268389, + "learning_rate": 7.149516390689433e-06, + "loss": 0.0694, + "step": 311 + }, + { + "epoch": 1.2708757637474541, + "grad_norm": 0.6088987642638558, + "learning_rate": 7.12803632918614e-06, + "loss": 0.0712, + "step": 312 + }, + { + "epoch": 1.274949083503055, + "grad_norm": 0.6327020826012894, + "learning_rate": 7.1065081975805086e-06, + "loss": 0.0709, + "step": 313 + }, + { + "epoch": 1.2790224032586557, + "grad_norm": 0.8052023527305602, + "learning_rate": 7.084932482170385e-06, + "loss": 0.0884, + 
"step": 314 + }, + { + "epoch": 1.2830957230142566, + "grad_norm": 0.6196057921350728, + "learning_rate": 7.063309670328491e-06, + "loss": 0.0791, + "step": 315 + }, + { + "epoch": 1.2871690427698574, + "grad_norm": 0.6874689974891509, + "learning_rate": 7.041640250491398e-06, + "loss": 0.0725, + "step": 316 + }, + { + "epoch": 1.2912423625254583, + "grad_norm": 0.5750839664752618, + "learning_rate": 7.019924712148511e-06, + "loss": 0.0676, + "step": 317 + }, + { + "epoch": 1.2953156822810592, + "grad_norm": 0.7742197007819996, + "learning_rate": 6.998163545830998e-06, + "loss": 0.0734, + "step": 318 + }, + { + "epoch": 1.2993890020366599, + "grad_norm": 0.6195282494860945, + "learning_rate": 6.976357243100718e-06, + "loss": 0.0775, + "step": 319 + }, + { + "epoch": 1.3034623217922607, + "grad_norm": 0.6875577568472137, + "learning_rate": 6.954506296539112e-06, + "loss": 0.0878, + "step": 320 + }, + { + "epoch": 1.3075356415478616, + "grad_norm": 0.7320126566140536, + "learning_rate": 6.9326111997360775e-06, + "loss": 0.081, + "step": 321 + }, + { + "epoch": 1.3116089613034623, + "grad_norm": 0.843149661393896, + "learning_rate": 6.910672447278827e-06, + "loss": 0.0918, + "step": 322 + }, + { + "epoch": 1.3156822810590632, + "grad_norm": 0.6249054893219663, + "learning_rate": 6.8886905347406985e-06, + "loss": 0.0859, + "step": 323 + }, + { + "epoch": 1.319755600814664, + "grad_norm": 0.6921145932385908, + "learning_rate": 6.866665958669976e-06, + "loss": 0.0916, + "step": 324 + }, + { + "epoch": 1.3238289205702647, + "grad_norm": 0.5550915898105676, + "learning_rate": 6.844599216578667e-06, + "loss": 0.0755, + "step": 325 + }, + { + "epoch": 1.3279022403258656, + "grad_norm": 0.8923390816851975, + "learning_rate": 6.822490806931262e-06, + "loss": 0.0903, + "step": 326 + }, + { + "epoch": 1.3319755600814664, + "grad_norm": 0.6246266762679324, + "learning_rate": 6.800341229133486e-06, + "loss": 0.0833, + "step": 327 + }, + { + "epoch": 1.336048879837067, + 
"grad_norm": 0.8145107114030108, + "learning_rate": 6.778150983520999e-06, + "loss": 0.1131, + "step": 328 + }, + { + "epoch": 1.340122199592668, + "grad_norm": 0.6593824730687659, + "learning_rate": 6.755920571348111e-06, + "loss": 0.0783, + "step": 329 + }, + { + "epoch": 1.3441955193482689, + "grad_norm": 0.7871250490461288, + "learning_rate": 6.73365049477645e-06, + "loss": 0.0791, + "step": 330 + }, + { + "epoch": 1.3482688391038695, + "grad_norm": 1.5668561976011524, + "learning_rate": 6.711341256863623e-06, + "loss": 0.116, + "step": 331 + }, + { + "epoch": 1.3523421588594704, + "grad_norm": 0.6238740583787221, + "learning_rate": 6.688993361551847e-06, + "loss": 0.079, + "step": 332 + }, + { + "epoch": 1.3564154786150713, + "grad_norm": 0.7619408457347728, + "learning_rate": 6.66660731365657e-06, + "loss": 0.0876, + "step": 333 + }, + { + "epoch": 1.3604887983706722, + "grad_norm": 0.5260527244857047, + "learning_rate": 6.64418361885507e-06, + "loss": 0.08, + "step": 334 + }, + { + "epoch": 1.364562118126273, + "grad_norm": 0.7273285421050715, + "learning_rate": 6.621722783675024e-06, + "loss": 0.0803, + "step": 335 + }, + { + "epoch": 1.3686354378818737, + "grad_norm": 0.5976232642003504, + "learning_rate": 6.599225315483076e-06, + "loss": 0.0773, + "step": 336 + }, + { + "epoch": 1.3727087576374746, + "grad_norm": 0.5354838304438007, + "learning_rate": 6.576691722473368e-06, + "loss": 0.0699, + "step": 337 + }, + { + "epoch": 1.3767820773930755, + "grad_norm": 0.7164176676979538, + "learning_rate": 6.554122513656065e-06, + "loss": 0.0898, + "step": 338 + }, + { + "epoch": 1.3808553971486761, + "grad_norm": 0.694148331721139, + "learning_rate": 6.531518198845854e-06, + "loss": 0.0796, + "step": 339 + }, + { + "epoch": 1.384928716904277, + "grad_norm": 0.8668103406807447, + "learning_rate": 6.508879288650431e-06, + "loss": 0.0864, + "step": 340 + }, + { + "epoch": 1.3890020366598779, + "grad_norm": 0.905281439053181, + "learning_rate": 6.486206294458966e-06, 
+ "loss": 0.1032, + "step": 341 + }, + { + "epoch": 1.3930753564154785, + "grad_norm": 0.6090466800578089, + "learning_rate": 6.463499728430549e-06, + "loss": 0.0739, + "step": 342 + }, + { + "epoch": 1.3971486761710794, + "grad_norm": 0.870879688990723, + "learning_rate": 6.4407601034826225e-06, + "loss": 0.0911, + "step": 343 + }, + { + "epoch": 1.4012219959266803, + "grad_norm": 0.5520211139425468, + "learning_rate": 6.417987933279397e-06, + "loss": 0.0763, + "step": 344 + }, + { + "epoch": 1.405295315682281, + "grad_norm": 0.567715626333358, + "learning_rate": 6.395183732220242e-06, + "loss": 0.0685, + "step": 345 + }, + { + "epoch": 1.4093686354378818, + "grad_norm": 0.6211209151590639, + "learning_rate": 6.372348015428077e-06, + "loss": 0.0763, + "step": 346 + }, + { + "epoch": 1.4134419551934827, + "grad_norm": 0.6300074755034071, + "learning_rate": 6.349481298737723e-06, + "loss": 0.0811, + "step": 347 + }, + { + "epoch": 1.4175152749490836, + "grad_norm": 0.5995929997342367, + "learning_rate": 6.32658409868426e-06, + "loss": 0.0651, + "step": 348 + }, + { + "epoch": 1.4215885947046842, + "grad_norm": 0.6049082049048853, + "learning_rate": 6.303656932491349e-06, + "loss": 0.0763, + "step": 349 + }, + { + "epoch": 1.4256619144602851, + "grad_norm": 0.9822040378006914, + "learning_rate": 6.280700318059563e-06, + "loss": 0.1054, + "step": 350 + }, + { + "epoch": 1.429735234215886, + "grad_norm": 0.8449953491667241, + "learning_rate": 6.257714773954674e-06, + "loss": 0.102, + "step": 351 + }, + { + "epoch": 1.4338085539714869, + "grad_norm": 0.5999494361508966, + "learning_rate": 6.234700819395946e-06, + "loss": 0.0813, + "step": 352 + }, + { + "epoch": 1.4378818737270875, + "grad_norm": 0.6512734204457252, + "learning_rate": 6.211658974244407e-06, + "loss": 0.0829, + "step": 353 + }, + { + "epoch": 1.4419551934826884, + "grad_norm": 0.616189473053879, + "learning_rate": 6.1885897589911e-06, + "loss": 0.0782, + "step": 354 + }, + { + "epoch": 
1.4460285132382893, + "grad_norm": 0.9515071645483372, + "learning_rate": 6.1654936947453355e-06, + "loss": 0.0975, + "step": 355 + }, + { + "epoch": 1.45010183299389, + "grad_norm": 0.6021477200299695, + "learning_rate": 6.142371303222909e-06, + "loss": 0.071, + "step": 356 + }, + { + "epoch": 1.4541751527494908, + "grad_norm": 0.678653399667039, + "learning_rate": 6.119223106734328e-06, + "loss": 0.0812, + "step": 357 + }, + { + "epoch": 1.4582484725050917, + "grad_norm": 0.6291449966527708, + "learning_rate": 6.0960496281729995e-06, + "loss": 0.0689, + "step": 358 + }, + { + "epoch": 1.4623217922606924, + "grad_norm": 0.5932315331898775, + "learning_rate": 6.072851391003432e-06, + "loss": 0.075, + "step": 359 + }, + { + "epoch": 1.4663951120162932, + "grad_norm": 0.6492759903008403, + "learning_rate": 6.0496289192494e-06, + "loss": 0.0851, + "step": 360 + }, + { + "epoch": 1.4704684317718941, + "grad_norm": 0.8227230707733793, + "learning_rate": 6.026382737482116e-06, + "loss": 0.0939, + "step": 361 + }, + { + "epoch": 1.4745417515274948, + "grad_norm": 0.7553836500415501, + "learning_rate": 6.003113370808375e-06, + "loss": 0.089, + "step": 362 + }, + { + "epoch": 1.4786150712830957, + "grad_norm": 0.8341775236686235, + "learning_rate": 5.979821344858695e-06, + "loss": 0.1087, + "step": 363 + }, + { + "epoch": 1.4826883910386965, + "grad_norm": 0.7407455393175063, + "learning_rate": 5.956507185775441e-06, + "loss": 0.0875, + "step": 364 + }, + { + "epoch": 1.4867617107942974, + "grad_norm": 0.6302976593180085, + "learning_rate": 5.933171420200946e-06, + "loss": 0.074, + "step": 365 + }, + { + "epoch": 1.4908350305498983, + "grad_norm": 0.6017971359417691, + "learning_rate": 5.909814575265609e-06, + "loss": 0.0771, + "step": 366 + }, + { + "epoch": 1.494908350305499, + "grad_norm": 0.5327101770243312, + "learning_rate": 5.88643717857599e-06, + "loss": 0.064, + "step": 367 + }, + { + "epoch": 1.4989816700610998, + "grad_norm": 0.5530940520217251, + 
"learning_rate": 5.863039758202889e-06, + "loss": 0.0732, + "step": 368 + }, + { + "epoch": 1.5030549898167007, + "grad_norm": 0.575697348750185, + "learning_rate": 5.839622842669423e-06, + "loss": 0.0794, + "step": 369 + }, + { + "epoch": 1.5071283095723014, + "grad_norm": 0.7879263375060825, + "learning_rate": 5.816186960939084e-06, + "loss": 0.0873, + "step": 370 + }, + { + "epoch": 1.5112016293279023, + "grad_norm": 0.6477317557359421, + "learning_rate": 5.7927326424037875e-06, + "loss": 0.0808, + "step": 371 + }, + { + "epoch": 1.5152749490835031, + "grad_norm": 0.6424367902201409, + "learning_rate": 5.7692604168719225e-06, + "loss": 0.0743, + "step": 372 + }, + { + "epoch": 1.5193482688391038, + "grad_norm": 0.5205522605427637, + "learning_rate": 5.745770814556373e-06, + "loss": 0.0737, + "step": 373 + }, + { + "epoch": 1.5234215885947047, + "grad_norm": 0.8437328046734178, + "learning_rate": 5.722264366062549e-06, + "loss": 0.1075, + "step": 374 + }, + { + "epoch": 1.5274949083503055, + "grad_norm": 0.705710140903758, + "learning_rate": 5.698741602376395e-06, + "loss": 0.0854, + "step": 375 + }, + { + "epoch": 1.5315682281059062, + "grad_norm": 0.5479078071724787, + "learning_rate": 5.675203054852403e-06, + "loss": 0.0735, + "step": 376 + }, + { + "epoch": 1.535641547861507, + "grad_norm": 0.628783685599969, + "learning_rate": 5.651649255201603e-06, + "loss": 0.0893, + "step": 377 + }, + { + "epoch": 1.539714867617108, + "grad_norm": 0.6393301256468628, + "learning_rate": 5.628080735479553e-06, + "loss": 0.0808, + "step": 378 + }, + { + "epoch": 1.5437881873727086, + "grad_norm": 0.5419627427127232, + "learning_rate": 5.604498028074323e-06, + "loss": 0.0693, + "step": 379 + }, + { + "epoch": 1.5478615071283097, + "grad_norm": 0.5585947140971957, + "learning_rate": 5.580901665694471e-06, + "loss": 0.0708, + "step": 380 + }, + { + "epoch": 1.5519348268839104, + "grad_norm": 0.7630225067115134, + "learning_rate": 5.557292181357003e-06, + "loss": 0.0916, + 
"step": 381 + }, + { + "epoch": 1.556008146639511, + "grad_norm": 0.7132672152748122, + "learning_rate": 5.533670108375334e-06, + "loss": 0.075, + "step": 382 + }, + { + "epoch": 1.5600814663951121, + "grad_norm": 0.6317003602484598, + "learning_rate": 5.510035980347249e-06, + "loss": 0.0629, + "step": 383 + }, + { + "epoch": 1.5641547861507128, + "grad_norm": 1.0965759213987818, + "learning_rate": 5.486390331142841e-06, + "loss": 0.129, + "step": 384 + }, + { + "epoch": 1.5682281059063137, + "grad_norm": 0.9961574441023341, + "learning_rate": 5.462733694892452e-06, + "loss": 0.0994, + "step": 385 + }, + { + "epoch": 1.5723014256619146, + "grad_norm": 0.55138988687004, + "learning_rate": 5.439066605974615e-06, + "loss": 0.0884, + "step": 386 + }, + { + "epoch": 1.5763747454175152, + "grad_norm": 0.642013542309746, + "learning_rate": 5.415389599003972e-06, + "loss": 0.0758, + "step": 387 + }, + { + "epoch": 1.580448065173116, + "grad_norm": 0.7003801753473788, + "learning_rate": 5.391703208819209e-06, + "loss": 0.0822, + "step": 388 + }, + { + "epoch": 1.584521384928717, + "grad_norm": 0.641290834723853, + "learning_rate": 5.368007970470964e-06, + "loss": 0.0794, + "step": 389 + }, + { + "epoch": 1.5885947046843176, + "grad_norm": 0.7220121980133926, + "learning_rate": 5.344304419209748e-06, + "loss": 0.0908, + "step": 390 + }, + { + "epoch": 1.5926680244399185, + "grad_norm": 0.5338391641253294, + "learning_rate": 5.3205930904738544e-06, + "loss": 0.065, + "step": 391 + }, + { + "epoch": 1.5967413441955194, + "grad_norm": 0.8220572260271829, + "learning_rate": 5.296874519877256e-06, + "loss": 0.1063, + "step": 392 + }, + { + "epoch": 1.60081466395112, + "grad_norm": 0.6948752546108359, + "learning_rate": 5.273149243197517e-06, + "loss": 0.092, + "step": 393 + }, + { + "epoch": 1.6048879837067211, + "grad_norm": 0.5973741327673737, + "learning_rate": 5.2494177963636785e-06, + "loss": 0.0781, + "step": 394 + }, + { + "epoch": 1.6089613034623218, + "grad_norm": 
0.7082285511834933, + "learning_rate": 5.225680715444168e-06, + "loss": 0.0844, + "step": 395 + }, + { + "epoch": 1.6130346232179225, + "grad_norm": 0.5776337469174156, + "learning_rate": 5.201938536634674e-06, + "loss": 0.0755, + "step": 396 + }, + { + "epoch": 1.6171079429735236, + "grad_norm": 0.6439617862748772, + "learning_rate": 5.178191796246043e-06, + "loss": 0.0692, + "step": 397 + }, + { + "epoch": 1.6211812627291242, + "grad_norm": 0.693976310427531, + "learning_rate": 5.154441030692162e-06, + "loss": 0.0844, + "step": 398 + }, + { + "epoch": 1.625254582484725, + "grad_norm": 0.6310937269228004, + "learning_rate": 5.1306867764778445e-06, + "loss": 0.0605, + "step": 399 + }, + { + "epoch": 1.629327902240326, + "grad_norm": 0.5557882712930212, + "learning_rate": 5.106929570186706e-06, + "loss": 0.0616, + "step": 400 + }, + { + "epoch": 1.6334012219959266, + "grad_norm": 0.7105865341669582, + "learning_rate": 5.083169948469049e-06, + "loss": 0.0888, + "step": 401 + }, + { + "epoch": 1.6374745417515275, + "grad_norm": 0.785914280291214, + "learning_rate": 5.059408448029737e-06, + "loss": 0.0924, + "step": 402 + }, + { + "epoch": 1.6415478615071284, + "grad_norm": 0.760854086300454, + "learning_rate": 5.0356456056160715e-06, + "loss": 0.0899, + "step": 403 + }, + { + "epoch": 1.645621181262729, + "grad_norm": 0.5160083716123763, + "learning_rate": 5.0118819580056686e-06, + "loss": 0.0676, + "step": 404 + }, + { + "epoch": 1.64969450101833, + "grad_norm": 0.5926381494692319, + "learning_rate": 4.988118041994332e-06, + "loss": 0.0719, + "step": 405 + }, + { + "epoch": 1.6537678207739308, + "grad_norm": 0.5387161542722511, + "learning_rate": 4.964354394383929e-06, + "loss": 0.0757, + "step": 406 + }, + { + "epoch": 1.6578411405295315, + "grad_norm": 0.5028703565858573, + "learning_rate": 4.940591551970264e-06, + "loss": 0.066, + "step": 407 + }, + { + "epoch": 1.6619144602851323, + "grad_norm": 0.5736039189545704, + "learning_rate": 4.9168300515309515e-06, + 
"loss": 0.0724, + "step": 408 + }, + { + "epoch": 1.6659877800407332, + "grad_norm": 0.984691517709554, + "learning_rate": 4.8930704298132965e-06, + "loss": 0.1022, + "step": 409 + }, + { + "epoch": 1.6700610997963339, + "grad_norm": 0.5927643518511072, + "learning_rate": 4.869313223522159e-06, + "loss": 0.0718, + "step": 410 + }, + { + "epoch": 1.674134419551935, + "grad_norm": 0.566899954027869, + "learning_rate": 4.845558969307839e-06, + "loss": 0.0707, + "step": 411 + }, + { + "epoch": 1.6782077393075356, + "grad_norm": 0.5788045225856518, + "learning_rate": 4.821808203753959e-06, + "loss": 0.0785, + "step": 412 + }, + { + "epoch": 1.6822810590631363, + "grad_norm": 0.5224303081188956, + "learning_rate": 4.798061463365327e-06, + "loss": 0.0755, + "step": 413 + }, + { + "epoch": 1.6863543788187374, + "grad_norm": 0.5256289426946346, + "learning_rate": 4.774319284555833e-06, + "loss": 0.0725, + "step": 414 + }, + { + "epoch": 1.690427698574338, + "grad_norm": 0.5514388613040209, + "learning_rate": 4.7505822036363214e-06, + "loss": 0.0698, + "step": 415 + }, + { + "epoch": 1.694501018329939, + "grad_norm": 0.7507069842501244, + "learning_rate": 4.726850756802486e-06, + "loss": 0.0779, + "step": 416 + }, + { + "epoch": 1.6985743380855398, + "grad_norm": 0.5535879129510451, + "learning_rate": 4.703125480122747e-06, + "loss": 0.0677, + "step": 417 + }, + { + "epoch": 1.7026476578411405, + "grad_norm": 0.7586101007852933, + "learning_rate": 4.679406909526147e-06, + "loss": 0.0959, + "step": 418 + }, + { + "epoch": 1.7067209775967414, + "grad_norm": 0.5235003242969455, + "learning_rate": 4.655695580790254e-06, + "loss": 0.0782, + "step": 419 + }, + { + "epoch": 1.7107942973523422, + "grad_norm": 0.8957116693381422, + "learning_rate": 4.631992029529037e-06, + "loss": 0.104, + "step": 420 + }, + { + "epoch": 1.7148676171079429, + "grad_norm": 0.5397767746056776, + "learning_rate": 4.608296791180793e-06, + "loss": 0.0794, + "step": 421 + }, + { + "epoch": 
1.7189409368635438, + "grad_norm": 0.5510200443982937, + "learning_rate": 4.584610400996028e-06, + "loss": 0.0727, + "step": 422 + }, + { + "epoch": 1.7230142566191446, + "grad_norm": 0.7218539657297133, + "learning_rate": 4.560933394025386e-06, + "loss": 0.0812, + "step": 423 + }, + { + "epoch": 1.7270875763747453, + "grad_norm": 0.6450718218339647, + "learning_rate": 4.537266305107549e-06, + "loss": 0.0857, + "step": 424 + }, + { + "epoch": 1.7311608961303462, + "grad_norm": 0.5240562658243174, + "learning_rate": 4.513609668857162e-06, + "loss": 0.0658, + "step": 425 + }, + { + "epoch": 1.735234215885947, + "grad_norm": 0.5203639231556227, + "learning_rate": 4.489964019652752e-06, + "loss": 0.0637, + "step": 426 + }, + { + "epoch": 1.7393075356415477, + "grad_norm": 0.7157158022653508, + "learning_rate": 4.4663298916246665e-06, + "loss": 0.0878, + "step": 427 + }, + { + "epoch": 1.7433808553971488, + "grad_norm": 0.6919816756435726, + "learning_rate": 4.442707818642999e-06, + "loss": 0.0801, + "step": 428 + }, + { + "epoch": 1.7474541751527495, + "grad_norm": 0.7381380434766637, + "learning_rate": 4.419098334305529e-06, + "loss": 0.0831, + "step": 429 + }, + { + "epoch": 1.7515274949083504, + "grad_norm": 0.720540642558599, + "learning_rate": 4.395501971925677e-06, + "loss": 0.0782, + "step": 430 + }, + { + "epoch": 1.7556008146639512, + "grad_norm": 0.6824501618323201, + "learning_rate": 4.371919264520449e-06, + "loss": 0.0795, + "step": 431 + }, + { + "epoch": 1.759674134419552, + "grad_norm": 0.7572127655566931, + "learning_rate": 4.348350744798399e-06, + "loss": 0.0798, + "step": 432 + }, + { + "epoch": 1.7637474541751528, + "grad_norm": 0.5634906535431049, + "learning_rate": 4.324796945147598e-06, + "loss": 0.0728, + "step": 433 + }, + { + "epoch": 1.7678207739307537, + "grad_norm": 0.7686272783650896, + "learning_rate": 4.301258397623606e-06, + "loss": 0.0782, + "step": 434 + }, + { + "epoch": 1.7718940936863543, + "grad_norm": 0.712370232236961, + 
"learning_rate": 4.2777356339374526e-06, + "loss": 0.0856, + "step": 435 + }, + { + "epoch": 1.7759674134419552, + "grad_norm": 0.9290726298620688, + "learning_rate": 4.254229185443628e-06, + "loss": 0.0783, + "step": 436 + }, + { + "epoch": 1.780040733197556, + "grad_norm": 0.5716455342190312, + "learning_rate": 4.230739583128078e-06, + "loss": 0.0701, + "step": 437 + }, + { + "epoch": 1.7841140529531567, + "grad_norm": 1.1820087104502222, + "learning_rate": 4.2072673575962125e-06, + "loss": 0.0977, + "step": 438 + }, + { + "epoch": 1.7881873727087576, + "grad_norm": 0.935231069436012, + "learning_rate": 4.183813039060919e-06, + "loss": 0.1103, + "step": 439 + }, + { + "epoch": 1.7922606924643585, + "grad_norm": 0.5110831181130548, + "learning_rate": 4.160377157330579e-06, + "loss": 0.0787, + "step": 440 + }, + { + "epoch": 1.7963340122199591, + "grad_norm": 0.6036766363233732, + "learning_rate": 4.136960241797113e-06, + "loss": 0.0648, + "step": 441 + }, + { + "epoch": 1.8004073319755602, + "grad_norm": 0.6253574899226054, + "learning_rate": 4.113562821424012e-06, + "loss": 0.0856, + "step": 442 + }, + { + "epoch": 1.804480651731161, + "grad_norm": 0.5737047935293089, + "learning_rate": 4.090185424734392e-06, + "loss": 0.0769, + "step": 443 + }, + { + "epoch": 1.8085539714867616, + "grad_norm": 0.6675899229386297, + "learning_rate": 4.066828579799054e-06, + "loss": 0.0761, + "step": 444 + }, + { + "epoch": 1.8126272912423627, + "grad_norm": 0.5292920337446484, + "learning_rate": 4.043492814224559e-06, + "loss": 0.0684, + "step": 445 + }, + { + "epoch": 1.8167006109979633, + "grad_norm": 0.7551251294419339, + "learning_rate": 4.020178655141307e-06, + "loss": 0.0792, + "step": 446 + }, + { + "epoch": 1.8207739307535642, + "grad_norm": 0.6230513981956748, + "learning_rate": 3.9968866291916254e-06, + "loss": 0.082, + "step": 447 + }, + { + "epoch": 1.824847250509165, + "grad_norm": 0.5934762051634166, + "learning_rate": 3.973617262517886e-06, + "loss": 0.0638, + 
"step": 448 + }, + { + "epoch": 1.8289205702647657, + "grad_norm": 0.675289075760882, + "learning_rate": 3.950371080750602e-06, + "loss": 0.0795, + "step": 449 + }, + { + "epoch": 1.8329938900203666, + "grad_norm": 0.9147897851217298, + "learning_rate": 3.927148608996569e-06, + "loss": 0.1063, + "step": 450 + }, + { + "epoch": 1.8370672097759675, + "grad_norm": 1.1105124935695379, + "learning_rate": 3.903950371827001e-06, + "loss": 0.087, + "step": 451 + }, + { + "epoch": 1.8411405295315681, + "grad_norm": 0.5889228060416761, + "learning_rate": 3.880776893265673e-06, + "loss": 0.0767, + "step": 452 + }, + { + "epoch": 1.845213849287169, + "grad_norm": 0.5032013275393701, + "learning_rate": 3.85762869677709e-06, + "loss": 0.0575, + "step": 453 + }, + { + "epoch": 1.84928716904277, + "grad_norm": 0.6559816525725005, + "learning_rate": 3.834506305254667e-06, + "loss": 0.0896, + "step": 454 + }, + { + "epoch": 1.8533604887983706, + "grad_norm": 0.7532212211298266, + "learning_rate": 3.811410241008902e-06, + "loss": 0.0856, + "step": 455 + }, + { + "epoch": 1.8574338085539714, + "grad_norm": 0.5738261899311171, + "learning_rate": 3.788341025755595e-06, + "loss": 0.0733, + "step": 456 + }, + { + "epoch": 1.8615071283095723, + "grad_norm": 0.7208121405309541, + "learning_rate": 3.765299180604055e-06, + "loss": 0.0829, + "step": 457 + }, + { + "epoch": 1.865580448065173, + "grad_norm": 0.5296431964709345, + "learning_rate": 3.7422852260453274e-06, + "loss": 0.0704, + "step": 458 + }, + { + "epoch": 1.869653767820774, + "grad_norm": 0.6719188744341698, + "learning_rate": 3.719299681940437e-06, + "loss": 0.09, + "step": 459 + }, + { + "epoch": 1.8737270875763747, + "grad_norm": 0.5867743083933351, + "learning_rate": 3.696343067508651e-06, + "loss": 0.0816, + "step": 460 + }, + { + "epoch": 1.8778004073319754, + "grad_norm": 0.49214441808140924, + "learning_rate": 3.673415901315743e-06, + "loss": 0.0648, + "step": 461 + }, + { + "epoch": 1.8818737270875765, + "grad_norm": 
0.5871371037925425, + "learning_rate": 3.650518701262278e-06, + "loss": 0.0732, + "step": 462 + }, + { + "epoch": 1.8859470468431772, + "grad_norm": 0.6415787500205045, + "learning_rate": 3.6276519845719237e-06, + "loss": 0.0731, + "step": 463 + }, + { + "epoch": 1.890020366598778, + "grad_norm": 0.7505406685838056, + "learning_rate": 3.6048162677797595e-06, + "loss": 0.0847, + "step": 464 + }, + { + "epoch": 1.894093686354379, + "grad_norm": 0.5203285332468817, + "learning_rate": 3.582012066720605e-06, + "loss": 0.0666, + "step": 465 + }, + { + "epoch": 1.8981670061099796, + "grad_norm": 0.5664388898818371, + "learning_rate": 3.559239896517379e-06, + "loss": 0.0814, + "step": 466 + }, + { + "epoch": 1.9022403258655805, + "grad_norm": 0.9911644175743987, + "learning_rate": 3.536500271569452e-06, + "loss": 0.0766, + "step": 467 + }, + { + "epoch": 1.9063136456211813, + "grad_norm": 0.5541020704419577, + "learning_rate": 3.5137937055410343e-06, + "loss": 0.0677, + "step": 468 + }, + { + "epoch": 1.910386965376782, + "grad_norm": 0.7733672524931661, + "learning_rate": 3.4911207113495703e-06, + "loss": 0.0883, + "step": 469 + }, + { + "epoch": 1.9144602851323829, + "grad_norm": 0.5994378228763769, + "learning_rate": 3.4684818011541484e-06, + "loss": 0.0779, + "step": 470 + }, + { + "epoch": 1.9185336048879837, + "grad_norm": 0.7925341371043554, + "learning_rate": 3.4458774863439366e-06, + "loss": 0.0882, + "step": 471 + }, + { + "epoch": 1.9226069246435844, + "grad_norm": 0.8007592704584606, + "learning_rate": 3.423308277526633e-06, + "loss": 0.088, + "step": 472 + }, + { + "epoch": 1.9266802443991853, + "grad_norm": 0.6130809497009315, + "learning_rate": 3.4007746845169253e-06, + "loss": 0.0836, + "step": 473 + }, + { + "epoch": 1.9307535641547862, + "grad_norm": 0.6462482272234705, + "learning_rate": 3.3782772163249767e-06, + "loss": 0.0671, + "step": 474 + }, + { + "epoch": 1.9348268839103868, + "grad_norm": 0.4781126271398999, + "learning_rate": 
3.3558163811449317e-06, + "loss": 0.0694, + "step": 475 + }, + { + "epoch": 1.938900203665988, + "grad_norm": 0.7328378092623428, + "learning_rate": 3.3333926863434317e-06, + "loss": 0.0847, + "step": 476 + }, + { + "epoch": 1.9429735234215886, + "grad_norm": 0.6002434419865161, + "learning_rate": 3.311006638448155e-06, + "loss": 0.0724, + "step": 477 + }, + { + "epoch": 1.9470468431771895, + "grad_norm": 0.6276269450961816, + "learning_rate": 3.288658743136378e-06, + "loss": 0.066, + "step": 478 + }, + { + "epoch": 1.9511201629327903, + "grad_norm": 0.8586874267355604, + "learning_rate": 3.2663495052235505e-06, + "loss": 0.1077, + "step": 479 + }, + { + "epoch": 1.955193482688391, + "grad_norm": 0.6160197884131686, + "learning_rate": 3.2440794286518896e-06, + "loss": 0.085, + "step": 480 + }, + { + "epoch": 1.9592668024439919, + "grad_norm": 0.5494876667673338, + "learning_rate": 3.2218490164790015e-06, + "loss": 0.0656, + "step": 481 + }, + { + "epoch": 1.9633401221995928, + "grad_norm": 0.5953585828338879, + "learning_rate": 3.199658770866515e-06, + "loss": 0.0754, + "step": 482 + }, + { + "epoch": 1.9674134419551934, + "grad_norm": 0.5401626625339145, + "learning_rate": 3.1775091930687374e-06, + "loss": 0.0668, + "step": 483 + }, + { + "epoch": 1.9714867617107943, + "grad_norm": 0.5111554952361316, + "learning_rate": 3.1554007834213357e-06, + "loss": 0.0686, + "step": 484 + }, + { + "epoch": 1.9755600814663952, + "grad_norm": 0.722625027505199, + "learning_rate": 3.1333340413300263e-06, + "loss": 0.0848, + "step": 485 + }, + { + "epoch": 1.9796334012219958, + "grad_norm": 0.5959140201333295, + "learning_rate": 3.1113094652593023e-06, + "loss": 0.0701, + "step": 486 + }, + { + "epoch": 1.9837067209775967, + "grad_norm": 0.8208975505384392, + "learning_rate": 3.0893275527211742e-06, + "loss": 0.1013, + "step": 487 + }, + { + "epoch": 1.9877800407331976, + "grad_norm": 0.6224896038998268, + "learning_rate": 3.067388800263923e-06, + "loss": 0.0832, + "step": 488 + 
}, + { + "epoch": 1.9918533604887982, + "grad_norm": 0.6241176624569225, + "learning_rate": 3.04549370346089e-06, + "loss": 0.0678, + "step": 489 + }, + { + "epoch": 1.9959266802443993, + "grad_norm": 0.6061475364205144, + "learning_rate": 3.0236427568992845e-06, + "loss": 0.0768, + "step": 490 + }, + { + "epoch": 2.0, + "grad_norm": 0.7233218000939327, + "learning_rate": 3.0018364541690048e-06, + "loss": 0.0861, + "step": 491 + }, + { + "epoch": 2.0040733197556007, + "grad_norm": 0.4042479382149737, + "learning_rate": 2.9800752878514903e-06, + "loss": 0.0466, + "step": 492 + }, + { + "epoch": 2.0081466395112018, + "grad_norm": 0.5139407106351861, + "learning_rate": 2.958359749508603e-06, + "loss": 0.0515, + "step": 493 + }, + { + "epoch": 2.0122199592668024, + "grad_norm": 0.46115082125552953, + "learning_rate": 2.936690329671511e-06, + "loss": 0.0435, + "step": 494 + }, + { + "epoch": 2.016293279022403, + "grad_norm": 0.4268534345955416, + "learning_rate": 2.915067517829615e-06, + "loss": 0.0455, + "step": 495 + }, + { + "epoch": 2.020366598778004, + "grad_norm": 0.4635470622803733, + "learning_rate": 2.893491802419492e-06, + "loss": 0.0476, + "step": 496 + }, + { + "epoch": 2.024439918533605, + "grad_norm": 0.411133073158324, + "learning_rate": 2.871963670813861e-06, + "loss": 0.0383, + "step": 497 + }, + { + "epoch": 2.0285132382892055, + "grad_norm": 0.5300632118826186, + "learning_rate": 2.850483609310567e-06, + "loss": 0.0429, + "step": 498 + }, + { + "epoch": 2.0325865580448066, + "grad_norm": 0.5473407298179113, + "learning_rate": 2.829052103121611e-06, + "loss": 0.0461, + "step": 499 + }, + { + "epoch": 2.0366598778004072, + "grad_norm": 0.5221255489838956, + "learning_rate": 2.807669636362169e-06, + "loss": 0.0468, + "step": 500 + } + ], + "logging_steps": 1, + "max_steps": 735, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 250, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + 
"should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 32704414679040.0, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-500/zero_to_fp32.py b/checkpoint-500/zero_to_fp32.py new file mode 100644 index 0000000000000000000000000000000000000000..e69ecd9acb5a235ffbf927091051106d902b3d39 --- /dev/null +++ b/checkpoint-500/zero_to_fp32.py @@ -0,0 +1,674 @@ +#!/usr/bin/env python + +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: Apache-2.0 + +# DeepSpeed Team + +# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets +# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in +# the future. Once extracted, the weights don't require DeepSpeed and can be used in any +# application. +# +# example: +# python zero_to_fp32.py . output_dir/ +# or +# python zero_to_fp32.py . output_dir/ --safe_serialization + +import argparse +import torch +import glob +import math +import os +import re +import json +from tqdm import tqdm +from collections import OrderedDict +from dataclasses import dataclass + +# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with +# DeepSpeed data structures it has to be available in the current python environment. 
+from deepspeed.utils import logger +from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS, + FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES, + FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS) + + +@dataclass +class zero_model_state: + buffers: dict() + param_shapes: dict() + shared_params: list + ds_version: int + frozen_param_shapes: dict() + frozen_param_fragments: dict() + + +debug = 0 + +# load to cpu +device = torch.device('cpu') + + +def atoi(text): + return int(text) if text.isdigit() else text + + +def natural_keys(text): + ''' + alist.sort(key=natural_keys) sorts in human order + http://nedbatchelder.com/blog/200712/human_sorting.html + (See Toothy's implementation in the comments) + ''' + return [atoi(c) for c in re.split(r'(\d+)', text)] + + +def get_model_state_file(checkpoint_dir, zero_stage): + if not os.path.isdir(checkpoint_dir): + raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist") + + # there should be only one file + if zero_stage <= 2: + file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt") + elif zero_stage == 3: + file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt") + + if not os.path.exists(file): + raise FileNotFoundError(f"can't find model states file at '{file}'") + + return file + + +def get_checkpoint_files(checkpoint_dir, glob_pattern): + # XXX: need to test that this simple glob rule works for multi-node setup too + ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys) + + if len(ckpt_files) == 0: + raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'") + + return ckpt_files + + +def get_optim_files(checkpoint_dir): + return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt") + + +def get_model_state_files(checkpoint_dir): + return get_checkpoint_files(checkpoint_dir, "*_model_states.pt") + + +def 
parse_model_states(files): + zero_model_states = [] + for file in files: + state_dict = torch.load(file, map_location=device) + + if BUFFER_NAMES not in state_dict: + raise ValueError(f"{file} is not a model state checkpoint") + buffer_names = state_dict[BUFFER_NAMES] + if debug: + print("Found buffers:", buffer_names) + + # recover just the buffers while restoring them to fp32 if they were saved in fp16 + buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names} + param_shapes = state_dict[PARAM_SHAPES] + + # collect parameters that are included in param_shapes + param_names = [] + for s in param_shapes: + for name in s.keys(): + param_names.append(name) + + # update with frozen parameters + frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None) + if frozen_param_shapes is not None: + if debug: + print(f"Found frozen_param_shapes: {frozen_param_shapes}") + param_names += list(frozen_param_shapes.keys()) + + # handle shared params + shared_params = [[k, v] for k, v in state_dict["shared_params"].items()] + + ds_version = state_dict.get(DS_VERSION, None) + + frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None) + + z_model_state = zero_model_state(buffers=buffers, + param_shapes=param_shapes, + shared_params=shared_params, + ds_version=ds_version, + frozen_param_shapes=frozen_param_shapes, + frozen_param_fragments=frozen_param_fragments) + zero_model_states.append(z_model_state) + + return zero_model_states + + +def parse_optim_states(files, ds_checkpoint_dir): + total_files = len(files) + state_dicts = [] + for f in files: + state_dict = torch.load(f, map_location=device) + # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights + # and also handle the case where it was already removed by another helper script + state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None) + state_dicts.append(state_dict) + + if not ZERO_STAGE in 
state_dicts[0][OPTIMIZER_STATE_DICT]: + raise ValueError(f"{files[0]} is not a zero checkpoint") + zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE] + world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT] + + # For ZeRO-2 each param group can have different partition_count as data parallelism for expert + # parameters can be different from data parallelism for non-expert parameters. So we can just + # use the max of the partition_count to get the dp world_size. + + if type(world_size) is list: + world_size = max(world_size) + + if world_size != total_files: + raise ValueError( + f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. " + "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes." + ) + + # the groups are named differently in each stage + if zero_stage <= 2: + fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS + elif zero_stage == 3: + fp32_groups_key = FP32_FLAT_GROUPS + else: + raise ValueError(f"unknown zero stage {zero_stage}") + + if zero_stage <= 2: + fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))] + elif zero_stage == 3: + # if there is more than one param group, there will be multiple flattened tensors - one + # flattened tensor per group - for simplicity merge them into a single tensor + # + # XXX: could make the script more memory efficient for when there are multiple groups - it + # will require matching the sub-lists of param_shapes for each param group flattened tensor + + fp32_flat_groups = [ + torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts)) + ] + + return zero_stage, world_size, fp32_flat_groups + + +def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters): + """ + Returns fp32 state_dict reconstructed from ds checkpoint + + Args: + - ``ds_checkpoint_dir``: path to 
the deepspeed checkpoint folder (where the optimizer files are) + + """ + print(f"Processing zero checkpoint '{ds_checkpoint_dir}'") + + optim_files = get_optim_files(ds_checkpoint_dir) + zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir) + print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}") + + model_files = get_model_state_files(ds_checkpoint_dir) + + zero_model_states = parse_model_states(model_files) + print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}') + + if zero_stage <= 2: + return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states, + exclude_frozen_parameters) + elif zero_stage == 3: + return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states, + exclude_frozen_parameters) + + +def _zero2_merge_frozen_params(state_dict, zero_model_states): + if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0: + return + + frozen_param_shapes = zero_model_states[0].frozen_param_shapes + frozen_param_fragments = zero_model_states[0].frozen_param_fragments + + if debug: + num_elem = sum(s.numel() for s in frozen_param_shapes.values()) + print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}') + + wanted_params = len(frozen_param_shapes) + wanted_numel = sum(s.numel() for s in frozen_param_shapes.values()) + avail_numel = sum([p.numel() for p in frozen_param_fragments.values()]) + print(f'Frozen params: Have {avail_numel} numels to process.') + print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params') + + total_params = 0 + total_numel = 0 + for name, shape in frozen_param_shapes.items(): + total_params += 1 + unpartitioned_numel = shape.numel() + total_numel += unpartitioned_numel + + state_dict[name] = frozen_param_fragments[name] + + if debug: + print(f"{name} full shape: {shape} unpartitioned numel 
{unpartitioned_numel} ") + + print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements") + + +def _has_callable(obj, fn): + attr = getattr(obj, fn, None) + return callable(attr) + + +def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states): + param_shapes = zero_model_states[0].param_shapes + + # Reconstruction protocol: + # + # XXX: document this + + if debug: + for i in range(world_size): + for j in range(len(fp32_flat_groups[0])): + print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}") + + # XXX: memory usage doubles here (zero2) + num_param_groups = len(fp32_flat_groups[0]) + merged_single_partition_of_fp32_groups = [] + for i in range(num_param_groups): + merged_partitions = [sd[i] for sd in fp32_flat_groups] + full_single_fp32_vector = torch.cat(merged_partitions, 0) + merged_single_partition_of_fp32_groups.append(full_single_fp32_vector) + avail_numel = sum( + [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups]) + + if debug: + wanted_params = sum([len(shapes) for shapes in param_shapes]) + wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes]) + # not asserting if there is a mismatch due to possible padding + print(f"Have {avail_numel} numels to process.") + print(f"Need {wanted_numel} numels in {wanted_params} params.") + + # params + # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support + # out-of-core computing solution + total_numel = 0 + total_params = 0 + for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups): + offset = 0 + avail_numel = full_single_fp32_vector.numel() + for name, shape in shapes.items(): + + unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape) + total_numel += unpartitioned_numel + total_params += 1 + + if debug: + 
print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ") + state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape) + offset += unpartitioned_numel + + # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and + # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex + # paddings performed in the code it's almost impossible to predict the exact numbers w/o the + # live optimizer object, so we are checking that the numbers are within the right range + align_to = 2 * world_size + + def zero2_align(x): + return align_to * math.ceil(x / align_to) + + if debug: + print(f"original offset={offset}, avail_numel={avail_numel}") + + offset = zero2_align(offset) + avail_numel = zero2_align(avail_numel) + + if debug: + print(f"aligned offset={offset}, avail_numel={avail_numel}") + + # Sanity check + if offset != avail_numel: + raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong") + + print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements") + + +def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states, + exclude_frozen_parameters): + state_dict = OrderedDict() + + # buffers + buffers = zero_model_states[0].buffers + state_dict.update(buffers) + if debug: + print(f"added {len(buffers)} buffers") + + if not exclude_frozen_parameters: + _zero2_merge_frozen_params(state_dict, zero_model_states) + + _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states) + + # recover shared parameters + for pair in zero_model_states[0].shared_params: + if pair[1] in state_dict: + state_dict[pair[0]] = state_dict[pair[1]] + + return state_dict + + +def zero3_partitioned_param_info(unpartitioned_numel, world_size): + remainder = unpartitioned_numel % world_size + padding_numel = (world_size - remainder) if remainder else 0 + 
partitioned_numel = math.ceil(unpartitioned_numel / world_size) + return partitioned_numel, padding_numel + + +def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states): + if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0: + return + + if debug: + for i in range(world_size): + num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values()) + print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}') + + frozen_param_shapes = zero_model_states[0].frozen_param_shapes + wanted_params = len(frozen_param_shapes) + wanted_numel = sum(s.numel() for s in frozen_param_shapes.values()) + avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size + print(f'Frozen params: Have {avail_numel} numels to process.') + print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params') + + total_params = 0 + total_numel = 0 + for name, shape in zero_model_states[0].frozen_param_shapes.items(): + total_params += 1 + unpartitioned_numel = shape.numel() + total_numel += unpartitioned_numel + + param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states) + state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape) + + partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size) + + if debug: + print( + f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}" + ) + + print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements") + + +def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states): + param_shapes = zero_model_states[0].param_shapes + avail_numel = fp32_flat_groups[0].numel() * world_size + # Reconstruction protocol: For zero3 we need to zip the 
partitions together at boundary of each + # param, re-consolidating each param, while dealing with padding if any + + # merge list of dicts, preserving order + param_shapes = {k: v for d in param_shapes for k, v in d.items()} + + if debug: + for i in range(world_size): + print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}") + + wanted_params = len(param_shapes) + wanted_numel = sum(shape.numel() for shape in param_shapes.values()) + # not asserting if there is a mismatch due to possible padding + avail_numel = fp32_flat_groups[0].numel() * world_size + print(f"Trainable params: Have {avail_numel} numels to process.") + print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.") + + # params + # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support + # out-of-core computing solution + offset = 0 + total_numel = 0 + total_params = 0 + for name, shape in tqdm(param_shapes.items(), desc='Gathering Sharded Weights'): + unpartitioned_numel = shape.numel() + total_numel += unpartitioned_numel + total_params += 1 + partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size) + + if debug: + print( + f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}" + ) + + # XXX: memory usage doubles here + state_dict[name] = torch.cat( + tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)), + 0).narrow(0, 0, unpartitioned_numel).view(shape) + offset += partitioned_numel + + offset *= world_size + + # Sanity check + if offset != avail_numel: + raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong") + + print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements") + + +def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, 
zero_model_states, + exclude_frozen_parameters): + state_dict = OrderedDict() + + # buffers + buffers = zero_model_states[0].buffers + state_dict.update(buffers) + if debug: + print(f"added {len(buffers)} buffers") + + if not exclude_frozen_parameters: + _zero3_merge_frozen_params(state_dict, world_size, zero_model_states) + + _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states) + + # recover shared parameters + for pair in zero_model_states[0].shared_params: + if pair[1] in state_dict: + state_dict[pair[0]] = state_dict[pair[1]] + + return state_dict + + +def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False): + """ + Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with + ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example + via a model hub. + + Args: + - ``checkpoint_dir``: path to the desired checkpoint folder + - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14`` + - ``exclude_frozen_parameters``: exclude frozen parameters + + Returns: + - pytorch ``state_dict`` + + Note: this approach may not work if your application doesn't have sufficient free CPU memory and + you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with + the checkpoint. + + A typical usage might be :: + + from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint + # do the training and checkpoint saving + state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu + model = model.cpu() # move to cpu + model.load_state_dict(state_dict) + # submit to model hub or save the model to share with others + + In this example the ``model`` will no longer be usable in the deepspeed context of the same + application. i.e. 
you will need to re-initialize the deepspeed engine, since + ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it. + + If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead. + + """ + if tag is None: + latest_path = os.path.join(checkpoint_dir, 'latest') + if os.path.isfile(latest_path): + with open(latest_path, 'r') as fd: + tag = fd.read().strip() + else: + raise ValueError(f"Unable to find 'latest' file at {latest_path}") + + ds_checkpoint_dir = os.path.join(checkpoint_dir, tag) + + if not os.path.isdir(ds_checkpoint_dir): + raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist") + + return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters) + + +def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, + output_dir, + max_shard_size="5GB", + safe_serialization=False, + tag=None, + exclude_frozen_parameters=False): + """ + Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be + loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed. + + Args: + - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``) + - ``output_dir``: directory to the pytorch fp32 state_dict output files + - ``max_shard_size``: the maximum size for a checkpoint before being sharded, default value is 5GB + - ``safe_serialization``: whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`). + - ``tag``: checkpoint tag used as a unique identifier for checkpoint. 
If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14`` + - ``exclude_frozen_parameters``: exclude frozen parameters + """ + # Dependency pre-check + if safe_serialization: + try: + from safetensors.torch import save_file + except ImportError: + print('If you want to use `safe_serialization`, please `pip install safetensors`') + raise + if max_shard_size is not None: + try: + from huggingface_hub import split_torch_state_dict_into_shards + except ImportError: + print('If you want to use `max_shard_size`, please `pip install huggingface_hub`') + raise + + # Convert zero checkpoint to state_dict + state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters) + + # Shard the model if it is too big. + weights_name = "model.safetensors" if safe_serialization else "pytorch_model.bin" + if max_shard_size is not None: + filename_pattern = weights_name.replace(".bin", "{suffix}.bin").replace(".safetensors", "{suffix}.safetensors") + state_dict_split = split_torch_state_dict_into_shards(state_dict, + filename_pattern=filename_pattern, + max_shard_size=max_shard_size) + else: + from collections import namedtuple + StateDictSplit = namedtuple("StateDictSplit", ["is_sharded", "filename_to_tensors"]) + state_dict_split = StateDictSplit(is_sharded=False, + filename_to_tensors={weights_name: list(state_dict.keys())}) + + # Save the model + filename_to_tensors = state_dict_split.filename_to_tensors.items() + for shard_file, tensors in tqdm(filename_to_tensors, desc="Saving checkpoint shards"): + shard = {tensor: state_dict[tensor].contiguous() for tensor in tensors} + output_path = os.path.join(output_dir, shard_file) + if safe_serialization: + save_file(shard, output_path, metadata={"format": "pt"}) + else: + torch.save(shard, output_path) + + # Save index if sharded + if state_dict_split.is_sharded: + index = { + "metadata": state_dict_split.metadata, + "weight_map": 
state_dict_split.tensor_to_filename, + } + save_index_file = "model.safetensors.index.json" if safe_serialization else "pytorch_model.bin.index.json" + save_index_file = os.path.join(output_dir, save_index_file) + with open(save_index_file, "w", encoding="utf-8") as f: + content = json.dumps(index, indent=2, sort_keys=True) + "\n" + f.write(content) + + +def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None): + """ + 1. Put the provided model to cpu + 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` + 3. Load it into the provided model + + Args: + - ``model``: the model object to update + - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``) + - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14`` + + Returns: + - ``model`: modified model + + Make sure you have plenty of CPU memory available before you call this function. If you don't + have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it + conveniently placed for you in the checkpoint folder. + + A typical usage might be :: + + from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint + model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir) + # submit to model hub or save the model to share with others + + Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context + of the same application. i.e. you will need to re-initialize the deepspeed engine, since + ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it. 
+ + """ + logger.info(f"Extracting fp32 weights") + state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag) + + logger.info(f"Overwriting model with fp32 weights") + model = model.cpu() + model.load_state_dict(state_dict, strict=False) + + return model + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("checkpoint_dir", + type=str, + help="path to the desired checkpoint folder, e.g., path/checkpoint-12") + parser.add_argument("output_dir", + type=str, + help="directory to the pytorch fp32 state_dict output files" + "(e.g. path/checkpoint-12-output/)") + parser.add_argument( + "--max_shard_size", + type=str, + default="5GB", + help="The maximum size for a checkpoint before being sharded. Checkpoints shard will then be each of size" + "lower than this size. If expressed as a string, needs to be digits followed by a unit (like `5MB`" + "We default it to 5GB in order for models to be able to run easily on free-tier google colab instances" + "without CPU OOM issues.") + parser.add_argument( + "--safe_serialization", + default=False, + action='store_true', + help="Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).") + parser.add_argument("-t", + "--tag", + type=str, + default=None, + help="checkpoint tag used as a unique identifier for checkpoint. 
e.g., global_step1") + parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters") + parser.add_argument("-d", "--debug", action='store_true', help="enable debug") + args = parser.parse_args() + + debug = args.debug + + convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir, + args.output_dir, + max_shard_size=args.max_shard_size, + safe_serialization=args.safe_serialization, + tag=args.tag, + exclude_frozen_parameters=args.exclude_frozen_parameters) diff --git a/config.json b/config.json new file mode 100644 index 0000000000000000000000000000000000000000..c499423bcd77384986c270517f6957e2160cf584 --- /dev/null +++ b/config.json @@ -0,0 +1,40 @@ +{ + "_name_or_path": "meta-llama/Llama-3.1-8B-Instruct", + "architectures": [ + "LlamaForCausalLM" + ], + "attention_bias": false, + "attention_dropout": 0.0, + "bos_token_id": 128000, + "eos_token_id": [ + 128001, + 128008, + 128009 + ], + "head_dim": 128, + "hidden_act": "silu", + "hidden_size": 4096, + "initializer_range": 0.02, + "intermediate_size": 14336, + "max_position_embeddings": 131072, + "mlp_bias": false, + "model_type": "llama", + "num_attention_heads": 32, + "num_hidden_layers": 32, + "num_key_value_heads": 8, + "pretraining_tp": 1, + "rms_norm_eps": 1e-05, + "rope_scaling": { + "factor": 8.0, + "high_freq_factor": 4.0, + "low_freq_factor": 1.0, + "original_max_position_embeddings": 8192, + "rope_type": "llama3" + }, + "rope_theta": 500000.0, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.45.0", + "use_cache": false, + "vocab_size": 128256 +} diff --git a/generation_config.json b/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..6a807a364acb034610b0c0959eb3727910a0babe --- /dev/null +++ b/generation_config.json @@ -0,0 +1,12 @@ +{ + "bos_token_id": 128000, + "do_sample": true, + "eos_token_id": [ + 128001, + 128008, + 128009 + ], + "temperature": 0.6, + "top_p": 0.9, + 
"transformers_version": "4.45.0" +} diff --git a/model-00004-of-00004.safetensors b/model-00004-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ef74c1556bd8f865aef6404e72aeb94437ea7f4b --- /dev/null +++ b/model-00004-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be7f57e93335f721a5a65c70913898258420e951fb7006d7bd842b88658dc10d +size 1168138808 diff --git a/model.safetensors.index.json b/model.safetensors.index.json new file mode 100644 index 0000000000000000000000000000000000000000..0fd8120f1c6acddc268ebc2583058efaf699a771 --- /dev/null +++ b/model.safetensors.index.json @@ -0,0 +1,298 @@ +{ + "metadata": { + "total_size": 16060522496 + }, + "weight_map": { + "lm_head.weight": "model-00004-of-00004.safetensors", + "model.embed_tokens.weight": "model-00001-of-00004.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + 
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.o_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.down_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.k_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.31.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.4.input_layernorm.weight": 
"model-00001-of-00004.safetensors", + "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.q_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.k_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.norm.weight": "model-00004-of-00004.safetensors" + } +} diff --git a/special_tokens_map.json b/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..14daf4588e61b4e4983af0fccaba4d5500c0977c --- /dev/null +++ b/special_tokens_map.json @@ -0,0 +1,26 @@ +{ + "additional_special_tokens": [ + { + "content": "<|eom_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } + ], + "bos_token": { + "content": "<|begin_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "<|eot_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": "<|eot_id|>" +} diff --git a/tokenizer.json b/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2 --- /dev/null +++ b/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b +size 17209920 diff --git a/tokenizer_config.json b/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7d655d20e4224cc5f793625e97b5f1842407cbba --- /dev/null +++ b/tokenizer_config.json @@ -0,0 +1,2068 @@ +{ + "added_tokens_decoder": { + "128000": { + "content": "<|begin_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128001": { + "content": "<|end_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128002": { + "content": 
"<|reserved_special_token_0|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128003": { + "content": "<|reserved_special_token_1|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128004": { + "content": "<|finetune_right_pad_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128005": { + "content": "<|reserved_special_token_2|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128006": { + "content": "<|start_header_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128007": { + "content": "<|end_header_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128008": { + "content": "<|eom_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128009": { + "content": "<|eot_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128010": { + "content": "<|python_tag|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128011": { + "content": "<|reserved_special_token_3|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128012": { + "content": "<|reserved_special_token_4|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128013": { + "content": "<|reserved_special_token_5|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128014": { + "content": "<|reserved_special_token_6|>", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128015": { + "content": "<|reserved_special_token_7|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128016": { + "content": "<|reserved_special_token_8|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128017": { + "content": "<|reserved_special_token_9|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128018": { + "content": "<|reserved_special_token_10|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128019": { + "content": "<|reserved_special_token_11|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128020": { + "content": "<|reserved_special_token_12|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128021": { + "content": "<|reserved_special_token_13|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128022": { + "content": "<|reserved_special_token_14|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128023": { + "content": "<|reserved_special_token_15|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128024": { + "content": "<|reserved_special_token_16|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128025": { + "content": "<|reserved_special_token_17|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128026": { + "content": "<|reserved_special_token_18|>", + "lstrip": false, + 
"normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128027": { + "content": "<|reserved_special_token_19|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128028": { + "content": "<|reserved_special_token_20|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128029": { + "content": "<|reserved_special_token_21|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128030": { + "content": "<|reserved_special_token_22|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128031": { + "content": "<|reserved_special_token_23|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128032": { + "content": "<|reserved_special_token_24|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128033": { + "content": "<|reserved_special_token_25|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128034": { + "content": "<|reserved_special_token_26|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128035": { + "content": "<|reserved_special_token_27|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128036": { + "content": "<|reserved_special_token_28|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128037": { + "content": "<|reserved_special_token_29|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128038": { + "content": "<|reserved_special_token_30|>", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128039": { + "content": "<|reserved_special_token_31|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128040": { + "content": "<|reserved_special_token_32|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128041": { + "content": "<|reserved_special_token_33|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128042": { + "content": "<|reserved_special_token_34|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128043": { + "content": "<|reserved_special_token_35|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128044": { + "content": "<|reserved_special_token_36|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128045": { + "content": "<|reserved_special_token_37|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128046": { + "content": "<|reserved_special_token_38|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128047": { + "content": "<|reserved_special_token_39|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128048": { + "content": "<|reserved_special_token_40|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128049": { + "content": "<|reserved_special_token_41|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128050": { + "content": 
"<|reserved_special_token_42|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128051": { + "content": "<|reserved_special_token_43|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128052": { + "content": "<|reserved_special_token_44|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128053": { + "content": "<|reserved_special_token_45|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128054": { + "content": "<|reserved_special_token_46|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128055": { + "content": "<|reserved_special_token_47|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128056": { + "content": "<|reserved_special_token_48|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128057": { + "content": "<|reserved_special_token_49|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128058": { + "content": "<|reserved_special_token_50|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128059": { + "content": "<|reserved_special_token_51|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128060": { + "content": "<|reserved_special_token_52|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128061": { + "content": "<|reserved_special_token_53|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + 
"128062": { + "content": "<|reserved_special_token_54|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128063": { + "content": "<|reserved_special_token_55|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128064": { + "content": "<|reserved_special_token_56|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128065": { + "content": "<|reserved_special_token_57|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128066": { + "content": "<|reserved_special_token_58|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128067": { + "content": "<|reserved_special_token_59|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128068": { + "content": "<|reserved_special_token_60|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128069": { + "content": "<|reserved_special_token_61|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128070": { + "content": "<|reserved_special_token_62|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128071": { + "content": "<|reserved_special_token_63|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128072": { + "content": "<|reserved_special_token_64|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128073": { + "content": "<|reserved_special_token_65|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + 
"special": true + }, + "128074": { + "content": "<|reserved_special_token_66|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128075": { + "content": "<|reserved_special_token_67|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128076": { + "content": "<|reserved_special_token_68|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128077": { + "content": "<|reserved_special_token_69|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128078": { + "content": "<|reserved_special_token_70|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128079": { + "content": "<|reserved_special_token_71|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128080": { + "content": "<|reserved_special_token_72|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128081": { + "content": "<|reserved_special_token_73|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128082": { + "content": "<|reserved_special_token_74|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128083": { + "content": "<|reserved_special_token_75|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128084": { + "content": "<|reserved_special_token_76|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128085": { + "content": "<|reserved_special_token_77|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": true + }, + "128086": { + "content": "<|reserved_special_token_78|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128087": { + "content": "<|reserved_special_token_79|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128088": { + "content": "<|reserved_special_token_80|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128089": { + "content": "<|reserved_special_token_81|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128090": { + "content": "<|reserved_special_token_82|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128091": { + "content": "<|reserved_special_token_83|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128092": { + "content": "<|reserved_special_token_84|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128093": { + "content": "<|reserved_special_token_85|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128094": { + "content": "<|reserved_special_token_86|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128095": { + "content": "<|reserved_special_token_87|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128096": { + "content": "<|reserved_special_token_88|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128097": { + "content": "<|reserved_special_token_89|>", + "lstrip": false, + "normalized": false, + 
"rstrip": false, + "single_word": false, + "special": true + }, + "128098": { + "content": "<|reserved_special_token_90|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128099": { + "content": "<|reserved_special_token_91|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128100": { + "content": "<|reserved_special_token_92|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128101": { + "content": "<|reserved_special_token_93|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128102": { + "content": "<|reserved_special_token_94|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128103": { + "content": "<|reserved_special_token_95|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128104": { + "content": "<|reserved_special_token_96|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128105": { + "content": "<|reserved_special_token_97|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128106": { + "content": "<|reserved_special_token_98|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128107": { + "content": "<|reserved_special_token_99|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128108": { + "content": "<|reserved_special_token_100|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128109": { + "content": "<|reserved_special_token_101|>", + "lstrip": false, + 
"normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128110": { + "content": "<|reserved_special_token_102|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128111": { + "content": "<|reserved_special_token_103|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128112": { + "content": "<|reserved_special_token_104|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128113": { + "content": "<|reserved_special_token_105|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128114": { + "content": "<|reserved_special_token_106|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128115": { + "content": "<|reserved_special_token_107|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128116": { + "content": "<|reserved_special_token_108|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128117": { + "content": "<|reserved_special_token_109|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128118": { + "content": "<|reserved_special_token_110|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128119": { + "content": "<|reserved_special_token_111|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128120": { + "content": "<|reserved_special_token_112|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128121": { + "content": 
"<|reserved_special_token_113|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128122": { + "content": "<|reserved_special_token_114|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128123": { + "content": "<|reserved_special_token_115|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128124": { + "content": "<|reserved_special_token_116|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128125": { + "content": "<|reserved_special_token_117|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128126": { + "content": "<|reserved_special_token_118|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128127": { + "content": "<|reserved_special_token_119|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128128": { + "content": "<|reserved_special_token_120|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128129": { + "content": "<|reserved_special_token_121|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128130": { + "content": "<|reserved_special_token_122|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128131": { + "content": "<|reserved_special_token_123|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128132": { + "content": "<|reserved_special_token_124|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + 
}, + "128133": { + "content": "<|reserved_special_token_125|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128134": { + "content": "<|reserved_special_token_126|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128135": { + "content": "<|reserved_special_token_127|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128136": { + "content": "<|reserved_special_token_128|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128137": { + "content": "<|reserved_special_token_129|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128138": { + "content": "<|reserved_special_token_130|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128139": { + "content": "<|reserved_special_token_131|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128140": { + "content": "<|reserved_special_token_132|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128141": { + "content": "<|reserved_special_token_133|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128142": { + "content": "<|reserved_special_token_134|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128143": { + "content": "<|reserved_special_token_135|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128144": { + "content": "<|reserved_special_token_136|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": true + }, + "128145": { + "content": "<|reserved_special_token_137|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128146": { + "content": "<|reserved_special_token_138|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128147": { + "content": "<|reserved_special_token_139|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128148": { + "content": "<|reserved_special_token_140|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128149": { + "content": "<|reserved_special_token_141|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128150": { + "content": "<|reserved_special_token_142|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128151": { + "content": "<|reserved_special_token_143|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128152": { + "content": "<|reserved_special_token_144|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128153": { + "content": "<|reserved_special_token_145|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128154": { + "content": "<|reserved_special_token_146|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128155": { + "content": "<|reserved_special_token_147|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128156": { + "content": "<|reserved_special_token_148|>", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128157": { + "content": "<|reserved_special_token_149|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128158": { + "content": "<|reserved_special_token_150|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128159": { + "content": "<|reserved_special_token_151|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128160": { + "content": "<|reserved_special_token_152|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128161": { + "content": "<|reserved_special_token_153|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128162": { + "content": "<|reserved_special_token_154|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128163": { + "content": "<|reserved_special_token_155|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128164": { + "content": "<|reserved_special_token_156|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128165": { + "content": "<|reserved_special_token_157|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128166": { + "content": "<|reserved_special_token_158|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128167": { + "content": "<|reserved_special_token_159|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128168": { + "content": "<|reserved_special_token_160|>", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128169": { + "content": "<|reserved_special_token_161|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128170": { + "content": "<|reserved_special_token_162|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128171": { + "content": "<|reserved_special_token_163|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128172": { + "content": "<|reserved_special_token_164|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128173": { + "content": "<|reserved_special_token_165|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128174": { + "content": "<|reserved_special_token_166|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128175": { + "content": "<|reserved_special_token_167|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128176": { + "content": "<|reserved_special_token_168|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128177": { + "content": "<|reserved_special_token_169|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128178": { + "content": "<|reserved_special_token_170|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128179": { + "content": "<|reserved_special_token_171|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128180": { + "content": 
"<|reserved_special_token_172|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128181": { + "content": "<|reserved_special_token_173|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128182": { + "content": "<|reserved_special_token_174|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128183": { + "content": "<|reserved_special_token_175|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128184": { + "content": "<|reserved_special_token_176|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128185": { + "content": "<|reserved_special_token_177|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128186": { + "content": "<|reserved_special_token_178|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128187": { + "content": "<|reserved_special_token_179|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128188": { + "content": "<|reserved_special_token_180|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128189": { + "content": "<|reserved_special_token_181|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128190": { + "content": "<|reserved_special_token_182|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128191": { + "content": "<|reserved_special_token_183|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + 
}, + "128192": { + "content": "<|reserved_special_token_184|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128193": { + "content": "<|reserved_special_token_185|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128194": { + "content": "<|reserved_special_token_186|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128195": { + "content": "<|reserved_special_token_187|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128196": { + "content": "<|reserved_special_token_188|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128197": { + "content": "<|reserved_special_token_189|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128198": { + "content": "<|reserved_special_token_190|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128199": { + "content": "<|reserved_special_token_191|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128200": { + "content": "<|reserved_special_token_192|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128201": { + "content": "<|reserved_special_token_193|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128202": { + "content": "<|reserved_special_token_194|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128203": { + "content": "<|reserved_special_token_195|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": true + }, + "128204": { + "content": "<|reserved_special_token_196|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128205": { + "content": "<|reserved_special_token_197|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128206": { + "content": "<|reserved_special_token_198|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128207": { + "content": "<|reserved_special_token_199|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128208": { + "content": "<|reserved_special_token_200|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128209": { + "content": "<|reserved_special_token_201|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128210": { + "content": "<|reserved_special_token_202|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128211": { + "content": "<|reserved_special_token_203|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128212": { + "content": "<|reserved_special_token_204|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128213": { + "content": "<|reserved_special_token_205|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128214": { + "content": "<|reserved_special_token_206|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128215": { + "content": "<|reserved_special_token_207|>", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128216": { + "content": "<|reserved_special_token_208|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128217": { + "content": "<|reserved_special_token_209|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128218": { + "content": "<|reserved_special_token_210|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128219": { + "content": "<|reserved_special_token_211|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128220": { + "content": "<|reserved_special_token_212|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128221": { + "content": "<|reserved_special_token_213|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128222": { + "content": "<|reserved_special_token_214|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128223": { + "content": "<|reserved_special_token_215|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128224": { + "content": "<|reserved_special_token_216|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128225": { + "content": "<|reserved_special_token_217|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128226": { + "content": "<|reserved_special_token_218|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128227": { + "content": "<|reserved_special_token_219|>", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128228": { + "content": "<|reserved_special_token_220|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128229": { + "content": "<|reserved_special_token_221|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128230": { + "content": "<|reserved_special_token_222|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128231": { + "content": "<|reserved_special_token_223|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128232": { + "content": "<|reserved_special_token_224|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128233": { + "content": "<|reserved_special_token_225|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128234": { + "content": "<|reserved_special_token_226|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128235": { + "content": "<|reserved_special_token_227|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128236": { + "content": "<|reserved_special_token_228|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128237": { + "content": "<|reserved_special_token_229|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128238": { + "content": "<|reserved_special_token_230|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128239": { + "content": 
"<|reserved_special_token_231|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128240": { + "content": "<|reserved_special_token_232|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128241": { + "content": "<|reserved_special_token_233|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128242": { + "content": "<|reserved_special_token_234|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128243": { + "content": "<|reserved_special_token_235|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128244": { + "content": "<|reserved_special_token_236|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128245": { + "content": "<|reserved_special_token_237|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128246": { + "content": "<|reserved_special_token_238|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128247": { + "content": "<|reserved_special_token_239|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128248": { + "content": "<|reserved_special_token_240|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128249": { + "content": "<|reserved_special_token_241|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128250": { + "content": "<|reserved_special_token_242|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + 
}, + "128251": { + "content": "<|reserved_special_token_243|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128252": { + "content": "<|reserved_special_token_244|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128253": { + "content": "<|reserved_special_token_245|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128254": { + "content": "<|reserved_special_token_246|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128255": { + "content": "<|reserved_special_token_247|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + } + }, + "additional_special_tokens": [ + "<|eom_id|>" + ], + "bos_token": "<|begin_of_text|>", + "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. 
#}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n", + "clean_up_tokenization_spaces": true, + "eos_token": "<|eot_id|>", + "model_input_names": 
[ + "input_ids", + "attention_mask" + ], + "model_max_length": 131072, + "pad_token": "<|eot_id|>", + "padding_side": "right", + "split_special_tokens": false, + "tokenizer_class": "PreTrainedTokenizerFast" +} diff --git a/train_results.json b/train_results.json new file mode 100644 index 0000000000000000000000000000000000000000..78fb4a8fcb13b4b8db3a0ca67d3ebc6bf903e822 --- /dev/null +++ b/train_results.json @@ -0,0 +1,8 @@ +{ + "epoch": 2.9938900203665986, + "total_flos": 48064094208000.0, + "train_loss": 0.08951896556025865, + "train_runtime": 6057.9419, + "train_samples_per_second": 1.945, + "train_steps_per_second": 0.121 +} \ No newline at end of file diff --git a/trainer_log.jsonl b/trainer_log.jsonl new file mode 100644 index 0000000000000000000000000000000000000000..f9c0bbe4e4d94d640daee3f740234b219aec0b18 --- /dev/null +++ b/trainer_log.jsonl @@ -0,0 +1,736 @@ +{"current_steps": 1, "total_steps": 735, "loss": 0.3301, "lr": 1.3513513513513515e-07, "epoch": 0.004073319755600814, "percentage": 0.14, "elapsed_time": "0:00:15", "remaining_time": "3:12:18"} +{"current_steps": 2, "total_steps": 735, "loss": 0.3036, "lr": 2.702702702702703e-07, "epoch": 0.008146639511201629, "percentage": 0.27, "elapsed_time": "0:00:28", "remaining_time": "2:56:00"} +{"current_steps": 3, "total_steps": 735, "loss": 0.3424, "lr": 4.0540540540540546e-07, "epoch": 0.012219959266802444, "percentage": 0.41, "elapsed_time": "0:00:36", "remaining_time": "2:28:36"} +{"current_steps": 4, "total_steps": 735, "loss": 0.3184, "lr": 5.405405405405406e-07, "epoch": 0.016293279022403257, "percentage": 0.54, "elapsed_time": "0:00:43", "remaining_time": "2:13:02"} +{"current_steps": 5, "total_steps": 735, "loss": 0.3138, "lr": 6.756756756756758e-07, "epoch": 0.020366598778004074, "percentage": 0.68, "elapsed_time": "0:00:54", "remaining_time": "2:13:28"} +{"current_steps": 6, "total_steps": 735, "loss": 0.2982, "lr": 8.108108108108109e-07, "epoch": 0.024439918533604887, "percentage": 0.82, 
"elapsed_time": "0:01:04", "remaining_time": "2:09:48"} +{"current_steps": 7, "total_steps": 735, "loss": 0.3063, "lr": 9.459459459459461e-07, "epoch": 0.028513238289205704, "percentage": 0.95, "elapsed_time": "0:01:11", "remaining_time": "2:03:35"} +{"current_steps": 8, "total_steps": 735, "loss": 0.2749, "lr": 1.0810810810810812e-06, "epoch": 0.032586558044806514, "percentage": 1.09, "elapsed_time": "0:01:20", "remaining_time": "2:01:13"} +{"current_steps": 9, "total_steps": 735, "loss": 0.2718, "lr": 1.2162162162162164e-06, "epoch": 0.03665987780040733, "percentage": 1.22, "elapsed_time": "0:01:27", "remaining_time": "1:58:14"} +{"current_steps": 10, "total_steps": 735, "loss": 0.2238, "lr": 1.3513513513513515e-06, "epoch": 0.04073319755600815, "percentage": 1.36, "elapsed_time": "0:01:35", "remaining_time": "1:55:19"} +{"current_steps": 11, "total_steps": 735, "loss": 0.2513, "lr": 1.4864864864864868e-06, "epoch": 0.04480651731160896, "percentage": 1.5, "elapsed_time": "0:01:43", "remaining_time": "1:54:00"} +{"current_steps": 12, "total_steps": 735, "loss": 0.2327, "lr": 1.6216216216216219e-06, "epoch": 0.048879837067209775, "percentage": 1.63, "elapsed_time": "0:01:52", "remaining_time": "1:52:43"} +{"current_steps": 13, "total_steps": 735, "loss": 0.239, "lr": 1.756756756756757e-06, "epoch": 0.05295315682281059, "percentage": 1.77, "elapsed_time": "0:01:59", "remaining_time": "1:50:33"} +{"current_steps": 14, "total_steps": 735, "loss": 0.277, "lr": 1.8918918918918922e-06, "epoch": 0.05702647657841141, "percentage": 1.9, "elapsed_time": "0:02:06", "remaining_time": "1:48:31"} +{"current_steps": 15, "total_steps": 735, "loss": 0.2495, "lr": 2.0270270270270273e-06, "epoch": 0.06109979633401222, "percentage": 2.04, "elapsed_time": "0:02:13", "remaining_time": "1:47:03"} +{"current_steps": 16, "total_steps": 735, "loss": 0.2425, "lr": 2.1621621621621623e-06, "epoch": 0.06517311608961303, "percentage": 2.18, "elapsed_time": "0:02:21", "remaining_time": "1:45:51"} 
+{"current_steps": 17, "total_steps": 735, "loss": 0.1985, "lr": 2.297297297297298e-06, "epoch": 0.06924643584521385, "percentage": 2.31, "elapsed_time": "0:02:29", "remaining_time": "1:44:56"} +{"current_steps": 18, "total_steps": 735, "loss": 0.1822, "lr": 2.432432432432433e-06, "epoch": 0.07331975560081466, "percentage": 2.45, "elapsed_time": "0:02:36", "remaining_time": "1:43:43"} +{"current_steps": 19, "total_steps": 735, "loss": 0.2252, "lr": 2.5675675675675675e-06, "epoch": 0.07739307535641547, "percentage": 2.59, "elapsed_time": "0:02:41", "remaining_time": "1:41:41"} +{"current_steps": 20, "total_steps": 735, "loss": 0.1862, "lr": 2.702702702702703e-06, "epoch": 0.0814663951120163, "percentage": 2.72, "elapsed_time": "0:02:48", "remaining_time": "1:40:39"} +{"current_steps": 21, "total_steps": 735, "loss": 0.2098, "lr": 2.837837837837838e-06, "epoch": 0.0855397148676171, "percentage": 2.86, "elapsed_time": "0:02:55", "remaining_time": "1:39:40"} +{"current_steps": 22, "total_steps": 735, "loss": 0.1699, "lr": 2.9729729729729736e-06, "epoch": 0.08961303462321792, "percentage": 2.99, "elapsed_time": "0:03:05", "remaining_time": "1:40:26"} +{"current_steps": 23, "total_steps": 735, "loss": 0.1868, "lr": 3.1081081081081082e-06, "epoch": 0.09368635437881874, "percentage": 3.13, "elapsed_time": "0:03:15", "remaining_time": "1:40:59"} +{"current_steps": 24, "total_steps": 735, "loss": 0.1669, "lr": 3.2432432432432437e-06, "epoch": 0.09775967413441955, "percentage": 3.27, "elapsed_time": "0:03:23", "remaining_time": "1:40:35"} +{"current_steps": 25, "total_steps": 735, "loss": 0.1507, "lr": 3.3783783783783788e-06, "epoch": 0.10183299389002037, "percentage": 3.4, "elapsed_time": "0:03:37", "remaining_time": "1:43:00"} +{"current_steps": 26, "total_steps": 735, "loss": 0.1534, "lr": 3.513513513513514e-06, "epoch": 0.10590631364562118, "percentage": 3.54, "elapsed_time": "0:03:44", "remaining_time": "1:42:14"} +{"current_steps": 27, "total_steps": 735, "loss": 
0.1387, "lr": 3.648648648648649e-06, "epoch": 0.109979633401222, "percentage": 3.67, "elapsed_time": "0:03:53", "remaining_time": "1:42:02"} +{"current_steps": 28, "total_steps": 735, "loss": 0.1406, "lr": 3.7837837837837844e-06, "epoch": 0.11405295315682282, "percentage": 3.81, "elapsed_time": "0:04:00", "remaining_time": "1:41:01"} +{"current_steps": 29, "total_steps": 735, "loss": 0.1323, "lr": 3.918918918918919e-06, "epoch": 0.11812627291242363, "percentage": 3.95, "elapsed_time": "0:04:12", "remaining_time": "1:42:34"} +{"current_steps": 30, "total_steps": 735, "loss": 0.1577, "lr": 4.0540540540540545e-06, "epoch": 0.12219959266802444, "percentage": 4.08, "elapsed_time": "0:04:19", "remaining_time": "1:41:47"} +{"current_steps": 31, "total_steps": 735, "loss": 0.1345, "lr": 4.189189189189189e-06, "epoch": 0.12627291242362526, "percentage": 4.22, "elapsed_time": "0:04:26", "remaining_time": "1:40:42"} +{"current_steps": 32, "total_steps": 735, "loss": 0.1641, "lr": 4.324324324324325e-06, "epoch": 0.13034623217922606, "percentage": 4.35, "elapsed_time": "0:04:33", "remaining_time": "1:40:09"} +{"current_steps": 33, "total_steps": 735, "loss": 0.1568, "lr": 4.45945945945946e-06, "epoch": 0.13441955193482688, "percentage": 4.49, "elapsed_time": "0:04:47", "remaining_time": "1:41:56"} +{"current_steps": 34, "total_steps": 735, "loss": 0.1158, "lr": 4.594594594594596e-06, "epoch": 0.1384928716904277, "percentage": 4.63, "elapsed_time": "0:04:54", "remaining_time": "1:41:21"} +{"current_steps": 35, "total_steps": 735, "loss": 0.1553, "lr": 4.72972972972973e-06, "epoch": 0.1425661914460285, "percentage": 4.76, "elapsed_time": "0:05:05", "remaining_time": "1:41:50"} +{"current_steps": 36, "total_steps": 735, "loss": 0.1472, "lr": 4.864864864864866e-06, "epoch": 0.14663951120162932, "percentage": 4.9, "elapsed_time": "0:05:12", "remaining_time": "1:41:07"} +{"current_steps": 37, "total_steps": 735, "loss": 0.1216, "lr": 5e-06, "epoch": 0.15071283095723015, "percentage": 
5.03, "elapsed_time": "0:05:20", "remaining_time": "1:40:40"} +{"current_steps": 38, "total_steps": 735, "loss": 0.1383, "lr": 5.135135135135135e-06, "epoch": 0.15478615071283094, "percentage": 5.17, "elapsed_time": "0:05:28", "remaining_time": "1:40:22"} +{"current_steps": 39, "total_steps": 735, "loss": 0.1488, "lr": 5.2702702702702705e-06, "epoch": 0.15885947046843177, "percentage": 5.31, "elapsed_time": "0:05:35", "remaining_time": "1:39:38"} +{"current_steps": 40, "total_steps": 735, "loss": 0.1658, "lr": 5.405405405405406e-06, "epoch": 0.1629327902240326, "percentage": 5.44, "elapsed_time": "0:05:42", "remaining_time": "1:39:18"} +{"current_steps": 41, "total_steps": 735, "loss": 0.1244, "lr": 5.540540540540541e-06, "epoch": 0.1670061099796334, "percentage": 5.58, "elapsed_time": "0:05:50", "remaining_time": "1:38:46"} +{"current_steps": 42, "total_steps": 735, "loss": 0.1129, "lr": 5.675675675675676e-06, "epoch": 0.1710794297352342, "percentage": 5.71, "elapsed_time": "0:05:59", "remaining_time": "1:38:45"} +{"current_steps": 43, "total_steps": 735, "loss": 0.176, "lr": 5.810810810810811e-06, "epoch": 0.17515274949083504, "percentage": 5.85, "elapsed_time": "0:06:06", "remaining_time": "1:38:20"} +{"current_steps": 44, "total_steps": 735, "loss": 0.1235, "lr": 5.945945945945947e-06, "epoch": 0.17922606924643583, "percentage": 5.99, "elapsed_time": "0:06:13", "remaining_time": "1:37:40"} +{"current_steps": 45, "total_steps": 735, "loss": 0.1352, "lr": 6.081081081081082e-06, "epoch": 0.18329938900203666, "percentage": 6.12, "elapsed_time": "0:06:19", "remaining_time": "1:36:56"} +{"current_steps": 46, "total_steps": 735, "loss": 0.1375, "lr": 6.2162162162162164e-06, "epoch": 0.18737270875763748, "percentage": 6.26, "elapsed_time": "0:06:28", "remaining_time": "1:36:56"} +{"current_steps": 47, "total_steps": 735, "loss": 0.1451, "lr": 6.351351351351351e-06, "epoch": 0.19144602851323828, "percentage": 6.39, "elapsed_time": "0:06:35", "remaining_time": "1:36:32"} 
+{"current_steps": 48, "total_steps": 735, "loss": 0.1073, "lr": 6.486486486486487e-06, "epoch": 0.1955193482688391, "percentage": 6.53, "elapsed_time": "0:06:42", "remaining_time": "1:36:02"} +{"current_steps": 49, "total_steps": 735, "loss": 0.1502, "lr": 6.621621621621622e-06, "epoch": 0.19959266802443992, "percentage": 6.67, "elapsed_time": "0:06:53", "remaining_time": "1:36:24"} +{"current_steps": 50, "total_steps": 735, "loss": 0.116, "lr": 6.7567567567567575e-06, "epoch": 0.20366598778004075, "percentage": 6.8, "elapsed_time": "0:07:01", "remaining_time": "1:36:17"} +{"current_steps": 51, "total_steps": 735, "loss": 0.1438, "lr": 6.891891891891892e-06, "epoch": 0.20773930753564154, "percentage": 6.94, "elapsed_time": "0:07:11", "remaining_time": "1:36:31"} +{"current_steps": 52, "total_steps": 735, "loss": 0.1143, "lr": 7.027027027027028e-06, "epoch": 0.21181262729124237, "percentage": 7.07, "elapsed_time": "0:07:20", "remaining_time": "1:36:28"} +{"current_steps": 53, "total_steps": 735, "loss": 0.1443, "lr": 7.162162162162163e-06, "epoch": 0.2158859470468432, "percentage": 7.21, "elapsed_time": "0:07:29", "remaining_time": "1:36:27"} +{"current_steps": 54, "total_steps": 735, "loss": 0.1341, "lr": 7.297297297297298e-06, "epoch": 0.219959266802444, "percentage": 7.35, "elapsed_time": "0:07:37", "remaining_time": "1:36:03"} +{"current_steps": 55, "total_steps": 735, "loss": 0.1283, "lr": 7.4324324324324324e-06, "epoch": 0.2240325865580448, "percentage": 7.48, "elapsed_time": "0:07:47", "remaining_time": "1:36:17"} +{"current_steps": 56, "total_steps": 735, "loss": 0.1302, "lr": 7.567567567567569e-06, "epoch": 0.22810590631364563, "percentage": 7.62, "elapsed_time": "0:07:55", "remaining_time": "1:36:04"} +{"current_steps": 57, "total_steps": 735, "loss": 0.113, "lr": 7.702702702702704e-06, "epoch": 0.23217922606924643, "percentage": 7.76, "elapsed_time": "0:08:03", "remaining_time": "1:35:46"} +{"current_steps": 58, "total_steps": 735, "loss": 0.1293, "lr": 
7.837837837837838e-06, "epoch": 0.23625254582484725, "percentage": 7.89, "elapsed_time": "0:08:09", "remaining_time": "1:35:13"} +{"current_steps": 59, "total_steps": 735, "loss": 0.164, "lr": 7.972972972972974e-06, "epoch": 0.24032586558044808, "percentage": 8.03, "elapsed_time": "0:08:15", "remaining_time": "1:34:39"} +{"current_steps": 60, "total_steps": 735, "loss": 0.1548, "lr": 8.108108108108109e-06, "epoch": 0.24439918533604887, "percentage": 8.16, "elapsed_time": "0:08:24", "remaining_time": "1:34:34"} +{"current_steps": 61, "total_steps": 735, "loss": 0.1225, "lr": 8.243243243243245e-06, "epoch": 0.2484725050916497, "percentage": 8.3, "elapsed_time": "0:08:31", "remaining_time": "1:34:13"} +{"current_steps": 62, "total_steps": 735, "loss": 0.1175, "lr": 8.378378378378378e-06, "epoch": 0.2525458248472505, "percentage": 8.44, "elapsed_time": "0:08:39", "remaining_time": "1:33:54"} +{"current_steps": 63, "total_steps": 735, "loss": 0.1204, "lr": 8.513513513513514e-06, "epoch": 0.25661914460285135, "percentage": 8.57, "elapsed_time": "0:08:49", "remaining_time": "1:34:04"} +{"current_steps": 64, "total_steps": 735, "loss": 0.1253, "lr": 8.64864864864865e-06, "epoch": 0.2606924643584521, "percentage": 8.71, "elapsed_time": "0:08:56", "remaining_time": "1:33:41"} +{"current_steps": 65, "total_steps": 735, "loss": 0.191, "lr": 8.783783783783785e-06, "epoch": 0.26476578411405294, "percentage": 8.84, "elapsed_time": "0:09:05", "remaining_time": "1:33:39"} +{"current_steps": 66, "total_steps": 735, "loss": 0.1287, "lr": 8.91891891891892e-06, "epoch": 0.26883910386965376, "percentage": 8.98, "elapsed_time": "0:09:13", "remaining_time": "1:33:31"} +{"current_steps": 67, "total_steps": 735, "loss": 0.138, "lr": 9.054054054054054e-06, "epoch": 0.2729124236252546, "percentage": 9.12, "elapsed_time": "0:09:21", "remaining_time": "1:33:18"} +{"current_steps": 68, "total_steps": 735, "loss": 0.1259, "lr": 9.189189189189191e-06, "epoch": 0.2769857433808554, "percentage": 
9.25, "elapsed_time": "0:09:28", "remaining_time": "1:32:53"} +{"current_steps": 69, "total_steps": 735, "loss": 0.1292, "lr": 9.324324324324325e-06, "epoch": 0.28105906313645623, "percentage": 9.39, "elapsed_time": "0:09:36", "remaining_time": "1:32:46"} +{"current_steps": 70, "total_steps": 735, "loss": 0.1142, "lr": 9.45945945945946e-06, "epoch": 0.285132382892057, "percentage": 9.52, "elapsed_time": "0:09:44", "remaining_time": "1:32:30"} +{"current_steps": 71, "total_steps": 735, "loss": 0.1188, "lr": 9.594594594594594e-06, "epoch": 0.2892057026476578, "percentage": 9.66, "elapsed_time": "0:09:50", "remaining_time": "1:32:05"} +{"current_steps": 72, "total_steps": 735, "loss": 0.1052, "lr": 9.729729729729732e-06, "epoch": 0.29327902240325865, "percentage": 9.8, "elapsed_time": "0:09:58", "remaining_time": "1:31:52"} +{"current_steps": 73, "total_steps": 735, "loss": 0.1246, "lr": 9.864864864864865e-06, "epoch": 0.2973523421588595, "percentage": 9.93, "elapsed_time": "0:10:07", "remaining_time": "1:31:49"} +{"current_steps": 74, "total_steps": 735, "loss": 0.1651, "lr": 1e-05, "epoch": 0.3014256619144603, "percentage": 10.07, "elapsed_time": "0:10:15", "remaining_time": "1:31:38"} +{"current_steps": 75, "total_steps": 735, "loss": 0.1259, "lr": 9.99994352762958e-06, "epoch": 0.3054989816700611, "percentage": 10.2, "elapsed_time": "0:10:22", "remaining_time": "1:31:18"} +{"current_steps": 76, "total_steps": 735, "loss": 0.1485, "lr": 9.999774111793974e-06, "epoch": 0.3095723014256619, "percentage": 10.34, "elapsed_time": "0:10:28", "remaining_time": "1:30:52"} +{"current_steps": 77, "total_steps": 735, "loss": 0.1708, "lr": 9.999491756320105e-06, "epoch": 0.3136456211812627, "percentage": 10.48, "elapsed_time": "0:10:36", "remaining_time": "1:30:39"} +{"current_steps": 78, "total_steps": 735, "loss": 0.1483, "lr": 9.99909646758609e-06, "epoch": 0.31771894093686354, "percentage": 10.61, "elapsed_time": "0:10:44", "remaining_time": "1:30:27"} +{"current_steps": 
79, "total_steps": 735, "loss": 0.1124, "lr": 9.99858825452108e-06, "epoch": 0.32179226069246436, "percentage": 10.75, "elapsed_time": "0:10:50", "remaining_time": "1:30:05"} +{"current_steps": 80, "total_steps": 735, "loss": 0.1849, "lr": 9.997967128605078e-06, "epoch": 0.3258655804480652, "percentage": 10.88, "elapsed_time": "0:11:01", "remaining_time": "1:30:17"} +{"current_steps": 81, "total_steps": 735, "loss": 0.1199, "lr": 9.997233103868664e-06, "epoch": 0.329938900203666, "percentage": 11.02, "elapsed_time": "0:11:08", "remaining_time": "1:29:56"} +{"current_steps": 82, "total_steps": 735, "loss": 0.1748, "lr": 9.996386196892683e-06, "epoch": 0.3340122199592668, "percentage": 11.16, "elapsed_time": "0:11:14", "remaining_time": "1:29:29"} +{"current_steps": 83, "total_steps": 735, "loss": 0.1449, "lr": 9.995426426807875e-06, "epoch": 0.3380855397148676, "percentage": 11.29, "elapsed_time": "0:11:21", "remaining_time": "1:29:13"} +{"current_steps": 84, "total_steps": 735, "loss": 0.1349, "lr": 9.994353815294438e-06, "epoch": 0.3421588594704684, "percentage": 11.43, "elapsed_time": "0:11:29", "remaining_time": "1:29:05"} +{"current_steps": 85, "total_steps": 735, "loss": 0.1111, "lr": 9.993168386581533e-06, "epoch": 0.34623217922606925, "percentage": 11.56, "elapsed_time": "0:11:36", "remaining_time": "1:28:43"} +{"current_steps": 86, "total_steps": 735, "loss": 0.1271, "lr": 9.991870167446751e-06, "epoch": 0.35030549898167007, "percentage": 11.7, "elapsed_time": "0:11:43", "remaining_time": "1:28:30"} +{"current_steps": 87, "total_steps": 735, "loss": 0.122, "lr": 9.990459187215498e-06, "epoch": 0.3543788187372709, "percentage": 11.84, "elapsed_time": "0:11:51", "remaining_time": "1:28:16"} +{"current_steps": 88, "total_steps": 735, "loss": 0.1429, "lr": 9.98893547776033e-06, "epoch": 0.35845213849287166, "percentage": 11.97, "elapsed_time": "0:11:58", "remaining_time": "1:27:59"} +{"current_steps": 89, "total_steps": 735, "loss": 0.1789, "lr": 
9.987299073500245e-06, "epoch": 0.3625254582484725, "percentage": 12.11, "elapsed_time": "0:12:04", "remaining_time": "1:27:38"} +{"current_steps": 90, "total_steps": 735, "loss": 0.1217, "lr": 9.985550011399889e-06, "epoch": 0.3665987780040733, "percentage": 12.24, "elapsed_time": "0:12:12", "remaining_time": "1:27:30"} +{"current_steps": 91, "total_steps": 735, "loss": 0.1517, "lr": 9.98368833096874e-06, "epoch": 0.37067209775967414, "percentage": 12.38, "elapsed_time": "0:12:23", "remaining_time": "1:27:39"} +{"current_steps": 92, "total_steps": 735, "loss": 0.1648, "lr": 9.981714074260196e-06, "epoch": 0.37474541751527496, "percentage": 12.52, "elapsed_time": "0:12:30", "remaining_time": "1:27:26"} +{"current_steps": 93, "total_steps": 735, "loss": 0.1173, "lr": 9.979627285870644e-06, "epoch": 0.3788187372708758, "percentage": 12.65, "elapsed_time": "0:12:36", "remaining_time": "1:27:04"} +{"current_steps": 94, "total_steps": 735, "loss": 0.2148, "lr": 9.977428012938437e-06, "epoch": 0.38289205702647655, "percentage": 12.79, "elapsed_time": "0:12:42", "remaining_time": "1:26:41"} +{"current_steps": 95, "total_steps": 735, "loss": 0.1272, "lr": 9.975116305142836e-06, "epoch": 0.3869653767820774, "percentage": 12.93, "elapsed_time": "0:12:50", "remaining_time": "1:26:30"} +{"current_steps": 96, "total_steps": 735, "loss": 0.1149, "lr": 9.97269221470289e-06, "epoch": 0.3910386965376782, "percentage": 13.06, "elapsed_time": "0:13:00", "remaining_time": "1:26:37"} +{"current_steps": 97, "total_steps": 735, "loss": 0.1081, "lr": 9.97015579637625e-06, "epoch": 0.395112016293279, "percentage": 13.2, "elapsed_time": "0:13:09", "remaining_time": "1:26:31"} +{"current_steps": 98, "total_steps": 735, "loss": 0.1249, "lr": 9.967507107457942e-06, "epoch": 0.39918533604887985, "percentage": 13.33, "elapsed_time": "0:13:18", "remaining_time": "1:26:28"} +{"current_steps": 99, "total_steps": 735, "loss": 0.1404, "lr": 9.96474620777906e-06, "epoch": 0.40325865580448067, 
"percentage": 13.47, "elapsed_time": "0:13:24", "remaining_time": "1:26:05"} +{"current_steps": 100, "total_steps": 735, "loss": 0.1433, "lr": 9.961873159705426e-06, "epoch": 0.4073319755600815, "percentage": 13.61, "elapsed_time": "0:13:31", "remaining_time": "1:25:50"} +{"current_steps": 101, "total_steps": 735, "loss": 0.1723, "lr": 9.95888802813617e-06, "epoch": 0.41140529531568226, "percentage": 13.74, "elapsed_time": "0:13:36", "remaining_time": "1:25:27"} +{"current_steps": 102, "total_steps": 735, "loss": 0.1219, "lr": 9.955790880502278e-06, "epoch": 0.4154786150712831, "percentage": 13.88, "elapsed_time": "0:13:43", "remaining_time": "1:25:11"} +{"current_steps": 103, "total_steps": 735, "loss": 0.1157, "lr": 9.952581786765057e-06, "epoch": 0.4195519348268839, "percentage": 14.01, "elapsed_time": "0:13:51", "remaining_time": "1:25:04"} +{"current_steps": 104, "total_steps": 735, "loss": 0.1642, "lr": 9.949260819414557e-06, "epoch": 0.42362525458248473, "percentage": 14.15, "elapsed_time": "0:13:57", "remaining_time": "1:24:43"} +{"current_steps": 105, "total_steps": 735, "loss": 0.1224, "lr": 9.945828053467939e-06, "epoch": 0.42769857433808556, "percentage": 14.29, "elapsed_time": "0:14:04", "remaining_time": "1:24:29"} +{"current_steps": 106, "total_steps": 735, "loss": 0.1596, "lr": 9.942283566467773e-06, "epoch": 0.4317718940936864, "percentage": 14.42, "elapsed_time": "0:14:10", "remaining_time": "1:24:09"} +{"current_steps": 107, "total_steps": 735, "loss": 0.1541, "lr": 9.938627438480295e-06, "epoch": 0.43584521384928715, "percentage": 14.56, "elapsed_time": "0:14:18", "remaining_time": "1:23:57"} +{"current_steps": 108, "total_steps": 735, "loss": 0.1533, "lr": 9.93485975209359e-06, "epoch": 0.439918533604888, "percentage": 14.69, "elapsed_time": "0:14:25", "remaining_time": "1:23:47"} +{"current_steps": 109, "total_steps": 735, "loss": 0.1539, "lr": 9.930980592415728e-06, "epoch": 0.4439918533604888, "percentage": 14.83, "elapsed_time": "0:14:32", 
"remaining_time": "1:23:33"} +{"current_steps": 110, "total_steps": 735, "loss": 0.2379, "lr": 9.926990047072849e-06, "epoch": 0.4480651731160896, "percentage": 14.97, "elapsed_time": "0:14:38", "remaining_time": "1:23:12"} +{"current_steps": 111, "total_steps": 735, "loss": 0.1181, "lr": 9.922888206207174e-06, "epoch": 0.45213849287169044, "percentage": 15.1, "elapsed_time": "0:14:49", "remaining_time": "1:23:22"} +{"current_steps": 112, "total_steps": 735, "loss": 0.1157, "lr": 9.918675162474974e-06, "epoch": 0.45621181262729127, "percentage": 15.24, "elapsed_time": "0:14:57", "remaining_time": "1:23:13"} +{"current_steps": 113, "total_steps": 735, "loss": 0.1671, "lr": 9.914351011044472e-06, "epoch": 0.46028513238289204, "percentage": 15.37, "elapsed_time": "0:15:08", "remaining_time": "1:23:23"} +{"current_steps": 114, "total_steps": 735, "loss": 0.1094, "lr": 9.909915849593705e-06, "epoch": 0.46435845213849286, "percentage": 15.51, "elapsed_time": "0:15:16", "remaining_time": "1:23:12"} +{"current_steps": 115, "total_steps": 735, "loss": 0.1205, "lr": 9.905369778308304e-06, "epoch": 0.4684317718940937, "percentage": 15.65, "elapsed_time": "0:15:23", "remaining_time": "1:22:59"} +{"current_steps": 116, "total_steps": 735, "loss": 0.1551, "lr": 9.900712899879237e-06, "epoch": 0.4725050916496945, "percentage": 15.78, "elapsed_time": "0:15:29", "remaining_time": "1:22:42"} +{"current_steps": 117, "total_steps": 735, "loss": 0.1402, "lr": 9.895945319500488e-06, "epoch": 0.47657841140529533, "percentage": 15.92, "elapsed_time": "0:15:35", "remaining_time": "1:22:22"} +{"current_steps": 118, "total_steps": 735, "loss": 0.1381, "lr": 9.891067144866687e-06, "epoch": 0.48065173116089616, "percentage": 16.05, "elapsed_time": "0:15:42", "remaining_time": "1:22:09"} +{"current_steps": 119, "total_steps": 735, "loss": 0.1038, "lr": 9.886078486170665e-06, "epoch": 0.4847250509164969, "percentage": 16.19, "elapsed_time": "0:15:49", "remaining_time": "1:21:57"} 
+{"current_steps": 120, "total_steps": 735, "loss": 0.1372, "lr": 9.880979456100974e-06, "epoch": 0.48879837067209775, "percentage": 16.33, "elapsed_time": "0:15:57", "remaining_time": "1:21:47"} +{"current_steps": 121, "total_steps": 735, "loss": 0.1322, "lr": 9.875770169839343e-06, "epoch": 0.49287169042769857, "percentage": 16.46, "elapsed_time": "0:16:04", "remaining_time": "1:21:33"} +{"current_steps": 122, "total_steps": 735, "loss": 0.1257, "lr": 9.870450745058066e-06, "epoch": 0.4969450101832994, "percentage": 16.6, "elapsed_time": "0:16:13", "remaining_time": "1:21:33"} +{"current_steps": 123, "total_steps": 735, "loss": 0.1317, "lr": 9.865021301917358e-06, "epoch": 0.5010183299389002, "percentage": 16.73, "elapsed_time": "0:16:21", "remaining_time": "1:21:23"} +{"current_steps": 124, "total_steps": 735, "loss": 0.1104, "lr": 9.859481963062623e-06, "epoch": 0.505091649694501, "percentage": 16.87, "elapsed_time": "0:16:27", "remaining_time": "1:21:07"} +{"current_steps": 125, "total_steps": 735, "loss": 0.124, "lr": 9.853832853621703e-06, "epoch": 0.5091649694501018, "percentage": 17.01, "elapsed_time": "0:16:34", "remaining_time": "1:20:53"} +{"current_steps": 126, "total_steps": 735, "loss": 0.1191, "lr": 9.848074101202037e-06, "epoch": 0.5132382892057027, "percentage": 17.14, "elapsed_time": "0:16:41", "remaining_time": "1:20:39"} +{"current_steps": 127, "total_steps": 735, "loss": 0.1188, "lr": 9.842205835887785e-06, "epoch": 0.5173116089613035, "percentage": 17.28, "elapsed_time": "0:16:51", "remaining_time": "1:20:43"} +{"current_steps": 128, "total_steps": 735, "loss": 0.1392, "lr": 9.836228190236892e-06, "epoch": 0.5213849287169042, "percentage": 17.41, "elapsed_time": "0:17:04", "remaining_time": "1:20:59"} +{"current_steps": 129, "total_steps": 735, "loss": 0.1331, "lr": 9.83014129927808e-06, "epoch": 0.5254582484725051, "percentage": 17.55, "elapsed_time": "0:17:13", "remaining_time": "1:20:53"} +{"current_steps": 130, "total_steps": 735, "loss": 
0.1393, "lr": 9.823945300507815e-06, "epoch": 0.5295315682281059, "percentage": 17.69, "elapsed_time": "0:17:19", "remaining_time": "1:20:39"} +{"current_steps": 131, "total_steps": 735, "loss": 0.1376, "lr": 9.817640333887194e-06, "epoch": 0.5336048879837068, "percentage": 17.82, "elapsed_time": "0:17:27", "remaining_time": "1:20:28"} +{"current_steps": 132, "total_steps": 735, "loss": 0.1075, "lr": 9.81122654183878e-06, "epoch": 0.5376782077393075, "percentage": 17.96, "elapsed_time": "0:17:35", "remaining_time": "1:20:21"} +{"current_steps": 133, "total_steps": 735, "loss": 0.1149, "lr": 9.804704069243389e-06, "epoch": 0.5417515274949084, "percentage": 18.1, "elapsed_time": "0:17:44", "remaining_time": "1:20:20"} +{"current_steps": 134, "total_steps": 735, "loss": 0.1077, "lr": 9.798073063436815e-06, "epoch": 0.5458248472505092, "percentage": 18.23, "elapsed_time": "0:17:52", "remaining_time": "1:20:12"} +{"current_steps": 135, "total_steps": 735, "loss": 0.1892, "lr": 9.791333674206507e-06, "epoch": 0.5498981670061099, "percentage": 18.37, "elapsed_time": "0:18:05", "remaining_time": "1:20:22"} +{"current_steps": 136, "total_steps": 735, "loss": 0.1075, "lr": 9.784486053788179e-06, "epoch": 0.5539714867617108, "percentage": 18.5, "elapsed_time": "0:18:12", "remaining_time": "1:20:12"} +{"current_steps": 137, "total_steps": 735, "loss": 0.1472, "lr": 9.77753035686237e-06, "epoch": 0.5580448065173116, "percentage": 18.64, "elapsed_time": "0:18:20", "remaining_time": "1:20:02"} +{"current_steps": 138, "total_steps": 735, "loss": 0.1598, "lr": 9.770466740550963e-06, "epoch": 0.5621181262729125, "percentage": 18.78, "elapsed_time": "0:18:26", "remaining_time": "1:19:48"} +{"current_steps": 139, "total_steps": 735, "loss": 0.1186, "lr": 9.763295364413616e-06, "epoch": 0.5661914460285132, "percentage": 18.91, "elapsed_time": "0:18:33", "remaining_time": "1:19:36"} +{"current_steps": 140, "total_steps": 735, "loss": 0.1386, "lr": 9.756016390444174e-06, "epoch": 
0.570264765784114, "percentage": 19.05, "elapsed_time": "0:18:42", "remaining_time": "1:19:29"} +{"current_steps": 141, "total_steps": 735, "loss": 0.1282, "lr": 9.748629983067004e-06, "epoch": 0.5743380855397149, "percentage": 19.18, "elapsed_time": "0:18:47", "remaining_time": "1:19:11"} +{"current_steps": 142, "total_steps": 735, "loss": 0.1754, "lr": 9.741136309133279e-06, "epoch": 0.5784114052953157, "percentage": 19.32, "elapsed_time": "0:18:53", "remaining_time": "1:18:53"} +{"current_steps": 143, "total_steps": 735, "loss": 0.1194, "lr": 9.733535537917211e-06, "epoch": 0.5824847250509165, "percentage": 19.46, "elapsed_time": "0:18:59", "remaining_time": "1:18:37"} +{"current_steps": 144, "total_steps": 735, "loss": 0.1162, "lr": 9.725827841112226e-06, "epoch": 0.5865580448065173, "percentage": 19.59, "elapsed_time": "0:19:09", "remaining_time": "1:18:36"} +{"current_steps": 145, "total_steps": 735, "loss": 0.1121, "lr": 9.718013392827087e-06, "epoch": 0.5906313645621182, "percentage": 19.73, "elapsed_time": "0:19:15", "remaining_time": "1:18:21"} +{"current_steps": 146, "total_steps": 735, "loss": 0.16, "lr": 9.710092369581966e-06, "epoch": 0.594704684317719, "percentage": 19.86, "elapsed_time": "0:19:22", "remaining_time": "1:18:11"} +{"current_steps": 147, "total_steps": 735, "loss": 0.1211, "lr": 9.702064950304442e-06, "epoch": 0.5987780040733197, "percentage": 20.0, "elapsed_time": "0:19:29", "remaining_time": "1:17:59"} +{"current_steps": 148, "total_steps": 735, "loss": 0.0946, "lr": 9.693931316325473e-06, "epoch": 0.6028513238289206, "percentage": 20.14, "elapsed_time": "0:19:37", "remaining_time": "1:17:50"} +{"current_steps": 149, "total_steps": 735, "loss": 0.1016, "lr": 9.685691651375297e-06, "epoch": 0.6069246435845214, "percentage": 20.27, "elapsed_time": "0:19:44", "remaining_time": "1:17:39"} +{"current_steps": 150, "total_steps": 735, "loss": 0.1014, "lr": 9.677346141579277e-06, "epoch": 0.6109979633401222, "percentage": 20.41, 
"elapsed_time": "0:19:53", "remaining_time": "1:17:34"} +{"current_steps": 151, "total_steps": 735, "loss": 0.1562, "lr": 9.668894975453705e-06, "epoch": 0.615071283095723, "percentage": 20.54, "elapsed_time": "0:20:01", "remaining_time": "1:17:27"} +{"current_steps": 152, "total_steps": 735, "loss": 0.1372, "lr": 9.66033834390153e-06, "epoch": 0.6191446028513238, "percentage": 20.68, "elapsed_time": "0:20:07", "remaining_time": "1:17:13"} +{"current_steps": 153, "total_steps": 735, "loss": 0.1254, "lr": 9.65167644020806e-06, "epoch": 0.6232179226069247, "percentage": 20.82, "elapsed_time": "0:20:17", "remaining_time": "1:17:09"} +{"current_steps": 154, "total_steps": 735, "loss": 0.0989, "lr": 9.64290946003659e-06, "epoch": 0.6272912423625254, "percentage": 20.95, "elapsed_time": "0:20:23", "remaining_time": "1:16:57"} +{"current_steps": 155, "total_steps": 735, "loss": 0.1013, "lr": 9.63403760142398e-06, "epoch": 0.6313645621181263, "percentage": 21.09, "elapsed_time": "0:20:31", "remaining_time": "1:16:46"} +{"current_steps": 156, "total_steps": 735, "loss": 0.1134, "lr": 9.625061064776183e-06, "epoch": 0.6354378818737271, "percentage": 21.22, "elapsed_time": "0:20:39", "remaining_time": "1:16:40"} +{"current_steps": 157, "total_steps": 735, "loss": 0.0939, "lr": 9.61598005286372e-06, "epoch": 0.639511201629328, "percentage": 21.36, "elapsed_time": "0:20:47", "remaining_time": "1:16:33"} +{"current_steps": 158, "total_steps": 735, "loss": 0.1785, "lr": 9.606794770817102e-06, "epoch": 0.6435845213849287, "percentage": 21.5, "elapsed_time": "0:20:55", "remaining_time": "1:16:26"} +{"current_steps": 159, "total_steps": 735, "loss": 0.1571, "lr": 9.597505426122184e-06, "epoch": 0.6476578411405295, "percentage": 21.63, "elapsed_time": "0:21:02", "remaining_time": "1:16:12"} +{"current_steps": 160, "total_steps": 735, "loss": 0.1745, "lr": 9.588112228615495e-06, "epoch": 0.6517311608961304, "percentage": 21.77, "elapsed_time": "0:21:10", "remaining_time": "1:16:06"} 
+{"current_steps": 161, "total_steps": 735, "loss": 0.1353, "lr": 9.57861539047949e-06, "epoch": 0.6558044806517311, "percentage": 21.9, "elapsed_time": "0:21:23", "remaining_time": "1:16:14"} +{"current_steps": 162, "total_steps": 735, "loss": 0.1521, "lr": 9.569015126237744e-06, "epoch": 0.659877800407332, "percentage": 22.04, "elapsed_time": "0:21:29", "remaining_time": "1:16:01"} +{"current_steps": 163, "total_steps": 735, "loss": 0.1161, "lr": 9.559311652750135e-06, "epoch": 0.6639511201629328, "percentage": 22.18, "elapsed_time": "0:21:37", "remaining_time": "1:15:51"} +{"current_steps": 164, "total_steps": 735, "loss": 0.0976, "lr": 9.549505189207924e-06, "epoch": 0.6680244399185336, "percentage": 22.31, "elapsed_time": "0:21:43", "remaining_time": "1:15:38"} +{"current_steps": 165, "total_steps": 735, "loss": 0.171, "lr": 9.539595957128803e-06, "epoch": 0.6720977596741344, "percentage": 22.45, "elapsed_time": "0:21:52", "remaining_time": "1:15:34"} +{"current_steps": 166, "total_steps": 735, "loss": 0.1159, "lr": 9.529584180351902e-06, "epoch": 0.6761710794297352, "percentage": 22.59, "elapsed_time": "0:22:03", "remaining_time": "1:15:36"} +{"current_steps": 167, "total_steps": 735, "loss": 0.1278, "lr": 9.519470085032733e-06, "epoch": 0.6802443991853361, "percentage": 22.72, "elapsed_time": "0:22:18", "remaining_time": "1:15:51"} +{"current_steps": 168, "total_steps": 735, "loss": 0.104, "lr": 9.509253899638066e-06, "epoch": 0.6843177189409368, "percentage": 22.86, "elapsed_time": "0:22:25", "remaining_time": "1:15:41"} +{"current_steps": 169, "total_steps": 735, "loss": 0.1682, "lr": 9.498935854940785e-06, "epoch": 0.6883910386965377, "percentage": 22.99, "elapsed_time": "0:22:32", "remaining_time": "1:15:31"} +{"current_steps": 170, "total_steps": 735, "loss": 0.1089, "lr": 9.488516184014667e-06, "epoch": 0.6924643584521385, "percentage": 23.13, "elapsed_time": "0:22:41", "remaining_time": "1:15:25"} +{"current_steps": 171, "total_steps": 735, "loss": 
0.1521, "lr": 9.477995122229117e-06, "epoch": 0.6965376782077393, "percentage": 23.27, "elapsed_time": "0:22:48", "remaining_time": "1:15:14"} +{"current_steps": 172, "total_steps": 735, "loss": 0.1012, "lr": 9.467372907243858e-06, "epoch": 0.7006109979633401, "percentage": 23.4, "elapsed_time": "0:22:56", "remaining_time": "1:15:06"} +{"current_steps": 173, "total_steps": 735, "loss": 0.117, "lr": 9.456649779003548e-06, "epoch": 0.7046843177189409, "percentage": 23.54, "elapsed_time": "0:23:08", "remaining_time": "1:15:09"} +{"current_steps": 174, "total_steps": 735, "loss": 0.1284, "lr": 9.44582597973238e-06, "epoch": 0.7087576374745418, "percentage": 23.67, "elapsed_time": "0:23:19", "remaining_time": "1:15:12"} +{"current_steps": 175, "total_steps": 735, "loss": 0.1429, "lr": 9.434901753928593e-06, "epoch": 0.7128309572301426, "percentage": 23.81, "elapsed_time": "0:23:28", "remaining_time": "1:15:08"} +{"current_steps": 176, "total_steps": 735, "loss": 0.1006, "lr": 9.423877348358956e-06, "epoch": 0.7169042769857433, "percentage": 23.95, "elapsed_time": "0:23:36", "remaining_time": "1:15:00"} +{"current_steps": 177, "total_steps": 735, "loss": 0.1042, "lr": 9.4127530120532e-06, "epoch": 0.7209775967413442, "percentage": 24.08, "elapsed_time": "0:23:43", "remaining_time": "1:14:48"} +{"current_steps": 178, "total_steps": 735, "loss": 0.1676, "lr": 9.401528996298375e-06, "epoch": 0.725050916496945, "percentage": 24.22, "elapsed_time": "0:23:49", "remaining_time": "1:14:34"} +{"current_steps": 179, "total_steps": 735, "loss": 0.1082, "lr": 9.390205554633193e-06, "epoch": 0.7291242362525459, "percentage": 24.35, "elapsed_time": "0:23:57", "remaining_time": "1:14:24"} +{"current_steps": 180, "total_steps": 735, "loss": 0.1401, "lr": 9.378782942842292e-06, "epoch": 0.7331975560081466, "percentage": 24.49, "elapsed_time": "0:24:04", "remaining_time": "1:14:13"} +{"current_steps": 181, "total_steps": 735, "loss": 0.1855, "lr": 9.367261418950459e-06, "epoch": 
0.7372708757637475, "percentage": 24.63, "elapsed_time": "0:24:14", "remaining_time": "1:14:10"} +{"current_steps": 182, "total_steps": 735, "loss": 0.1729, "lr": 9.355641243216798e-06, "epoch": 0.7413441955193483, "percentage": 24.76, "elapsed_time": "0:24:19", "remaining_time": "1:13:55"} +{"current_steps": 183, "total_steps": 735, "loss": 0.1078, "lr": 9.343922678128854e-06, "epoch": 0.745417515274949, "percentage": 24.9, "elapsed_time": "0:24:27", "remaining_time": "1:13:45"} +{"current_steps": 184, "total_steps": 735, "loss": 0.1239, "lr": 9.332105988396692e-06, "epoch": 0.7494908350305499, "percentage": 25.03, "elapsed_time": "0:24:38", "remaining_time": "1:13:47"} +{"current_steps": 185, "total_steps": 735, "loss": 0.1309, "lr": 9.3201914409469e-06, "epoch": 0.7535641547861507, "percentage": 25.17, "elapsed_time": "0:24:45", "remaining_time": "1:13:36"} +{"current_steps": 186, "total_steps": 735, "loss": 0.1159, "lr": 9.308179304916573e-06, "epoch": 0.7576374745417516, "percentage": 25.31, "elapsed_time": "0:24:53", "remaining_time": "1:13:28"} +{"current_steps": 187, "total_steps": 735, "loss": 0.1052, "lr": 9.29606985164723e-06, "epoch": 0.7617107942973523, "percentage": 25.44, "elapsed_time": "0:24:59", "remaining_time": "1:13:14"} +{"current_steps": 188, "total_steps": 735, "loss": 0.1351, "lr": 9.283863354678683e-06, "epoch": 0.7657841140529531, "percentage": 25.58, "elapsed_time": "0:25:12", "remaining_time": "1:13:20"} +{"current_steps": 189, "total_steps": 735, "loss": 0.1304, "lr": 9.27156008974286e-06, "epoch": 0.769857433808554, "percentage": 25.71, "elapsed_time": "0:25:19", "remaining_time": "1:13:10"} +{"current_steps": 190, "total_steps": 735, "loss": 0.1054, "lr": 9.259160334757575e-06, "epoch": 0.7739307535641547, "percentage": 25.85, "elapsed_time": "0:25:28", "remaining_time": "1:13:03"} +{"current_steps": 191, "total_steps": 735, "loss": 0.1323, "lr": 9.246664369820249e-06, "epoch": 0.7780040733197556, "percentage": 25.99, "elapsed_time": 
"0:25:34", "remaining_time": "1:12:49"} +{"current_steps": 192, "total_steps": 735, "loss": 0.2385, "lr": 9.234072477201588e-06, "epoch": 0.7820773930753564, "percentage": 26.12, "elapsed_time": "0:25:40", "remaining_time": "1:12:35"} +{"current_steps": 193, "total_steps": 735, "loss": 0.1312, "lr": 9.2213849413392e-06, "epoch": 0.7861507128309573, "percentage": 26.26, "elapsed_time": "0:25:47", "remaining_time": "1:12:25"} +{"current_steps": 194, "total_steps": 735, "loss": 0.1032, "lr": 9.208602048831176e-06, "epoch": 0.790224032586558, "percentage": 26.39, "elapsed_time": "0:25:54", "remaining_time": "1:12:16"} +{"current_steps": 195, "total_steps": 735, "loss": 0.1089, "lr": 9.195724088429611e-06, "epoch": 0.7942973523421588, "percentage": 26.53, "elapsed_time": "0:26:04", "remaining_time": "1:12:11"} +{"current_steps": 196, "total_steps": 735, "loss": 0.1166, "lr": 9.18275135103409e-06, "epoch": 0.7983706720977597, "percentage": 26.67, "elapsed_time": "0:26:12", "remaining_time": "1:12:03"} +{"current_steps": 197, "total_steps": 735, "loss": 0.1317, "lr": 9.169684129685099e-06, "epoch": 0.8024439918533605, "percentage": 26.8, "elapsed_time": "0:26:18", "remaining_time": "1:11:51"} +{"current_steps": 198, "total_steps": 735, "loss": 0.1892, "lr": 9.156522719557428e-06, "epoch": 0.8065173116089613, "percentage": 26.94, "elapsed_time": "0:26:26", "remaining_time": "1:11:43"} +{"current_steps": 199, "total_steps": 735, "loss": 0.1526, "lr": 9.143267417953486e-06, "epoch": 0.8105906313645621, "percentage": 27.07, "elapsed_time": "0:26:32", "remaining_time": "1:11:29"} +{"current_steps": 200, "total_steps": 735, "loss": 0.1791, "lr": 9.129918524296596e-06, "epoch": 0.814663951120163, "percentage": 27.21, "elapsed_time": "0:26:42", "remaining_time": "1:11:26"} +{"current_steps": 201, "total_steps": 735, "loss": 0.1018, "lr": 9.11647634012422e-06, "epoch": 0.8187372708757638, "percentage": 27.35, "elapsed_time": "0:26:49", "remaining_time": "1:11:16"} 
+{"current_steps": 202, "total_steps": 735, "loss": 0.1174, "lr": 9.102941169081167e-06, "epoch": 0.8228105906313645, "percentage": 27.48, "elapsed_time": "0:26:58", "remaining_time": "1:11:10"} +{"current_steps": 203, "total_steps": 735, "loss": 0.14, "lr": 9.089313316912708e-06, "epoch": 0.8268839103869654, "percentage": 27.62, "elapsed_time": "0:27:05", "remaining_time": "1:11:00"} +{"current_steps": 204, "total_steps": 735, "loss": 0.1208, "lr": 9.075593091457692e-06, "epoch": 0.8309572301425662, "percentage": 27.76, "elapsed_time": "0:27:16", "remaining_time": "1:10:59"} +{"current_steps": 205, "total_steps": 735, "loss": 0.1166, "lr": 9.061780802641582e-06, "epoch": 0.835030549898167, "percentage": 27.89, "elapsed_time": "0:27:22", "remaining_time": "1:10:46"} +{"current_steps": 206, "total_steps": 735, "loss": 0.1046, "lr": 9.047876762469451e-06, "epoch": 0.8391038696537678, "percentage": 28.03, "elapsed_time": "0:27:31", "remaining_time": "1:10:40"} +{"current_steps": 207, "total_steps": 735, "loss": 0.1049, "lr": 9.033881285018945e-06, "epoch": 0.8431771894093686, "percentage": 28.16, "elapsed_time": "0:27:39", "remaining_time": "1:10:33"} +{"current_steps": 208, "total_steps": 735, "loss": 0.1605, "lr": 9.019794686433174e-06, "epoch": 0.8472505091649695, "percentage": 28.3, "elapsed_time": "0:27:50", "remaining_time": "1:10:33"} +{"current_steps": 209, "total_steps": 735, "loss": 0.1008, "lr": 9.005617284913586e-06, "epoch": 0.8513238289205702, "percentage": 28.44, "elapsed_time": "0:27:57", "remaining_time": "1:10:22"} +{"current_steps": 210, "total_steps": 735, "loss": 0.1174, "lr": 8.991349400712772e-06, "epoch": 0.8553971486761711, "percentage": 28.57, "elapsed_time": "0:28:05", "remaining_time": "1:10:13"} +{"current_steps": 211, "total_steps": 735, "loss": 0.1252, "lr": 8.976991356127225e-06, "epoch": 0.8594704684317719, "percentage": 28.71, "elapsed_time": "0:28:11", "remaining_time": "1:10:01"} +{"current_steps": 212, "total_steps": 735, "loss": 
0.1054, "lr": 8.962543475490068e-06, "epoch": 0.8635437881873728, "percentage": 28.84, "elapsed_time": "0:28:19", "remaining_time": "1:09:52"} +{"current_steps": 213, "total_steps": 735, "loss": 0.1059, "lr": 8.948006085163735e-06, "epoch": 0.8676171079429735, "percentage": 28.98, "elapsed_time": "0:28:27", "remaining_time": "1:09:44"} +{"current_steps": 214, "total_steps": 735, "loss": 0.1055, "lr": 8.933379513532575e-06, "epoch": 0.8716904276985743, "percentage": 29.12, "elapsed_time": "0:28:33", "remaining_time": "1:09:31"} +{"current_steps": 215, "total_steps": 735, "loss": 0.1047, "lr": 8.91866409099546e-06, "epoch": 0.8757637474541752, "percentage": 29.25, "elapsed_time": "0:28:41", "remaining_time": "1:09:23"} +{"current_steps": 216, "total_steps": 735, "loss": 0.1028, "lr": 8.903860149958308e-06, "epoch": 0.879837067209776, "percentage": 29.39, "elapsed_time": "0:28:49", "remaining_time": "1:09:15"} +{"current_steps": 217, "total_steps": 735, "loss": 0.131, "lr": 8.888968024826575e-06, "epoch": 0.8839103869653768, "percentage": 29.52, "elapsed_time": "0:28:55", "remaining_time": "1:09:03"} +{"current_steps": 218, "total_steps": 735, "loss": 0.1014, "lr": 8.873988051997702e-06, "epoch": 0.8879837067209776, "percentage": 29.66, "elapsed_time": "0:29:04", "remaining_time": "1:08:58"} +{"current_steps": 219, "total_steps": 735, "loss": 0.1335, "lr": 8.85892056985352e-06, "epoch": 0.8920570264765784, "percentage": 29.8, "elapsed_time": "0:29:11", "remaining_time": "1:08:47"} +{"current_steps": 220, "total_steps": 735, "loss": 0.2286, "lr": 8.8437659187526e-06, "epoch": 0.8961303462321792, "percentage": 29.93, "elapsed_time": "0:29:19", "remaining_time": "1:08:38"} +{"current_steps": 221, "total_steps": 735, "loss": 0.1827, "lr": 8.828524441022575e-06, "epoch": 0.90020366598778, "percentage": 30.07, "elapsed_time": "0:29:27", "remaining_time": "1:08:30"} +{"current_steps": 222, "total_steps": 735, "loss": 0.1027, "lr": 8.813196480952393e-06, "epoch": 
0.9042769857433809, "percentage": 30.2, "elapsed_time": "0:29:35", "remaining_time": "1:08:22"} +{"current_steps": 223, "total_steps": 735, "loss": 0.1198, "lr": 8.797782384784549e-06, "epoch": 0.9083503054989817, "percentage": 30.34, "elapsed_time": "0:29:42", "remaining_time": "1:08:13"} +{"current_steps": 224, "total_steps": 735, "loss": 0.1029, "lr": 8.782282500707262e-06, "epoch": 0.9124236252545825, "percentage": 30.48, "elapsed_time": "0:29:50", "remaining_time": "1:08:05"} +{"current_steps": 225, "total_steps": 735, "loss": 0.1241, "lr": 8.766697178846611e-06, "epoch": 0.9164969450101833, "percentage": 30.61, "elapsed_time": "0:29:58", "remaining_time": "1:07:56"} +{"current_steps": 226, "total_steps": 735, "loss": 0.1343, "lr": 8.751026771258622e-06, "epoch": 0.9205702647657841, "percentage": 30.75, "elapsed_time": "0:30:06", "remaining_time": "1:07:49"} +{"current_steps": 227, "total_steps": 735, "loss": 0.1058, "lr": 8.735271631921322e-06, "epoch": 0.924643584521385, "percentage": 30.88, "elapsed_time": "0:30:15", "remaining_time": "1:07:42"} +{"current_steps": 228, "total_steps": 735, "loss": 0.1332, "lr": 8.719432116726738e-06, "epoch": 0.9287169042769857, "percentage": 31.02, "elapsed_time": "0:30:22", "remaining_time": "1:07:32"} +{"current_steps": 229, "total_steps": 735, "loss": 0.1451, "lr": 8.703508583472855e-06, "epoch": 0.9327902240325866, "percentage": 31.16, "elapsed_time": "0:30:30", "remaining_time": "1:07:23"} +{"current_steps": 230, "total_steps": 735, "loss": 0.1248, "lr": 8.68750139185554e-06, "epoch": 0.9368635437881874, "percentage": 31.29, "elapsed_time": "0:30:37", "remaining_time": "1:07:14"} +{"current_steps": 231, "total_steps": 735, "loss": 0.119, "lr": 8.671410903460416e-06, "epoch": 0.9409368635437881, "percentage": 31.43, "elapsed_time": "0:30:44", "remaining_time": "1:07:04"} +{"current_steps": 232, "total_steps": 735, "loss": 0.1559, "lr": 8.65523748175469e-06, "epoch": 0.945010183299389, "percentage": 31.56, 
"elapsed_time": "0:30:53", "remaining_time": "1:06:57"} +{"current_steps": 233, "total_steps": 735, "loss": 0.1693, "lr": 8.63898149207895e-06, "epoch": 0.9490835030549898, "percentage": 31.7, "elapsed_time": "0:31:00", "remaining_time": "1:06:48"} +{"current_steps": 234, "total_steps": 735, "loss": 0.1346, "lr": 8.622643301638902e-06, "epoch": 0.9531568228105907, "percentage": 31.84, "elapsed_time": "0:31:09", "remaining_time": "1:06:43"} +{"current_steps": 235, "total_steps": 735, "loss": 0.0968, "lr": 8.606223279497081e-06, "epoch": 0.9572301425661914, "percentage": 31.97, "elapsed_time": "0:31:16", "remaining_time": "1:06:33"} +{"current_steps": 236, "total_steps": 735, "loss": 0.0966, "lr": 8.589721796564521e-06, "epoch": 0.9613034623217923, "percentage": 32.11, "elapsed_time": "0:31:25", "remaining_time": "1:06:27"} +{"current_steps": 237, "total_steps": 735, "loss": 0.1201, "lr": 8.57313922559236e-06, "epoch": 0.9653767820773931, "percentage": 32.24, "elapsed_time": "0:31:32", "remaining_time": "1:06:16"} +{"current_steps": 238, "total_steps": 735, "loss": 0.1097, "lr": 8.556475941163436e-06, "epoch": 0.9694501018329938, "percentage": 32.38, "elapsed_time": "0:31:39", "remaining_time": "1:06:07"} +{"current_steps": 239, "total_steps": 735, "loss": 0.1552, "lr": 8.539732319683817e-06, "epoch": 0.9735234215885947, "percentage": 32.52, "elapsed_time": "0:31:46", "remaining_time": "1:05:57"} +{"current_steps": 240, "total_steps": 735, "loss": 0.1138, "lr": 8.5229087393743e-06, "epoch": 0.9775967413441955, "percentage": 32.65, "elapsed_time": "0:31:53", "remaining_time": "1:05:46"} +{"current_steps": 241, "total_steps": 735, "loss": 0.1525, "lr": 8.506005580261872e-06, "epoch": 0.9816700610997964, "percentage": 32.79, "elapsed_time": "0:32:00", "remaining_time": "1:05:37"} +{"current_steps": 242, "total_steps": 735, "loss": 0.1082, "lr": 8.489023224171114e-06, "epoch": 0.9857433808553971, "percentage": 32.93, "elapsed_time": "0:32:13", "remaining_time": 
"1:05:37"} +{"current_steps": 243, "total_steps": 735, "loss": 0.0877, "lr": 8.47196205471559e-06, "epoch": 0.9898167006109979, "percentage": 33.06, "elapsed_time": "0:32:19", "remaining_time": "1:05:27"} +{"current_steps": 244, "total_steps": 735, "loss": 0.1675, "lr": 8.45482245728917e-06, "epoch": 0.9938900203665988, "percentage": 33.2, "elapsed_time": "0:32:28", "remaining_time": "1:05:21"} +{"current_steps": 245, "total_steps": 735, "loss": 0.15, "lr": 8.437604819057336e-06, "epoch": 0.9979633401221996, "percentage": 33.33, "elapsed_time": "0:32:35", "remaining_time": "1:05:10"} +{"current_steps": 246, "total_steps": 735, "loss": 0.1072, "lr": 8.420309528948422e-06, "epoch": 1.0020366598778003, "percentage": 33.47, "elapsed_time": "0:32:44", "remaining_time": "1:05:04"} +{"current_steps": 247, "total_steps": 735, "loss": 0.0844, "lr": 8.40293697764484e-06, "epoch": 1.0061099796334012, "percentage": 33.61, "elapsed_time": "0:32:52", "remaining_time": "1:04:56"} +{"current_steps": 248, "total_steps": 735, "loss": 0.0859, "lr": 8.385487557574253e-06, "epoch": 1.010183299389002, "percentage": 33.74, "elapsed_time": "0:33:00", "remaining_time": "1:04:48"} +{"current_steps": 249, "total_steps": 735, "loss": 0.0809, "lr": 8.367961662900704e-06, "epoch": 1.0142566191446027, "percentage": 33.88, "elapsed_time": "0:33:07", "remaining_time": "1:04:38"} +{"current_steps": 250, "total_steps": 735, "loss": 0.0996, "lr": 8.35035968951572e-06, "epoch": 1.0183299389002036, "percentage": 34.01, "elapsed_time": "0:33:19", "remaining_time": "1:04:39"} +{"current_steps": 251, "total_steps": 735, "loss": 0.0999, "lr": 8.33268203502937e-06, "epoch": 1.0224032586558045, "percentage": 34.15, "elapsed_time": "0:34:26", "remaining_time": "1:06:24"} +{"current_steps": 252, "total_steps": 735, "loss": 0.0836, "lr": 8.314929098761268e-06, "epoch": 1.0264765784114054, "percentage": 34.29, "elapsed_time": "0:34:35", "remaining_time": "1:06:18"} +{"current_steps": 253, "total_steps": 735, 
"loss": 0.0866, "lr": 8.297101281731576e-06, "epoch": 1.030549898167006, "percentage": 34.42, "elapsed_time": "0:34:48", "remaining_time": "1:06:19"} +{"current_steps": 254, "total_steps": 735, "loss": 0.0901, "lr": 8.279198986651925e-06, "epoch": 1.034623217922607, "percentage": 34.56, "elapsed_time": "0:34:58", "remaining_time": "1:06:14"} +{"current_steps": 255, "total_steps": 735, "loss": 0.0789, "lr": 8.261222617916335e-06, "epoch": 1.0386965376782078, "percentage": 34.69, "elapsed_time": "0:35:06", "remaining_time": "1:06:05"} +{"current_steps": 256, "total_steps": 735, "loss": 0.1017, "lr": 8.243172581592066e-06, "epoch": 1.0427698574338085, "percentage": 34.83, "elapsed_time": "0:35:17", "remaining_time": "1:06:01"} +{"current_steps": 257, "total_steps": 735, "loss": 0.085, "lr": 8.22504928541045e-06, "epoch": 1.0468431771894093, "percentage": 34.97, "elapsed_time": "0:35:26", "remaining_time": "1:05:55"} +{"current_steps": 258, "total_steps": 735, "loss": 0.0777, "lr": 8.206853138757687e-06, "epoch": 1.0509164969450102, "percentage": 35.1, "elapsed_time": "0:35:35", "remaining_time": "1:05:47"} +{"current_steps": 259, "total_steps": 735, "loss": 0.0833, "lr": 8.188584552665592e-06, "epoch": 1.054989816700611, "percentage": 35.24, "elapsed_time": "0:35:43", "remaining_time": "1:05:38"} +{"current_steps": 260, "total_steps": 735, "loss": 0.0968, "lr": 8.17024393980231e-06, "epoch": 1.0590631364562118, "percentage": 35.37, "elapsed_time": "0:35:51", "remaining_time": "1:05:31"} +{"current_steps": 261, "total_steps": 735, "loss": 0.0632, "lr": 8.15183171446299e-06, "epoch": 1.0631364562118126, "percentage": 35.51, "elapsed_time": "0:35:58", "remaining_time": "1:05:20"} +{"current_steps": 262, "total_steps": 735, "loss": 0.073, "lr": 8.133348292560442e-06, "epoch": 1.0672097759674135, "percentage": 35.65, "elapsed_time": "0:36:06", "remaining_time": "1:05:10"} +{"current_steps": 263, "total_steps": 735, "loss": 0.1158, "lr": 8.114794091615718e-06, "epoch": 
1.0712830957230142, "percentage": 35.78, "elapsed_time": "0:36:13", "remaining_time": "1:04:59"} +{"current_steps": 264, "total_steps": 735, "loss": 0.0616, "lr": 8.096169530748708e-06, "epoch": 1.075356415478615, "percentage": 35.92, "elapsed_time": "0:36:19", "remaining_time": "1:04:48"} +{"current_steps": 265, "total_steps": 735, "loss": 0.0924, "lr": 8.077475030668647e-06, "epoch": 1.079429735234216, "percentage": 36.05, "elapsed_time": "0:36:28", "remaining_time": "1:04:40"} +{"current_steps": 266, "total_steps": 735, "loss": 0.0841, "lr": 8.058711013664633e-06, "epoch": 1.0835030549898166, "percentage": 36.19, "elapsed_time": "0:36:35", "remaining_time": "1:04:30"} +{"current_steps": 267, "total_steps": 735, "loss": 0.0781, "lr": 8.039877903596069e-06, "epoch": 1.0875763747454175, "percentage": 36.33, "elapsed_time": "0:36:47", "remaining_time": "1:04:29"} +{"current_steps": 268, "total_steps": 735, "loss": 0.074, "lr": 8.020976125883105e-06, "epoch": 1.0916496945010183, "percentage": 36.46, "elapsed_time": "0:36:54", "remaining_time": "1:04:19"} +{"current_steps": 269, "total_steps": 735, "loss": 0.0909, "lr": 8.002006107497018e-06, "epoch": 1.0957230142566192, "percentage": 36.6, "elapsed_time": "0:37:04", "remaining_time": "1:04:12"} +{"current_steps": 270, "total_steps": 735, "loss": 0.0682, "lr": 7.982968276950568e-06, "epoch": 1.0997963340122199, "percentage": 36.73, "elapsed_time": "0:37:09", "remaining_time": "1:04:00"} +{"current_steps": 271, "total_steps": 735, "loss": 0.1067, "lr": 7.963863064288326e-06, "epoch": 1.1038696537678208, "percentage": 36.87, "elapsed_time": "0:37:15", "remaining_time": "1:03:47"} +{"current_steps": 272, "total_steps": 735, "loss": 0.0743, "lr": 7.944690901076949e-06, "epoch": 1.1079429735234216, "percentage": 37.01, "elapsed_time": "0:37:27", "remaining_time": "1:03:45"} +{"current_steps": 273, "total_steps": 735, "loss": 0.0903, "lr": 7.925452220395436e-06, "epoch": 1.1120162932790225, "percentage": 37.14, 
"elapsed_time": "0:37:35", "remaining_time": "1:03:36"} +{"current_steps": 274, "total_steps": 735, "loss": 0.0835, "lr": 7.906147456825349e-06, "epoch": 1.1160896130346232, "percentage": 37.28, "elapsed_time": "0:37:42", "remaining_time": "1:03:27"} +{"current_steps": 275, "total_steps": 735, "loss": 0.0739, "lr": 7.886777046440993e-06, "epoch": 1.120162932790224, "percentage": 37.41, "elapsed_time": "0:37:56", "remaining_time": "1:03:28"} +{"current_steps": 276, "total_steps": 735, "loss": 0.1219, "lr": 7.867341426799562e-06, "epoch": 1.124236252545825, "percentage": 37.55, "elapsed_time": "0:38:05", "remaining_time": "1:03:21"} +{"current_steps": 277, "total_steps": 735, "loss": 0.0913, "lr": 7.847841036931263e-06, "epoch": 1.1283095723014256, "percentage": 37.69, "elapsed_time": "0:38:12", "remaining_time": "1:03:09"} +{"current_steps": 278, "total_steps": 735, "loss": 0.0757, "lr": 7.828276317329388e-06, "epoch": 1.1323828920570265, "percentage": 37.82, "elapsed_time": "0:38:20", "remaining_time": "1:03:01"} +{"current_steps": 279, "total_steps": 735, "loss": 0.0641, "lr": 7.80864770994038e-06, "epoch": 1.1364562118126273, "percentage": 37.96, "elapsed_time": "0:38:27", "remaining_time": "1:02:51"} +{"current_steps": 280, "total_steps": 735, "loss": 0.0821, "lr": 7.788955658153829e-06, "epoch": 1.140529531568228, "percentage": 38.1, "elapsed_time": "0:38:34", "remaining_time": "1:02:40"} +{"current_steps": 281, "total_steps": 735, "loss": 0.0686, "lr": 7.769200606792476e-06, "epoch": 1.1446028513238289, "percentage": 38.23, "elapsed_time": "0:38:41", "remaining_time": "1:02:30"} +{"current_steps": 282, "total_steps": 735, "loss": 0.0959, "lr": 7.749383002102147e-06, "epoch": 1.1486761710794298, "percentage": 38.37, "elapsed_time": "0:38:48", "remaining_time": "1:02:19"} +{"current_steps": 283, "total_steps": 735, "loss": 0.0859, "lr": 7.72950329174169e-06, "epoch": 1.1527494908350306, "percentage": 38.5, "elapsed_time": "0:38:56", "remaining_time": "1:02:11"} 
+{"current_steps": 284, "total_steps": 735, "loss": 0.0874, "lr": 7.709561924772855e-06, "epoch": 1.1568228105906313, "percentage": 38.64, "elapsed_time": "0:39:03", "remaining_time": "1:02:01"} +{"current_steps": 285, "total_steps": 735, "loss": 0.1012, "lr": 7.689559351650142e-06, "epoch": 1.1608961303462322, "percentage": 38.78, "elapsed_time": "0:39:11", "remaining_time": "1:01:53"} +{"current_steps": 286, "total_steps": 735, "loss": 0.0923, "lr": 7.66949602421064e-06, "epoch": 1.164969450101833, "percentage": 38.91, "elapsed_time": "0:39:18", "remaining_time": "1:01:42"} +{"current_steps": 287, "total_steps": 735, "loss": 0.0676, "lr": 7.649372395663816e-06, "epoch": 1.1690427698574337, "percentage": 39.05, "elapsed_time": "0:39:24", "remaining_time": "1:01:31"} +{"current_steps": 288, "total_steps": 735, "loss": 0.0812, "lr": 7.629188920581267e-06, "epoch": 1.1731160896130346, "percentage": 39.18, "elapsed_time": "0:39:31", "remaining_time": "1:01:21"} +{"current_steps": 289, "total_steps": 735, "loss": 0.0755, "lr": 7.608946054886468e-06, "epoch": 1.1771894093686355, "percentage": 39.32, "elapsed_time": "0:39:38", "remaining_time": "1:01:10"} +{"current_steps": 290, "total_steps": 735, "loss": 0.097, "lr": 7.588644255844464e-06, "epoch": 1.1812627291242364, "percentage": 39.46, "elapsed_time": "0:39:45", "remaining_time": "1:01:01"} +{"current_steps": 291, "total_steps": 735, "loss": 0.0719, "lr": 7.568283982051538e-06, "epoch": 1.185336048879837, "percentage": 39.59, "elapsed_time": "0:39:53", "remaining_time": "1:00:52"} +{"current_steps": 292, "total_steps": 735, "loss": 0.0715, "lr": 7.5478656934248626e-06, "epoch": 1.189409368635438, "percentage": 39.73, "elapsed_time": "0:40:01", "remaining_time": "1:00:43"} +{"current_steps": 293, "total_steps": 735, "loss": 0.0745, "lr": 7.527389851192099e-06, "epoch": 1.1934826883910388, "percentage": 39.86, "elapsed_time": "0:40:11", "remaining_time": "1:00:37"} +{"current_steps": 294, "total_steps": 735, "loss": 
0.077, "lr": 7.506856917880989e-06, "epoch": 1.1975560081466394, "percentage": 40.0, "elapsed_time": "0:40:20", "remaining_time": "1:00:30"} +{"current_steps": 295, "total_steps": 735, "loss": 0.0757, "lr": 7.486267357308896e-06, "epoch": 1.2016293279022403, "percentage": 40.14, "elapsed_time": "0:40:27", "remaining_time": "1:00:20"} +{"current_steps": 296, "total_steps": 735, "loss": 0.0821, "lr": 7.465621634572336e-06, "epoch": 1.2057026476578412, "percentage": 40.27, "elapsed_time": "0:40:35", "remaining_time": "1:00:12"} +{"current_steps": 297, "total_steps": 735, "loss": 0.0857, "lr": 7.444920216036473e-06, "epoch": 1.2097759674134418, "percentage": 40.41, "elapsed_time": "0:40:43", "remaining_time": "1:00:04"} +{"current_steps": 298, "total_steps": 735, "loss": 0.0706, "lr": 7.4241635693245766e-06, "epoch": 1.2138492871690427, "percentage": 40.54, "elapsed_time": "0:40:50", "remaining_time": "0:59:54"} +{"current_steps": 299, "total_steps": 735, "loss": 0.0698, "lr": 7.40335216330746e-06, "epoch": 1.2179226069246436, "percentage": 40.68, "elapsed_time": "0:40:57", "remaining_time": "0:59:43"} +{"current_steps": 300, "total_steps": 735, "loss": 0.0778, "lr": 7.382486468092899e-06, "epoch": 1.2219959266802445, "percentage": 40.82, "elapsed_time": "0:41:03", "remaining_time": "0:59:32"} +{"current_steps": 301, "total_steps": 735, "loss": 0.0881, "lr": 7.361566955014999e-06, "epoch": 1.2260692464358451, "percentage": 40.95, "elapsed_time": "0:41:12", "remaining_time": "0:59:25"} +{"current_steps": 302, "total_steps": 735, "loss": 0.0807, "lr": 7.340594096623559e-06, "epoch": 1.230142566191446, "percentage": 41.09, "elapsed_time": "0:41:23", "remaining_time": "0:59:20"} +{"current_steps": 303, "total_steps": 735, "loss": 0.0761, "lr": 7.319568366673389e-06, "epoch": 1.234215885947047, "percentage": 41.22, "elapsed_time": "0:41:31", "remaining_time": "0:59:12"} +{"current_steps": 304, "total_steps": 735, "loss": 0.084, "lr": 7.2984902401136115e-06, "epoch": 
1.2382892057026478, "percentage": 41.36, "elapsed_time": "0:41:36", "remaining_time": "0:59:00"} +{"current_steps": 305, "total_steps": 735, "loss": 0.0762, "lr": 7.277360193076936e-06, "epoch": 1.2423625254582484, "percentage": 41.5, "elapsed_time": "0:41:43", "remaining_time": "0:58:49"} +{"current_steps": 306, "total_steps": 735, "loss": 0.0723, "lr": 7.256178702868899e-06, "epoch": 1.2464358452138493, "percentage": 41.63, "elapsed_time": "0:41:51", "remaining_time": "0:58:40"} +{"current_steps": 307, "total_steps": 735, "loss": 0.0809, "lr": 7.234946247957087e-06, "epoch": 1.2505091649694502, "percentage": 41.77, "elapsed_time": "0:41:58", "remaining_time": "0:58:31"} +{"current_steps": 308, "total_steps": 735, "loss": 0.0822, "lr": 7.213663307960321e-06, "epoch": 1.2545824847250509, "percentage": 41.9, "elapsed_time": "0:42:04", "remaining_time": "0:58:19"} +{"current_steps": 309, "total_steps": 735, "loss": 0.1005, "lr": 7.192330363637832e-06, "epoch": 1.2586558044806517, "percentage": 42.04, "elapsed_time": "0:42:14", "remaining_time": "0:58:14"} +{"current_steps": 310, "total_steps": 735, "loss": 0.0737, "lr": 7.170947896878392e-06, "epoch": 1.2627291242362526, "percentage": 42.18, "elapsed_time": "0:42:21", "remaining_time": "0:58:04"} +{"current_steps": 311, "total_steps": 735, "loss": 0.0694, "lr": 7.149516390689433e-06, "epoch": 1.2668024439918533, "percentage": 42.31, "elapsed_time": "0:42:28", "remaining_time": "0:57:54"} +{"current_steps": 312, "total_steps": 735, "loss": 0.0712, "lr": 7.12803632918614e-06, "epoch": 1.2708757637474541, "percentage": 42.45, "elapsed_time": "0:42:36", "remaining_time": "0:57:45"} +{"current_steps": 313, "total_steps": 735, "loss": 0.0709, "lr": 7.1065081975805086e-06, "epoch": 1.274949083503055, "percentage": 42.59, "elapsed_time": "0:42:43", "remaining_time": "0:57:35"} +{"current_steps": 314, "total_steps": 735, "loss": 0.0884, "lr": 7.084932482170385e-06, "epoch": 1.2790224032586557, "percentage": 42.72, 
"elapsed_time": "0:42:49", "remaining_time": "0:57:25"} +{"current_steps": 315, "total_steps": 735, "loss": 0.0791, "lr": 7.063309670328491e-06, "epoch": 1.2830957230142566, "percentage": 42.86, "elapsed_time": "0:43:01", "remaining_time": "0:57:22"} +{"current_steps": 316, "total_steps": 735, "loss": 0.0725, "lr": 7.041640250491398e-06, "epoch": 1.2871690427698574, "percentage": 42.99, "elapsed_time": "0:43:08", "remaining_time": "0:57:12"} +{"current_steps": 317, "total_steps": 735, "loss": 0.0676, "lr": 7.019924712148511e-06, "epoch": 1.2912423625254583, "percentage": 43.13, "elapsed_time": "0:43:17", "remaining_time": "0:57:04"} +{"current_steps": 318, "total_steps": 735, "loss": 0.0734, "lr": 6.998163545830998e-06, "epoch": 1.2953156822810592, "percentage": 43.27, "elapsed_time": "0:43:24", "remaining_time": "0:56:55"} +{"current_steps": 319, "total_steps": 735, "loss": 0.0775, "lr": 6.976357243100718e-06, "epoch": 1.2993890020366599, "percentage": 43.4, "elapsed_time": "0:43:31", "remaining_time": "0:56:45"} +{"current_steps": 320, "total_steps": 735, "loss": 0.0878, "lr": 6.954506296539112e-06, "epoch": 1.3034623217922607, "percentage": 43.54, "elapsed_time": "0:43:37", "remaining_time": "0:56:34"} +{"current_steps": 321, "total_steps": 735, "loss": 0.081, "lr": 6.9326111997360775e-06, "epoch": 1.3075356415478616, "percentage": 43.67, "elapsed_time": "0:43:44", "remaining_time": "0:56:24"} +{"current_steps": 322, "total_steps": 735, "loss": 0.0918, "lr": 6.910672447278827e-06, "epoch": 1.3116089613034623, "percentage": 43.81, "elapsed_time": "0:43:54", "remaining_time": "0:56:18"} +{"current_steps": 323, "total_steps": 735, "loss": 0.0859, "lr": 6.8886905347406985e-06, "epoch": 1.3156822810590632, "percentage": 43.95, "elapsed_time": "0:44:01", "remaining_time": "0:56:09"} +{"current_steps": 324, "total_steps": 735, "loss": 0.0916, "lr": 6.866665958669976e-06, "epoch": 1.319755600814664, "percentage": 44.08, "elapsed_time": "0:44:08", "remaining_time": 
"0:56:00"} +{"current_steps": 325, "total_steps": 735, "loss": 0.0755, "lr": 6.844599216578667e-06, "epoch": 1.3238289205702647, "percentage": 44.22, "elapsed_time": "0:44:15", "remaining_time": "0:55:50"} +{"current_steps": 326, "total_steps": 735, "loss": 0.0903, "lr": 6.822490806931262e-06, "epoch": 1.3279022403258656, "percentage": 44.35, "elapsed_time": "0:44:23", "remaining_time": "0:55:41"} +{"current_steps": 327, "total_steps": 735, "loss": 0.0833, "lr": 6.800341229133486e-06, "epoch": 1.3319755600814664, "percentage": 44.49, "elapsed_time": "0:44:34", "remaining_time": "0:55:36"} +{"current_steps": 328, "total_steps": 735, "loss": 0.1131, "lr": 6.778150983520999e-06, "epoch": 1.336048879837067, "percentage": 44.63, "elapsed_time": "0:44:41", "remaining_time": "0:55:27"} +{"current_steps": 329, "total_steps": 735, "loss": 0.0783, "lr": 6.755920571348111e-06, "epoch": 1.340122199592668, "percentage": 44.76, "elapsed_time": "0:44:56", "remaining_time": "0:55:27"} +{"current_steps": 330, "total_steps": 735, "loss": 0.0791, "lr": 6.73365049477645e-06, "epoch": 1.3441955193482689, "percentage": 44.9, "elapsed_time": "0:45:02", "remaining_time": "0:55:16"} +{"current_steps": 331, "total_steps": 735, "loss": 0.116, "lr": 6.711341256863623e-06, "epoch": 1.3482688391038695, "percentage": 45.03, "elapsed_time": "0:45:08", "remaining_time": "0:55:05"} +{"current_steps": 332, "total_steps": 735, "loss": 0.079, "lr": 6.688993361551847e-06, "epoch": 1.3523421588594704, "percentage": 45.17, "elapsed_time": "0:45:15", "remaining_time": "0:54:56"} +{"current_steps": 333, "total_steps": 735, "loss": 0.0876, "lr": 6.66660731365657e-06, "epoch": 1.3564154786150713, "percentage": 45.31, "elapsed_time": "0:45:22", "remaining_time": "0:54:46"} +{"current_steps": 334, "total_steps": 735, "loss": 0.08, "lr": 6.64418361885507e-06, "epoch": 1.3604887983706722, "percentage": 45.44, "elapsed_time": "0:45:36", "remaining_time": "0:54:45"} +{"current_steps": 335, "total_steps": 735, 
"loss": 0.0803, "lr": 6.621722783675024e-06, "epoch": 1.364562118126273, "percentage": 45.58, "elapsed_time": "0:45:44", "remaining_time": "0:54:36"} +{"current_steps": 336, "total_steps": 735, "loss": 0.0773, "lr": 6.599225315483076e-06, "epoch": 1.3686354378818737, "percentage": 45.71, "elapsed_time": "0:45:52", "remaining_time": "0:54:28"} +{"current_steps": 337, "total_steps": 735, "loss": 0.0699, "lr": 6.576691722473368e-06, "epoch": 1.3727087576374746, "percentage": 45.85, "elapsed_time": "0:45:58", "remaining_time": "0:54:17"} +{"current_steps": 338, "total_steps": 735, "loss": 0.0898, "lr": 6.554122513656065e-06, "epoch": 1.3767820773930755, "percentage": 45.99, "elapsed_time": "0:46:07", "remaining_time": "0:54:10"} +{"current_steps": 339, "total_steps": 735, "loss": 0.0796, "lr": 6.531518198845854e-06, "epoch": 1.3808553971486761, "percentage": 46.12, "elapsed_time": "0:46:14", "remaining_time": "0:54:01"} +{"current_steps": 340, "total_steps": 735, "loss": 0.0864, "lr": 6.508879288650431e-06, "epoch": 1.384928716904277, "percentage": 46.26, "elapsed_time": "0:46:24", "remaining_time": "0:53:54"} +{"current_steps": 341, "total_steps": 735, "loss": 0.1032, "lr": 6.486206294458966e-06, "epoch": 1.3890020366598779, "percentage": 46.39, "elapsed_time": "0:46:32", "remaining_time": "0:53:46"} +{"current_steps": 342, "total_steps": 735, "loss": 0.0739, "lr": 6.463499728430549e-06, "epoch": 1.3930753564154785, "percentage": 46.53, "elapsed_time": "0:46:39", "remaining_time": "0:53:37"} +{"current_steps": 343, "total_steps": 735, "loss": 0.0911, "lr": 6.4407601034826225e-06, "epoch": 1.3971486761710794, "percentage": 46.67, "elapsed_time": "0:46:46", "remaining_time": "0:53:27"} +{"current_steps": 344, "total_steps": 735, "loss": 0.0763, "lr": 6.417987933279397e-06, "epoch": 1.4012219959266803, "percentage": 46.8, "elapsed_time": "0:46:53", "remaining_time": "0:53:17"} +{"current_steps": 345, "total_steps": 735, "loss": 0.0685, "lr": 6.395183732220242e-06, 
"epoch": 1.405295315682281, "percentage": 46.94, "elapsed_time": "0:46:59", "remaining_time": "0:53:07"} +{"current_steps": 346, "total_steps": 735, "loss": 0.0763, "lr": 6.372348015428077e-06, "epoch": 1.4093686354378818, "percentage": 47.07, "elapsed_time": "0:47:07", "remaining_time": "0:52:58"} +{"current_steps": 347, "total_steps": 735, "loss": 0.0811, "lr": 6.349481298737723e-06, "epoch": 1.4134419551934827, "percentage": 47.21, "elapsed_time": "0:47:13", "remaining_time": "0:52:48"} +{"current_steps": 348, "total_steps": 735, "loss": 0.0651, "lr": 6.32658409868426e-06, "epoch": 1.4175152749490836, "percentage": 47.35, "elapsed_time": "0:47:21", "remaining_time": "0:52:39"} +{"current_steps": 349, "total_steps": 735, "loss": 0.0763, "lr": 6.303656932491349e-06, "epoch": 1.4215885947046842, "percentage": 47.48, "elapsed_time": "0:47:27", "remaining_time": "0:52:29"} +{"current_steps": 350, "total_steps": 735, "loss": 0.1054, "lr": 6.280700318059563e-06, "epoch": 1.4256619144602851, "percentage": 47.62, "elapsed_time": "0:47:35", "remaining_time": "0:52:21"} +{"current_steps": 351, "total_steps": 735, "loss": 0.102, "lr": 6.257714773954674e-06, "epoch": 1.429735234215886, "percentage": 47.76, "elapsed_time": "0:47:41", "remaining_time": "0:52:10"} +{"current_steps": 352, "total_steps": 735, "loss": 0.0813, "lr": 6.234700819395946e-06, "epoch": 1.4338085539714869, "percentage": 47.89, "elapsed_time": "0:47:48", "remaining_time": "0:52:01"} +{"current_steps": 353, "total_steps": 735, "loss": 0.0829, "lr": 6.211658974244407e-06, "epoch": 1.4378818737270875, "percentage": 48.03, "elapsed_time": "0:47:56", "remaining_time": "0:51:53"} +{"current_steps": 354, "total_steps": 735, "loss": 0.0782, "lr": 6.1885897589911e-06, "epoch": 1.4419551934826884, "percentage": 48.16, "elapsed_time": "0:48:04", "remaining_time": "0:51:44"} +{"current_steps": 355, "total_steps": 735, "loss": 0.0975, "lr": 6.1654936947453355e-06, "epoch": 1.4460285132382893, "percentage": 48.3, 
"elapsed_time": "0:48:17", "remaining_time": "0:51:41"} +{"current_steps": 356, "total_steps": 735, "loss": 0.071, "lr": 6.142371303222909e-06, "epoch": 1.45010183299389, "percentage": 48.44, "elapsed_time": "0:48:24", "remaining_time": "0:51:31"} +{"current_steps": 357, "total_steps": 735, "loss": 0.0812, "lr": 6.119223106734328e-06, "epoch": 1.4541751527494908, "percentage": 48.57, "elapsed_time": "0:48:32", "remaining_time": "0:51:23"} +{"current_steps": 358, "total_steps": 735, "loss": 0.0689, "lr": 6.0960496281729995e-06, "epoch": 1.4582484725050917, "percentage": 48.71, "elapsed_time": "0:48:39", "remaining_time": "0:51:14"} +{"current_steps": 359, "total_steps": 735, "loss": 0.075, "lr": 6.072851391003432e-06, "epoch": 1.4623217922606924, "percentage": 48.84, "elapsed_time": "0:48:48", "remaining_time": "0:51:07"} +{"current_steps": 360, "total_steps": 735, "loss": 0.0851, "lr": 6.0496289192494e-06, "epoch": 1.4663951120162932, "percentage": 48.98, "elapsed_time": "0:48:55", "remaining_time": "0:50:57"} +{"current_steps": 361, "total_steps": 735, "loss": 0.0939, "lr": 6.026382737482116e-06, "epoch": 1.4704684317718941, "percentage": 49.12, "elapsed_time": "0:49:02", "remaining_time": "0:50:48"} +{"current_steps": 362, "total_steps": 735, "loss": 0.089, "lr": 6.003113370808375e-06, "epoch": 1.4745417515274948, "percentage": 49.25, "elapsed_time": "0:49:08", "remaining_time": "0:50:38"} +{"current_steps": 363, "total_steps": 735, "loss": 0.1087, "lr": 5.979821344858695e-06, "epoch": 1.4786150712830957, "percentage": 49.39, "elapsed_time": "0:49:20", "remaining_time": "0:50:34"} +{"current_steps": 364, "total_steps": 735, "loss": 0.0875, "lr": 5.956507185775441e-06, "epoch": 1.4826883910386965, "percentage": 49.52, "elapsed_time": "0:49:29", "remaining_time": "0:50:26"} +{"current_steps": 365, "total_steps": 735, "loss": 0.074, "lr": 5.933171420200946e-06, "epoch": 1.4867617107942974, "percentage": 49.66, "elapsed_time": "0:49:36", "remaining_time": "0:50:17"} 
+{"current_steps": 366, "total_steps": 735, "loss": 0.0771, "lr": 5.909814575265609e-06, "epoch": 1.4908350305498983, "percentage": 49.8, "elapsed_time": "0:49:44", "remaining_time": "0:50:08"} +{"current_steps": 367, "total_steps": 735, "loss": 0.064, "lr": 5.88643717857599e-06, "epoch": 1.494908350305499, "percentage": 49.93, "elapsed_time": "0:49:49", "remaining_time": "0:49:57"} +{"current_steps": 368, "total_steps": 735, "loss": 0.0732, "lr": 5.863039758202889e-06, "epoch": 1.4989816700610998, "percentage": 50.07, "elapsed_time": "0:49:57", "remaining_time": "0:49:49"} +{"current_steps": 369, "total_steps": 735, "loss": 0.0794, "lr": 5.839622842669423e-06, "epoch": 1.5030549898167007, "percentage": 50.2, "elapsed_time": "0:50:05", "remaining_time": "0:49:40"} +{"current_steps": 370, "total_steps": 735, "loss": 0.0873, "lr": 5.816186960939084e-06, "epoch": 1.5071283095723014, "percentage": 50.34, "elapsed_time": "0:50:14", "remaining_time": "0:49:33"} +{"current_steps": 371, "total_steps": 735, "loss": 0.0808, "lr": 5.7927326424037875e-06, "epoch": 1.5112016293279023, "percentage": 50.48, "elapsed_time": "0:50:20", "remaining_time": "0:49:23"} +{"current_steps": 372, "total_steps": 735, "loss": 0.0743, "lr": 5.7692604168719225e-06, "epoch": 1.5152749490835031, "percentage": 50.61, "elapsed_time": "0:50:28", "remaining_time": "0:49:14"} +{"current_steps": 373, "total_steps": 735, "loss": 0.0737, "lr": 5.745770814556373e-06, "epoch": 1.5193482688391038, "percentage": 50.75, "elapsed_time": "0:50:36", "remaining_time": "0:49:07"} +{"current_steps": 374, "total_steps": 735, "loss": 0.1075, "lr": 5.722264366062549e-06, "epoch": 1.5234215885947047, "percentage": 50.88, "elapsed_time": "0:50:44", "remaining_time": "0:48:58"} +{"current_steps": 375, "total_steps": 735, "loss": 0.0854, "lr": 5.698741602376395e-06, "epoch": 1.5274949083503055, "percentage": 51.02, "elapsed_time": "0:50:53", "remaining_time": "0:48:51"} +{"current_steps": 376, "total_steps": 735, "loss": 
0.0735, "lr": 5.675203054852403e-06, "epoch": 1.5315682281059062, "percentage": 51.16, "elapsed_time": "0:51:03", "remaining_time": "0:48:45"} +{"current_steps": 377, "total_steps": 735, "loss": 0.0893, "lr": 5.651649255201603e-06, "epoch": 1.535641547861507, "percentage": 51.29, "elapsed_time": "0:51:09", "remaining_time": "0:48:34"} +{"current_steps": 378, "total_steps": 735, "loss": 0.0808, "lr": 5.628080735479553e-06, "epoch": 1.539714867617108, "percentage": 51.43, "elapsed_time": "0:51:15", "remaining_time": "0:48:24"} +{"current_steps": 379, "total_steps": 735, "loss": 0.0693, "lr": 5.604498028074323e-06, "epoch": 1.5437881873727086, "percentage": 51.56, "elapsed_time": "0:51:22", "remaining_time": "0:48:15"} +{"current_steps": 380, "total_steps": 735, "loss": 0.0708, "lr": 5.580901665694471e-06, "epoch": 1.5478615071283097, "percentage": 51.7, "elapsed_time": "0:51:29", "remaining_time": "0:48:06"} +{"current_steps": 381, "total_steps": 735, "loss": 0.0916, "lr": 5.557292181357003e-06, "epoch": 1.5519348268839104, "percentage": 51.84, "elapsed_time": "0:51:37", "remaining_time": "0:47:58"} +{"current_steps": 382, "total_steps": 735, "loss": 0.075, "lr": 5.533670108375334e-06, "epoch": 1.556008146639511, "percentage": 51.97, "elapsed_time": "0:51:43", "remaining_time": "0:47:48"} +{"current_steps": 383, "total_steps": 735, "loss": 0.0629, "lr": 5.510035980347249e-06, "epoch": 1.5600814663951121, "percentage": 52.11, "elapsed_time": "0:51:51", "remaining_time": "0:47:39"} +{"current_steps": 384, "total_steps": 735, "loss": 0.129, "lr": 5.486390331142841e-06, "epoch": 1.5641547861507128, "percentage": 52.24, "elapsed_time": "0:52:01", "remaining_time": "0:47:32"} +{"current_steps": 385, "total_steps": 735, "loss": 0.0994, "lr": 5.462733694892452e-06, "epoch": 1.5682281059063137, "percentage": 52.38, "elapsed_time": "0:52:08", "remaining_time": "0:47:24"} +{"current_steps": 386, "total_steps": 735, "loss": 0.0884, "lr": 5.439066605974615e-06, "epoch": 
1.5723014256619146, "percentage": 52.52, "elapsed_time": "0:52:25", "remaining_time": "0:47:23"} +{"current_steps": 387, "total_steps": 735, "loss": 0.0758, "lr": 5.415389599003972e-06, "epoch": 1.5763747454175152, "percentage": 52.65, "elapsed_time": "0:52:31", "remaining_time": "0:47:13"} +{"current_steps": 388, "total_steps": 735, "loss": 0.0822, "lr": 5.391703208819209e-06, "epoch": 1.580448065173116, "percentage": 52.79, "elapsed_time": "0:52:37", "remaining_time": "0:47:04"} +{"current_steps": 389, "total_steps": 735, "loss": 0.0794, "lr": 5.368007970470964e-06, "epoch": 1.584521384928717, "percentage": 52.93, "elapsed_time": "0:52:44", "remaining_time": "0:46:54"} +{"current_steps": 390, "total_steps": 735, "loss": 0.0908, "lr": 5.344304419209748e-06, "epoch": 1.5885947046843176, "percentage": 53.06, "elapsed_time": "0:52:52", "remaining_time": "0:46:46"} +{"current_steps": 391, "total_steps": 735, "loss": 0.065, "lr": 5.3205930904738544e-06, "epoch": 1.5926680244399185, "percentage": 53.2, "elapsed_time": "0:52:59", "remaining_time": "0:46:37"} +{"current_steps": 392, "total_steps": 735, "loss": 0.1063, "lr": 5.296874519877256e-06, "epoch": 1.5967413441955194, "percentage": 53.33, "elapsed_time": "0:53:05", "remaining_time": "0:46:27"} +{"current_steps": 393, "total_steps": 735, "loss": 0.092, "lr": 5.273149243197517e-06, "epoch": 1.60081466395112, "percentage": 53.47, "elapsed_time": "0:53:19", "remaining_time": "0:46:24"} +{"current_steps": 394, "total_steps": 735, "loss": 0.0781, "lr": 5.2494177963636785e-06, "epoch": 1.6048879837067211, "percentage": 53.61, "elapsed_time": "0:53:27", "remaining_time": "0:46:15"} +{"current_steps": 395, "total_steps": 735, "loss": 0.0844, "lr": 5.225680715444168e-06, "epoch": 1.6089613034623218, "percentage": 53.74, "elapsed_time": "0:53:34", "remaining_time": "0:46:06"} +{"current_steps": 396, "total_steps": 735, "loss": 0.0755, "lr": 5.201938536634674e-06, "epoch": 1.6130346232179225, "percentage": 53.88, 
"elapsed_time": "0:53:42", "remaining_time": "0:45:58"} +{"current_steps": 397, "total_steps": 735, "loss": 0.0692, "lr": 5.178191796246043e-06, "epoch": 1.6171079429735236, "percentage": 54.01, "elapsed_time": "0:53:51", "remaining_time": "0:45:51"} +{"current_steps": 398, "total_steps": 735, "loss": 0.0844, "lr": 5.154441030692162e-06, "epoch": 1.6211812627291242, "percentage": 54.15, "elapsed_time": "0:53:58", "remaining_time": "0:45:42"} +{"current_steps": 399, "total_steps": 735, "loss": 0.0605, "lr": 5.1306867764778445e-06, "epoch": 1.625254582484725, "percentage": 54.29, "elapsed_time": "0:54:04", "remaining_time": "0:45:32"} +{"current_steps": 400, "total_steps": 735, "loss": 0.0616, "lr": 5.106929570186706e-06, "epoch": 1.629327902240326, "percentage": 54.42, "elapsed_time": "0:54:11", "remaining_time": "0:45:23"} +{"current_steps": 401, "total_steps": 735, "loss": 0.0888, "lr": 5.083169948469049e-06, "epoch": 1.6334012219959266, "percentage": 54.56, "elapsed_time": "0:54:18", "remaining_time": "0:45:14"} +{"current_steps": 402, "total_steps": 735, "loss": 0.0924, "lr": 5.059408448029737e-06, "epoch": 1.6374745417515275, "percentage": 54.69, "elapsed_time": "0:54:24", "remaining_time": "0:45:04"} +{"current_steps": 403, "total_steps": 735, "loss": 0.0899, "lr": 5.0356456056160715e-06, "epoch": 1.6415478615071284, "percentage": 54.83, "elapsed_time": "0:54:31", "remaining_time": "0:44:55"} +{"current_steps": 404, "total_steps": 735, "loss": 0.0676, "lr": 5.0118819580056686e-06, "epoch": 1.645621181262729, "percentage": 54.97, "elapsed_time": "0:54:38", "remaining_time": "0:44:45"} +{"current_steps": 405, "total_steps": 735, "loss": 0.0719, "lr": 4.988118041994332e-06, "epoch": 1.64969450101833, "percentage": 55.1, "elapsed_time": "0:54:45", "remaining_time": "0:44:37"} +{"current_steps": 406, "total_steps": 735, "loss": 0.0757, "lr": 4.964354394383929e-06, "epoch": 1.6537678207739308, "percentage": 55.24, "elapsed_time": "0:54:54", "remaining_time": 
"0:44:29"} +{"current_steps": 407, "total_steps": 735, "loss": 0.066, "lr": 4.940591551970264e-06, "epoch": 1.6578411405295315, "percentage": 55.37, "elapsed_time": "0:55:02", "remaining_time": "0:44:21"} +{"current_steps": 408, "total_steps": 735, "loss": 0.0724, "lr": 4.9168300515309515e-06, "epoch": 1.6619144602851323, "percentage": 55.51, "elapsed_time": "0:55:09", "remaining_time": "0:44:12"} +{"current_steps": 409, "total_steps": 735, "loss": 0.1022, "lr": 4.8930704298132965e-06, "epoch": 1.6659877800407332, "percentage": 55.65, "elapsed_time": "0:55:18", "remaining_time": "0:44:04"} +{"current_steps": 410, "total_steps": 735, "loss": 0.0718, "lr": 4.869313223522159e-06, "epoch": 1.6700610997963339, "percentage": 55.78, "elapsed_time": "0:55:24", "remaining_time": "0:43:55"} +{"current_steps": 411, "total_steps": 735, "loss": 0.0707, "lr": 4.845558969307839e-06, "epoch": 1.674134419551935, "percentage": 55.92, "elapsed_time": "0:55:32", "remaining_time": "0:43:46"} +{"current_steps": 412, "total_steps": 735, "loss": 0.0785, "lr": 4.821808203753959e-06, "epoch": 1.6782077393075356, "percentage": 56.05, "elapsed_time": "0:55:38", "remaining_time": "0:43:37"} +{"current_steps": 413, "total_steps": 735, "loss": 0.0755, "lr": 4.798061463365327e-06, "epoch": 1.6822810590631363, "percentage": 56.19, "elapsed_time": "0:55:50", "remaining_time": "0:43:32"} +{"current_steps": 414, "total_steps": 735, "loss": 0.0725, "lr": 4.774319284555833e-06, "epoch": 1.6863543788187374, "percentage": 56.33, "elapsed_time": "0:55:59", "remaining_time": "0:43:24"} +{"current_steps": 415, "total_steps": 735, "loss": 0.0698, "lr": 4.7505822036363214e-06, "epoch": 1.690427698574338, "percentage": 56.46, "elapsed_time": "0:56:08", "remaining_time": "0:43:17"} +{"current_steps": 416, "total_steps": 735, "loss": 0.0779, "lr": 4.726850756802486e-06, "epoch": 1.694501018329939, "percentage": 56.6, "elapsed_time": "0:56:17", "remaining_time": "0:43:09"} +{"current_steps": 417, "total_steps": 
735, "loss": 0.0677, "lr": 4.703125480122747e-06, "epoch": 1.6985743380855398, "percentage": 56.73, "elapsed_time": "0:56:23", "remaining_time": "0:43:00"} +{"current_steps": 418, "total_steps": 735, "loss": 0.0959, "lr": 4.679406909526147e-06, "epoch": 1.7026476578411405, "percentage": 56.87, "elapsed_time": "0:56:30", "remaining_time": "0:42:51"} +{"current_steps": 419, "total_steps": 735, "loss": 0.0782, "lr": 4.655695580790254e-06, "epoch": 1.7067209775967414, "percentage": 57.01, "elapsed_time": "0:56:38", "remaining_time": "0:42:43"} +{"current_steps": 420, "total_steps": 735, "loss": 0.104, "lr": 4.631992029529037e-06, "epoch": 1.7107942973523422, "percentage": 57.14, "elapsed_time": "0:56:46", "remaining_time": "0:42:35"} +{"current_steps": 421, "total_steps": 735, "loss": 0.0794, "lr": 4.608296791180793e-06, "epoch": 1.7148676171079429, "percentage": 57.28, "elapsed_time": "0:56:55", "remaining_time": "0:42:27"} +{"current_steps": 422, "total_steps": 735, "loss": 0.0727, "lr": 4.584610400996028e-06, "epoch": 1.7189409368635438, "percentage": 57.41, "elapsed_time": "0:57:02", "remaining_time": "0:42:18"} +{"current_steps": 423, "total_steps": 735, "loss": 0.0812, "lr": 4.560933394025386e-06, "epoch": 1.7230142566191446, "percentage": 57.55, "elapsed_time": "0:57:09", "remaining_time": "0:42:09"} +{"current_steps": 424, "total_steps": 735, "loss": 0.0857, "lr": 4.537266305107549e-06, "epoch": 1.7270875763747453, "percentage": 57.69, "elapsed_time": "0:57:21", "remaining_time": "0:42:04"} +{"current_steps": 425, "total_steps": 735, "loss": 0.0658, "lr": 4.513609668857162e-06, "epoch": 1.7311608961303462, "percentage": 57.82, "elapsed_time": "0:57:29", "remaining_time": "0:41:56"} +{"current_steps": 426, "total_steps": 735, "loss": 0.0637, "lr": 4.489964019652752e-06, "epoch": 1.735234215885947, "percentage": 57.96, "elapsed_time": "0:57:36", "remaining_time": "0:41:46"} +{"current_steps": 427, "total_steps": 735, "loss": 0.0878, "lr": 4.4663298916246665e-06, 
"epoch": 1.7393075356415477, "percentage": 58.1, "elapsed_time": "0:57:43", "remaining_time": "0:41:38"} +{"current_steps": 428, "total_steps": 735, "loss": 0.0801, "lr": 4.442707818642999e-06, "epoch": 1.7433808553971488, "percentage": 58.23, "elapsed_time": "0:57:51", "remaining_time": "0:41:30"} +{"current_steps": 429, "total_steps": 735, "loss": 0.0831, "lr": 4.419098334305529e-06, "epoch": 1.7474541751527495, "percentage": 58.37, "elapsed_time": "0:58:03", "remaining_time": "0:41:24"} +{"current_steps": 430, "total_steps": 735, "loss": 0.0782, "lr": 4.395501971925677e-06, "epoch": 1.7515274949083504, "percentage": 58.5, "elapsed_time": "0:58:11", "remaining_time": "0:41:16"} +{"current_steps": 431, "total_steps": 735, "loss": 0.0795, "lr": 4.371919264520449e-06, "epoch": 1.7556008146639512, "percentage": 58.64, "elapsed_time": "0:58:24", "remaining_time": "0:41:11"} +{"current_steps": 432, "total_steps": 735, "loss": 0.0798, "lr": 4.348350744798399e-06, "epoch": 1.759674134419552, "percentage": 58.78, "elapsed_time": "0:58:31", "remaining_time": "0:41:02"} +{"current_steps": 433, "total_steps": 735, "loss": 0.0728, "lr": 4.324796945147598e-06, "epoch": 1.7637474541751528, "percentage": 58.91, "elapsed_time": "0:58:38", "remaining_time": "0:40:53"} +{"current_steps": 434, "total_steps": 735, "loss": 0.0782, "lr": 4.301258397623606e-06, "epoch": 1.7678207739307537, "percentage": 59.05, "elapsed_time": "0:58:46", "remaining_time": "0:40:45"} +{"current_steps": 435, "total_steps": 735, "loss": 0.0856, "lr": 4.2777356339374526e-06, "epoch": 1.7718940936863543, "percentage": 59.18, "elapsed_time": "0:58:52", "remaining_time": "0:40:36"} +{"current_steps": 436, "total_steps": 735, "loss": 0.0783, "lr": 4.254229185443628e-06, "epoch": 1.7759674134419552, "percentage": 59.32, "elapsed_time": "0:58:59", "remaining_time": "0:40:27"} +{"current_steps": 437, "total_steps": 735, "loss": 0.0701, "lr": 4.230739583128078e-06, "epoch": 1.780040733197556, "percentage": 59.46, 
"elapsed_time": "0:59:06", "remaining_time": "0:40:18"} +{"current_steps": 438, "total_steps": 735, "loss": 0.0977, "lr": 4.2072673575962125e-06, "epoch": 1.7841140529531567, "percentage": 59.59, "elapsed_time": "0:59:12", "remaining_time": "0:40:08"} +{"current_steps": 439, "total_steps": 735, "loss": 0.1103, "lr": 4.183813039060919e-06, "epoch": 1.7881873727087576, "percentage": 59.73, "elapsed_time": "0:59:19", "remaining_time": "0:40:00"} +{"current_steps": 440, "total_steps": 735, "loss": 0.0787, "lr": 4.160377157330579e-06, "epoch": 1.7922606924643585, "percentage": 59.86, "elapsed_time": "0:59:27", "remaining_time": "0:39:51"} +{"current_steps": 441, "total_steps": 735, "loss": 0.0648, "lr": 4.136960241797113e-06, "epoch": 1.7963340122199591, "percentage": 60.0, "elapsed_time": "0:59:34", "remaining_time": "0:39:42"} +{"current_steps": 442, "total_steps": 735, "loss": 0.0856, "lr": 4.113562821424012e-06, "epoch": 1.8004073319755602, "percentage": 60.14, "elapsed_time": "0:59:42", "remaining_time": "0:39:34"} +{"current_steps": 443, "total_steps": 735, "loss": 0.0769, "lr": 4.090185424734392e-06, "epoch": 1.804480651731161, "percentage": 60.27, "elapsed_time": "0:59:50", "remaining_time": "0:39:26"} +{"current_steps": 444, "total_steps": 735, "loss": 0.0761, "lr": 4.066828579799054e-06, "epoch": 1.8085539714867616, "percentage": 60.41, "elapsed_time": "1:00:01", "remaining_time": "0:39:20"} +{"current_steps": 445, "total_steps": 735, "loss": 0.0684, "lr": 4.043492814224559e-06, "epoch": 1.8126272912423627, "percentage": 60.54, "elapsed_time": "1:00:07", "remaining_time": "0:39:10"} +{"current_steps": 446, "total_steps": 735, "loss": 0.0792, "lr": 4.020178655141307e-06, "epoch": 1.8167006109979633, "percentage": 60.68, "elapsed_time": "1:00:17", "remaining_time": "0:39:04"} +{"current_steps": 447, "total_steps": 735, "loss": 0.082, "lr": 3.9968866291916254e-06, "epoch": 1.8207739307535642, "percentage": 60.82, "elapsed_time": "1:00:27", "remaining_time": 
"0:38:57"} +{"current_steps": 448, "total_steps": 735, "loss": 0.0638, "lr": 3.973617262517886e-06, "epoch": 1.824847250509165, "percentage": 60.95, "elapsed_time": "1:00:35", "remaining_time": "0:38:48"} +{"current_steps": 449, "total_steps": 735, "loss": 0.0795, "lr": 3.950371080750602e-06, "epoch": 1.8289205702647657, "percentage": 61.09, "elapsed_time": "1:00:42", "remaining_time": "0:38:40"} +{"current_steps": 450, "total_steps": 735, "loss": 0.1063, "lr": 3.927148608996569e-06, "epoch": 1.8329938900203666, "percentage": 61.22, "elapsed_time": "1:00:50", "remaining_time": "0:38:31"} +{"current_steps": 451, "total_steps": 735, "loss": 0.087, "lr": 3.903950371827001e-06, "epoch": 1.8370672097759675, "percentage": 61.36, "elapsed_time": "1:00:59", "remaining_time": "0:38:24"} +{"current_steps": 452, "total_steps": 735, "loss": 0.0767, "lr": 3.880776893265673e-06, "epoch": 1.8411405295315681, "percentage": 61.5, "elapsed_time": "1:01:06", "remaining_time": "0:38:15"} +{"current_steps": 453, "total_steps": 735, "loss": 0.0575, "lr": 3.85762869677709e-06, "epoch": 1.845213849287169, "percentage": 61.63, "elapsed_time": "1:01:12", "remaining_time": "0:38:06"} +{"current_steps": 454, "total_steps": 735, "loss": 0.0896, "lr": 3.834506305254667e-06, "epoch": 1.84928716904277, "percentage": 61.77, "elapsed_time": "1:01:18", "remaining_time": "0:37:56"} +{"current_steps": 455, "total_steps": 735, "loss": 0.0856, "lr": 3.811410241008902e-06, "epoch": 1.8533604887983706, "percentage": 61.9, "elapsed_time": "1:01:27", "remaining_time": "0:37:49"} +{"current_steps": 456, "total_steps": 735, "loss": 0.0733, "lr": 3.788341025755595e-06, "epoch": 1.8574338085539714, "percentage": 62.04, "elapsed_time": "1:01:34", "remaining_time": "0:37:40"} +{"current_steps": 457, "total_steps": 735, "loss": 0.0829, "lr": 3.765299180604055e-06, "epoch": 1.8615071283095723, "percentage": 62.18, "elapsed_time": "1:01:41", "remaining_time": "0:37:31"} +{"current_steps": 458, "total_steps": 735, 
"loss": 0.0704, "lr": 3.7422852260453274e-06, "epoch": 1.865580448065173, "percentage": 62.31, "elapsed_time": "1:01:48", "remaining_time": "0:37:22"} +{"current_steps": 459, "total_steps": 735, "loss": 0.09, "lr": 3.719299681940437e-06, "epoch": 1.869653767820774, "percentage": 62.45, "elapsed_time": "1:01:56", "remaining_time": "0:37:14"} +{"current_steps": 460, "total_steps": 735, "loss": 0.0816, "lr": 3.696343067508651e-06, "epoch": 1.8737270875763747, "percentage": 62.59, "elapsed_time": "1:02:05", "remaining_time": "0:37:07"} +{"current_steps": 461, "total_steps": 735, "loss": 0.0648, "lr": 3.673415901315743e-06, "epoch": 1.8778004073319754, "percentage": 62.72, "elapsed_time": "1:02:12", "remaining_time": "0:36:58"} +{"current_steps": 462, "total_steps": 735, "loss": 0.0732, "lr": 3.650518701262278e-06, "epoch": 1.8818737270875765, "percentage": 62.86, "elapsed_time": "1:02:21", "remaining_time": "0:36:50"} +{"current_steps": 463, "total_steps": 735, "loss": 0.0731, "lr": 3.6276519845719237e-06, "epoch": 1.8859470468431772, "percentage": 62.99, "elapsed_time": "1:02:34", "remaining_time": "0:36:45"} +{"current_steps": 464, "total_steps": 735, "loss": 0.0847, "lr": 3.6048162677797595e-06, "epoch": 1.890020366598778, "percentage": 63.13, "elapsed_time": "1:02:40", "remaining_time": "0:36:36"} +{"current_steps": 465, "total_steps": 735, "loss": 0.0666, "lr": 3.582012066720605e-06, "epoch": 1.894093686354379, "percentage": 63.27, "elapsed_time": "1:02:49", "remaining_time": "0:36:28"} +{"current_steps": 466, "total_steps": 735, "loss": 0.0814, "lr": 3.559239896517379e-06, "epoch": 1.8981670061099796, "percentage": 63.4, "elapsed_time": "1:02:56", "remaining_time": "0:36:20"} +{"current_steps": 467, "total_steps": 735, "loss": 0.0766, "lr": 3.536500271569452e-06, "epoch": 1.9022403258655805, "percentage": 63.54, "elapsed_time": "1:03:03", "remaining_time": "0:36:11"} +{"current_steps": 468, "total_steps": 735, "loss": 0.0677, "lr": 3.5137937055410343e-06, 
"epoch": 1.9063136456211813, "percentage": 63.67, "elapsed_time": "1:03:11", "remaining_time": "0:36:03"} +{"current_steps": 469, "total_steps": 735, "loss": 0.0883, "lr": 3.4911207113495703e-06, "epoch": 1.910386965376782, "percentage": 63.81, "elapsed_time": "1:03:19", "remaining_time": "0:35:54"} +{"current_steps": 470, "total_steps": 735, "loss": 0.0779, "lr": 3.4684818011541484e-06, "epoch": 1.9144602851323829, "percentage": 63.95, "elapsed_time": "1:03:25", "remaining_time": "0:35:45"} +{"current_steps": 471, "total_steps": 735, "loss": 0.0882, "lr": 3.4458774863439366e-06, "epoch": 1.9185336048879837, "percentage": 64.08, "elapsed_time": "1:03:33", "remaining_time": "0:35:37"} +{"current_steps": 472, "total_steps": 735, "loss": 0.088, "lr": 3.423308277526633e-06, "epoch": 1.9226069246435844, "percentage": 64.22, "elapsed_time": "1:03:39", "remaining_time": "0:35:28"} +{"current_steps": 473, "total_steps": 735, "loss": 0.0836, "lr": 3.4007746845169253e-06, "epoch": 1.9266802443991853, "percentage": 64.35, "elapsed_time": "1:03:48", "remaining_time": "0:35:20"} +{"current_steps": 474, "total_steps": 735, "loss": 0.0671, "lr": 3.3782772163249767e-06, "epoch": 1.9307535641547862, "percentage": 64.49, "elapsed_time": "1:03:56", "remaining_time": "0:35:12"} +{"current_steps": 475, "total_steps": 735, "loss": 0.0694, "lr": 3.3558163811449317e-06, "epoch": 1.9348268839103868, "percentage": 64.63, "elapsed_time": "1:04:06", "remaining_time": "0:35:05"} +{"current_steps": 476, "total_steps": 735, "loss": 0.0847, "lr": 3.3333926863434317e-06, "epoch": 1.938900203665988, "percentage": 64.76, "elapsed_time": "1:04:13", "remaining_time": "0:34:56"} +{"current_steps": 477, "total_steps": 735, "loss": 0.0724, "lr": 3.311006638448155e-06, "epoch": 1.9429735234215886, "percentage": 64.9, "elapsed_time": "1:04:20", "remaining_time": "0:34:47"} +{"current_steps": 478, "total_steps": 735, "loss": 0.066, "lr": 3.288658743136378e-06, "epoch": 1.9470468431771895, "percentage": 
65.03, "elapsed_time": "1:04:27", "remaining_time": "0:34:39"} +{"current_steps": 479, "total_steps": 735, "loss": 0.1077, "lr": 3.2663495052235505e-06, "epoch": 1.9511201629327903, "percentage": 65.17, "elapsed_time": "1:04:34", "remaining_time": "0:34:30"} +{"current_steps": 480, "total_steps": 735, "loss": 0.085, "lr": 3.2440794286518896e-06, "epoch": 1.955193482688391, "percentage": 65.31, "elapsed_time": "1:04:42", "remaining_time": "0:34:22"} +{"current_steps": 481, "total_steps": 735, "loss": 0.0656, "lr": 3.2218490164790015e-06, "epoch": 1.9592668024439919, "percentage": 65.44, "elapsed_time": "1:04:51", "remaining_time": "0:34:14"} +{"current_steps": 482, "total_steps": 735, "loss": 0.0754, "lr": 3.199658770866515e-06, "epoch": 1.9633401221995928, "percentage": 65.58, "elapsed_time": "1:04:57", "remaining_time": "0:34:05"} +{"current_steps": 483, "total_steps": 735, "loss": 0.0668, "lr": 3.1775091930687374e-06, "epoch": 1.9674134419551934, "percentage": 65.71, "elapsed_time": "1:05:03", "remaining_time": "0:33:56"} +{"current_steps": 484, "total_steps": 735, "loss": 0.0686, "lr": 3.1554007834213357e-06, "epoch": 1.9714867617107943, "percentage": 65.85, "elapsed_time": "1:05:12", "remaining_time": "0:33:49"} +{"current_steps": 485, "total_steps": 735, "loss": 0.0848, "lr": 3.1333340413300263e-06, "epoch": 1.9755600814663952, "percentage": 65.99, "elapsed_time": "1:05:19", "remaining_time": "0:33:40"} +{"current_steps": 486, "total_steps": 735, "loss": 0.0701, "lr": 3.1113094652593023e-06, "epoch": 1.9796334012219958, "percentage": 66.12, "elapsed_time": "1:05:26", "remaining_time": "0:33:31"} +{"current_steps": 487, "total_steps": 735, "loss": 0.1013, "lr": 3.0893275527211742e-06, "epoch": 1.9837067209775967, "percentage": 66.26, "elapsed_time": "1:05:36", "remaining_time": "0:33:24"} +{"current_steps": 488, "total_steps": 735, "loss": 0.0832, "lr": 3.067388800263923e-06, "epoch": 1.9877800407331976, "percentage": 66.39, "elapsed_time": "1:05:45", 
"remaining_time": "0:33:17"} +{"current_steps": 489, "total_steps": 735, "loss": 0.0678, "lr": 3.04549370346089e-06, "epoch": 1.9918533604887982, "percentage": 66.53, "elapsed_time": "1:05:52", "remaining_time": "0:33:08"} +{"current_steps": 490, "total_steps": 735, "loss": 0.0768, "lr": 3.0236427568992845e-06, "epoch": 1.9959266802443993, "percentage": 66.67, "elapsed_time": "1:06:00", "remaining_time": "0:33:00"} +{"current_steps": 491, "total_steps": 735, "loss": 0.0861, "lr": 3.0018364541690048e-06, "epoch": 2.0, "percentage": 66.8, "elapsed_time": "1:06:09", "remaining_time": "0:32:52"} +{"current_steps": 492, "total_steps": 735, "loss": 0.0466, "lr": 2.9800752878514903e-06, "epoch": 2.0040733197556007, "percentage": 66.94, "elapsed_time": "1:06:19", "remaining_time": "0:32:45"} +{"current_steps": 493, "total_steps": 735, "loss": 0.0515, "lr": 2.958359749508603e-06, "epoch": 2.0081466395112018, "percentage": 67.07, "elapsed_time": "1:06:27", "remaining_time": "0:32:37"} +{"current_steps": 494, "total_steps": 735, "loss": 0.0435, "lr": 2.936690329671511e-06, "epoch": 2.0122199592668024, "percentage": 67.21, "elapsed_time": "1:06:34", "remaining_time": "0:32:28"} +{"current_steps": 495, "total_steps": 735, "loss": 0.0455, "lr": 2.915067517829615e-06, "epoch": 2.016293279022403, "percentage": 67.35, "elapsed_time": "1:06:41", "remaining_time": "0:32:20"} +{"current_steps": 496, "total_steps": 735, "loss": 0.0476, "lr": 2.893491802419492e-06, "epoch": 2.020366598778004, "percentage": 67.48, "elapsed_time": "1:06:49", "remaining_time": "0:32:12"} +{"current_steps": 497, "total_steps": 735, "loss": 0.0383, "lr": 2.871963670813861e-06, "epoch": 2.024439918533605, "percentage": 67.62, "elapsed_time": "1:06:55", "remaining_time": "0:32:02"} +{"current_steps": 498, "total_steps": 735, "loss": 0.0429, "lr": 2.850483609310567e-06, "epoch": 2.0285132382892055, "percentage": 67.76, "elapsed_time": "1:07:03", "remaining_time": "0:31:55"} +{"current_steps": 499, 
"total_steps": 735, "loss": 0.0461, "lr": 2.829052103121611e-06, "epoch": 2.0325865580448066, "percentage": 67.89, "elapsed_time": "1:07:11", "remaining_time": "0:31:46"} +{"current_steps": 500, "total_steps": 735, "loss": 0.0468, "lr": 2.807669636362169e-06, "epoch": 2.0366598778004072, "percentage": 68.03, "elapsed_time": "1:07:19", "remaining_time": "0:31:38"} +{"current_steps": 501, "total_steps": 735, "loss": 0.0661, "lr": 2.7863366920396805e-06, "epoch": 2.0407331975560083, "percentage": 68.16, "elapsed_time": "1:08:30", "remaining_time": "0:31:59"} +{"current_steps": 502, "total_steps": 735, "loss": 0.0528, "lr": 2.765053752042915e-06, "epoch": 2.044806517311609, "percentage": 68.3, "elapsed_time": "1:08:37", "remaining_time": "0:31:51"} +{"current_steps": 503, "total_steps": 735, "loss": 0.0445, "lr": 2.7438212971311016e-06, "epoch": 2.0488798370672097, "percentage": 68.44, "elapsed_time": "1:08:44", "remaining_time": "0:31:42"} +{"current_steps": 504, "total_steps": 735, "loss": 0.0377, "lr": 2.722639806923066e-06, "epoch": 2.0529531568228108, "percentage": 68.57, "elapsed_time": "1:08:50", "remaining_time": "0:31:33"} +{"current_steps": 505, "total_steps": 735, "loss": 0.0493, "lr": 2.7015097598863906e-06, "epoch": 2.0570264765784114, "percentage": 68.71, "elapsed_time": "1:09:03", "remaining_time": "0:31:27"} +{"current_steps": 506, "total_steps": 735, "loss": 0.0397, "lr": 2.680431633326614e-06, "epoch": 2.061099796334012, "percentage": 68.84, "elapsed_time": "1:09:12", "remaining_time": "0:31:19"} +{"current_steps": 507, "total_steps": 735, "loss": 0.0485, "lr": 2.659405903376442e-06, "epoch": 2.065173116089613, "percentage": 68.98, "elapsed_time": "1:09:21", "remaining_time": "0:31:11"} +{"current_steps": 508, "total_steps": 735, "loss": 0.0479, "lr": 2.6384330449850028e-06, "epoch": 2.069246435845214, "percentage": 69.12, "elapsed_time": "1:09:31", "remaining_time": "0:31:03"} +{"current_steps": 509, "total_steps": 735, "loss": 0.0466, "lr": 
2.617513531907103e-06, "epoch": 2.0733197556008145, "percentage": 69.25, "elapsed_time": "1:09:44", "remaining_time": "0:30:58"} +{"current_steps": 510, "total_steps": 735, "loss": 0.0472, "lr": 2.5966478366925406e-06, "epoch": 2.0773930753564156, "percentage": 69.39, "elapsed_time": "1:09:51", "remaining_time": "0:30:49"} +{"current_steps": 511, "total_steps": 735, "loss": 0.0448, "lr": 2.5758364306754247e-06, "epoch": 2.0814663951120163, "percentage": 69.52, "elapsed_time": "1:09:59", "remaining_time": "0:30:41"} +{"current_steps": 512, "total_steps": 735, "loss": 0.0448, "lr": 2.5550797839635283e-06, "epoch": 2.085539714867617, "percentage": 69.66, "elapsed_time": "1:10:06", "remaining_time": "0:30:32"} +{"current_steps": 513, "total_steps": 735, "loss": 0.0446, "lr": 2.5343783654276644e-06, "epoch": 2.089613034623218, "percentage": 69.8, "elapsed_time": "1:10:15", "remaining_time": "0:30:24"} +{"current_steps": 514, "total_steps": 735, "loss": 0.0433, "lr": 2.5137326426911067e-06, "epoch": 2.0936863543788187, "percentage": 69.93, "elapsed_time": "1:10:23", "remaining_time": "0:30:15"} +{"current_steps": 515, "total_steps": 735, "loss": 0.048, "lr": 2.493143082119013e-06, "epoch": 2.0977596741344193, "percentage": 70.07, "elapsed_time": "1:10:30", "remaining_time": "0:30:07"} +{"current_steps": 516, "total_steps": 735, "loss": 0.0478, "lr": 2.472610148807903e-06, "epoch": 2.1018329938900204, "percentage": 70.2, "elapsed_time": "1:10:38", "remaining_time": "0:29:58"} +{"current_steps": 517, "total_steps": 735, "loss": 0.0464, "lr": 2.452134306575139e-06, "epoch": 2.105906313645621, "percentage": 70.34, "elapsed_time": "1:10:46", "remaining_time": "0:29:50"} +{"current_steps": 518, "total_steps": 735, "loss": 0.0535, "lr": 2.431716017948462e-06, "epoch": 2.109979633401222, "percentage": 70.48, "elapsed_time": "1:10:59", "remaining_time": "0:29:44"} +{"current_steps": 519, "total_steps": 735, "loss": 0.0374, "lr": 2.4113557441555384e-06, "epoch": 2.114052953156823, 
"percentage": 70.61, "elapsed_time": "1:11:06", "remaining_time": "0:29:35"} +{"current_steps": 520, "total_steps": 735, "loss": 0.0344, "lr": 2.391053945113533e-06, "epoch": 2.1181262729124235, "percentage": 70.75, "elapsed_time": "1:11:14", "remaining_time": "0:29:27"} +{"current_steps": 521, "total_steps": 735, "loss": 0.0452, "lr": 2.370811079418735e-06, "epoch": 2.1221995926680246, "percentage": 70.88, "elapsed_time": "1:11:24", "remaining_time": "0:29:19"} +{"current_steps": 522, "total_steps": 735, "loss": 0.0491, "lr": 2.350627604336186e-06, "epoch": 2.1262729124236253, "percentage": 71.02, "elapsed_time": "1:11:32", "remaining_time": "0:29:11"} +{"current_steps": 523, "total_steps": 735, "loss": 0.0522, "lr": 2.330503975789361e-06, "epoch": 2.130346232179226, "percentage": 71.16, "elapsed_time": "1:11:42", "remaining_time": "0:29:03"} +{"current_steps": 524, "total_steps": 735, "loss": 0.0433, "lr": 2.3104406483498593e-06, "epoch": 2.134419551934827, "percentage": 71.29, "elapsed_time": "1:11:53", "remaining_time": "0:28:57"} +{"current_steps": 525, "total_steps": 735, "loss": 0.0431, "lr": 2.290438075227146e-06, "epoch": 2.1384928716904277, "percentage": 71.43, "elapsed_time": "1:12:02", "remaining_time": "0:28:48"} +{"current_steps": 526, "total_steps": 735, "loss": 0.045, "lr": 2.270496708258309e-06, "epoch": 2.1425661914460283, "percentage": 71.56, "elapsed_time": "1:12:13", "remaining_time": "0:28:41"} +{"current_steps": 527, "total_steps": 735, "loss": 0.041, "lr": 2.2506169978978543e-06, "epoch": 2.1466395112016294, "percentage": 71.7, "elapsed_time": "1:12:20", "remaining_time": "0:28:33"} +{"current_steps": 528, "total_steps": 735, "loss": 0.0453, "lr": 2.230799393207526e-06, "epoch": 2.15071283095723, "percentage": 71.84, "elapsed_time": "1:12:27", "remaining_time": "0:28:24"} +{"current_steps": 529, "total_steps": 735, "loss": 0.0333, "lr": 2.2110443418461723e-06, "epoch": 2.1547861507128308, "percentage": 71.97, "elapsed_time": "1:12:36", 
"remaining_time": "0:28:16"} +{"current_steps": 530, "total_steps": 735, "loss": 0.041, "lr": 2.191352290059621e-06, "epoch": 2.158859470468432, "percentage": 72.11, "elapsed_time": "1:12:45", "remaining_time": "0:28:08"} +{"current_steps": 531, "total_steps": 735, "loss": 0.0518, "lr": 2.171723682670613e-06, "epoch": 2.1629327902240325, "percentage": 72.24, "elapsed_time": "1:12:54", "remaining_time": "0:28:00"} +{"current_steps": 532, "total_steps": 735, "loss": 0.0413, "lr": 2.152158963068739e-06, "epoch": 2.167006109979633, "percentage": 72.38, "elapsed_time": "1:13:02", "remaining_time": "0:27:52"} +{"current_steps": 533, "total_steps": 735, "loss": 0.0407, "lr": 2.1326585732004384e-06, "epoch": 2.1710794297352343, "percentage": 72.52, "elapsed_time": "1:13:10", "remaining_time": "0:27:43"} +{"current_steps": 534, "total_steps": 735, "loss": 0.047, "lr": 2.1132229535590092e-06, "epoch": 2.175152749490835, "percentage": 72.65, "elapsed_time": "1:13:18", "remaining_time": "0:27:35"} +{"current_steps": 535, "total_steps": 735, "loss": 0.0442, "lr": 2.093852543174652e-06, "epoch": 2.179226069246436, "percentage": 72.79, "elapsed_time": "1:13:26", "remaining_time": "0:27:27"} +{"current_steps": 536, "total_steps": 735, "loss": 0.0409, "lr": 2.0745477796045664e-06, "epoch": 2.1832993890020367, "percentage": 72.93, "elapsed_time": "1:13:32", "remaining_time": "0:27:18"} +{"current_steps": 537, "total_steps": 735, "loss": 0.0455, "lr": 2.0553090989230527e-06, "epoch": 2.1873727087576373, "percentage": 73.06, "elapsed_time": "1:13:41", "remaining_time": "0:27:10"} +{"current_steps": 538, "total_steps": 735, "loss": 0.0362, "lr": 2.036136935711674e-06, "epoch": 2.1914460285132384, "percentage": 73.2, "elapsed_time": "1:13:47", "remaining_time": "0:27:01"} +{"current_steps": 539, "total_steps": 735, "loss": 0.034, "lr": 2.017031723049432e-06, "epoch": 2.195519348268839, "percentage": 73.33, "elapsed_time": "1:13:54", "remaining_time": "0:26:52"} +{"current_steps": 540, 
"total_steps": 735, "loss": 0.0456, "lr": 1.997993892502983e-06, "epoch": 2.1995926680244398, "percentage": 73.47, "elapsed_time": "1:14:03", "remaining_time": "0:26:44"} +{"current_steps": 541, "total_steps": 735, "loss": 0.0472, "lr": 1.979023874116895e-06, "epoch": 2.203665987780041, "percentage": 73.61, "elapsed_time": "1:14:12", "remaining_time": "0:26:36"} +{"current_steps": 542, "total_steps": 735, "loss": 0.0417, "lr": 1.9601220964039324e-06, "epoch": 2.2077393075356415, "percentage": 73.74, "elapsed_time": "1:14:20", "remaining_time": "0:26:28"} +{"current_steps": 543, "total_steps": 735, "loss": 0.0439, "lr": 1.9412889863353683e-06, "epoch": 2.211812627291242, "percentage": 73.88, "elapsed_time": "1:14:26", "remaining_time": "0:26:19"} +{"current_steps": 544, "total_steps": 735, "loss": 0.0403, "lr": 1.9225249693313547e-06, "epoch": 2.2158859470468433, "percentage": 74.01, "elapsed_time": "1:14:34", "remaining_time": "0:26:10"} +{"current_steps": 545, "total_steps": 735, "loss": 0.0447, "lr": 1.9038304692512943e-06, "epoch": 2.219959266802444, "percentage": 74.15, "elapsed_time": "1:14:43", "remaining_time": "0:26:02"} +{"current_steps": 546, "total_steps": 735, "loss": 0.043, "lr": 1.8852059083842838e-06, "epoch": 2.224032586558045, "percentage": 74.29, "elapsed_time": "1:14:50", "remaining_time": "0:25:54"} +{"current_steps": 547, "total_steps": 735, "loss": 0.0503, "lr": 1.8666517074395607e-06, "epoch": 2.2281059063136457, "percentage": 74.42, "elapsed_time": "1:15:11", "remaining_time": "0:25:50"} +{"current_steps": 548, "total_steps": 735, "loss": 0.034, "lr": 1.8481682855370098e-06, "epoch": 2.2321792260692463, "percentage": 74.56, "elapsed_time": "1:15:17", "remaining_time": "0:25:41"} +{"current_steps": 549, "total_steps": 735, "loss": 0.0405, "lr": 1.829756060197692e-06, "epoch": 2.2362525458248474, "percentage": 74.69, "elapsed_time": "1:15:24", "remaining_time": "0:25:32"} +{"current_steps": 550, "total_steps": 735, "loss": 0.0473, "lr": 
1.8114154473344081e-06, "epoch": 2.240325865580448, "percentage": 74.83, "elapsed_time": "1:15:31", "remaining_time": "0:25:24"} +{"current_steps": 551, "total_steps": 735, "loss": 0.0378, "lr": 1.7931468612423142e-06, "epoch": 2.2443991853360488, "percentage": 74.97, "elapsed_time": "1:15:37", "remaining_time": "0:25:15"} +{"current_steps": 552, "total_steps": 735, "loss": 0.0379, "lr": 1.7749507145895518e-06, "epoch": 2.24847250509165, "percentage": 75.1, "elapsed_time": "1:15:45", "remaining_time": "0:25:07"} +{"current_steps": 553, "total_steps": 735, "loss": 0.0461, "lr": 1.756827418407936e-06, "epoch": 2.2525458248472505, "percentage": 75.24, "elapsed_time": "1:15:52", "remaining_time": "0:24:58"} +{"current_steps": 554, "total_steps": 735, "loss": 0.0405, "lr": 1.7387773820836668e-06, "epoch": 2.256619144602851, "percentage": 75.37, "elapsed_time": "1:15:59", "remaining_time": "0:24:49"} +{"current_steps": 555, "total_steps": 735, "loss": 0.0398, "lr": 1.7208010133480751e-06, "epoch": 2.2606924643584523, "percentage": 75.51, "elapsed_time": "1:16:07", "remaining_time": "0:24:41"} +{"current_steps": 556, "total_steps": 735, "loss": 0.0445, "lr": 1.7028987182684248e-06, "epoch": 2.264765784114053, "percentage": 75.65, "elapsed_time": "1:16:15", "remaining_time": "0:24:33"} +{"current_steps": 557, "total_steps": 735, "loss": 0.0407, "lr": 1.6850709012387328e-06, "epoch": 2.2688391038696536, "percentage": 75.78, "elapsed_time": "1:16:24", "remaining_time": "0:24:25"} +{"current_steps": 558, "total_steps": 735, "loss": 0.04, "lr": 1.6673179649706312e-06, "epoch": 2.2729124236252547, "percentage": 75.92, "elapsed_time": "1:16:32", "remaining_time": "0:24:16"} +{"current_steps": 559, "total_steps": 735, "loss": 0.0399, "lr": 1.64964031048428e-06, "epoch": 2.2769857433808554, "percentage": 76.05, "elapsed_time": "1:16:40", "remaining_time": "0:24:08"} +{"current_steps": 560, "total_steps": 735, "loss": 0.042, "lr": 1.632038337099297e-06, "epoch": 2.281059063136456, 
"percentage": 76.19, "elapsed_time": "1:16:48", "remaining_time": "0:24:00"} +{"current_steps": 561, "total_steps": 735, "loss": 0.0396, "lr": 1.6145124424257497e-06, "epoch": 2.285132382892057, "percentage": 76.33, "elapsed_time": "1:16:56", "remaining_time": "0:23:51"} +{"current_steps": 562, "total_steps": 735, "loss": 0.0411, "lr": 1.5970630223551614e-06, "epoch": 2.2892057026476578, "percentage": 76.46, "elapsed_time": "1:17:03", "remaining_time": "0:23:43"} +{"current_steps": 563, "total_steps": 735, "loss": 0.0371, "lr": 1.5796904710515792e-06, "epoch": 2.293279022403259, "percentage": 76.6, "elapsed_time": "1:17:10", "remaining_time": "0:23:34"} +{"current_steps": 564, "total_steps": 735, "loss": 0.0443, "lr": 1.5623951809426663e-06, "epoch": 2.2973523421588595, "percentage": 76.73, "elapsed_time": "1:17:20", "remaining_time": "0:23:27"} +{"current_steps": 565, "total_steps": 735, "loss": 0.0377, "lr": 1.5451775427108302e-06, "epoch": 2.30142566191446, "percentage": 76.87, "elapsed_time": "1:17:26", "remaining_time": "0:23:18"} +{"current_steps": 566, "total_steps": 735, "loss": 0.0392, "lr": 1.5280379452844124e-06, "epoch": 2.3054989816700613, "percentage": 77.01, "elapsed_time": "1:17:34", "remaining_time": "0:23:09"} +{"current_steps": 567, "total_steps": 735, "loss": 0.0371, "lr": 1.510976775828887e-06, "epoch": 2.309572301425662, "percentage": 77.14, "elapsed_time": "1:17:43", "remaining_time": "0:23:01"} +{"current_steps": 568, "total_steps": 735, "loss": 0.0414, "lr": 1.493994419738129e-06, "epoch": 2.3136456211812626, "percentage": 77.28, "elapsed_time": "1:17:49", "remaining_time": "0:22:53"} +{"current_steps": 569, "total_steps": 735, "loss": 0.0481, "lr": 1.4770912606257003e-06, "epoch": 2.3177189409368637, "percentage": 77.41, "elapsed_time": "1:18:02", "remaining_time": "0:22:46"} +{"current_steps": 570, "total_steps": 735, "loss": 0.045, "lr": 1.4602676803161842e-06, "epoch": 2.3217922606924644, "percentage": 77.55, "elapsed_time": "1:18:09", 
"remaining_time": "0:22:37"} +{"current_steps": 571, "total_steps": 735, "loss": 0.0382, "lr": 1.4435240588365645e-06, "epoch": 2.325865580448065, "percentage": 77.69, "elapsed_time": "1:18:16", "remaining_time": "0:22:28"} +{"current_steps": 572, "total_steps": 735, "loss": 0.0388, "lr": 1.4268607744076419e-06, "epoch": 2.329938900203666, "percentage": 77.82, "elapsed_time": "1:18:23", "remaining_time": "0:22:20"} +{"current_steps": 573, "total_steps": 735, "loss": 0.0453, "lr": 1.41027820343548e-06, "epoch": 2.3340122199592668, "percentage": 77.96, "elapsed_time": "1:18:32", "remaining_time": "0:22:12"} +{"current_steps": 574, "total_steps": 735, "loss": 0.0506, "lr": 1.3937767205029196e-06, "epoch": 2.3380855397148674, "percentage": 78.1, "elapsed_time": "1:18:42", "remaining_time": "0:22:04"} +{"current_steps": 575, "total_steps": 735, "loss": 0.048, "lr": 1.3773566983610992e-06, "epoch": 2.3421588594704685, "percentage": 78.23, "elapsed_time": "1:18:52", "remaining_time": "0:21:56"} +{"current_steps": 576, "total_steps": 735, "loss": 0.0372, "lr": 1.3610185079210514e-06, "epoch": 2.346232179226069, "percentage": 78.37, "elapsed_time": "1:18:57", "remaining_time": "0:21:47"} +{"current_steps": 577, "total_steps": 735, "loss": 0.0418, "lr": 1.34476251824531e-06, "epoch": 2.35030549898167, "percentage": 78.5, "elapsed_time": "1:19:06", "remaining_time": "0:21:39"} +{"current_steps": 578, "total_steps": 735, "loss": 0.0448, "lr": 1.3285890965395853e-06, "epoch": 2.354378818737271, "percentage": 78.64, "elapsed_time": "1:19:12", "remaining_time": "0:21:30"} +{"current_steps": 579, "total_steps": 735, "loss": 0.0459, "lr": 1.3124986081444625e-06, "epoch": 2.3584521384928716, "percentage": 78.78, "elapsed_time": "1:19:19", "remaining_time": "0:21:22"} +{"current_steps": 580, "total_steps": 735, "loss": 0.0424, "lr": 1.296491416527147e-06, "epoch": 2.3625254582484727, "percentage": 78.91, "elapsed_time": "1:19:27", "remaining_time": "0:21:14"} +{"current_steps": 581, 
"total_steps": 735, "loss": 0.0522, "lr": 1.2805678832732627e-06, "epoch": 2.3665987780040734, "percentage": 79.05, "elapsed_time": "1:19:36", "remaining_time": "0:21:06"} +{"current_steps": 582, "total_steps": 735, "loss": 0.0403, "lr": 1.264728368078678e-06, "epoch": 2.370672097759674, "percentage": 79.18, "elapsed_time": "1:19:42", "remaining_time": "0:20:57"} +{"current_steps": 583, "total_steps": 735, "loss": 0.0435, "lr": 1.248973228741378e-06, "epoch": 2.374745417515275, "percentage": 79.32, "elapsed_time": "1:19:52", "remaining_time": "0:20:49"} +{"current_steps": 584, "total_steps": 735, "loss": 0.0399, "lr": 1.2333028211533916e-06, "epoch": 2.378818737270876, "percentage": 79.46, "elapsed_time": "1:20:01", "remaining_time": "0:20:41"} +{"current_steps": 585, "total_steps": 735, "loss": 0.0457, "lr": 1.21771749929274e-06, "epoch": 2.3828920570264764, "percentage": 79.59, "elapsed_time": "1:20:08", "remaining_time": "0:20:32"} +{"current_steps": 586, "total_steps": 735, "loss": 0.0422, "lr": 1.2022176152154525e-06, "epoch": 2.3869653767820775, "percentage": 79.73, "elapsed_time": "1:20:14", "remaining_time": "0:20:24"} +{"current_steps": 587, "total_steps": 735, "loss": 0.0408, "lr": 1.1868035190476085e-06, "epoch": 2.391038696537678, "percentage": 79.86, "elapsed_time": "1:20:21", "remaining_time": "0:20:15"} +{"current_steps": 588, "total_steps": 735, "loss": 0.0374, "lr": 1.1714755589774252e-06, "epoch": 2.395112016293279, "percentage": 80.0, "elapsed_time": "1:20:27", "remaining_time": "0:20:06"} +{"current_steps": 589, "total_steps": 735, "loss": 0.0331, "lr": 1.1562340812474004e-06, "epoch": 2.39918533604888, "percentage": 80.14, "elapsed_time": "1:20:33", "remaining_time": "0:19:58"} +{"current_steps": 590, "total_steps": 735, "loss": 0.0381, "lr": 1.1410794301464817e-06, "epoch": 2.4032586558044806, "percentage": 80.27, "elapsed_time": "1:20:40", "remaining_time": "0:19:49"} +{"current_steps": 591, "total_steps": 735, "loss": 0.0338, "lr": 
1.1260119480023008e-06, "epoch": 2.4073319755600817, "percentage": 80.41, "elapsed_time": "1:20:47", "remaining_time": "0:19:41"} +{"current_steps": 592, "total_steps": 735, "loss": 0.0382, "lr": 1.1110319751734271e-06, "epoch": 2.4114052953156824, "percentage": 80.54, "elapsed_time": "1:20:53", "remaining_time": "0:19:32"} +{"current_steps": 593, "total_steps": 735, "loss": 0.0388, "lr": 1.0961398500416926e-06, "epoch": 2.415478615071283, "percentage": 80.68, "elapsed_time": "1:21:01", "remaining_time": "0:19:24"} +{"current_steps": 594, "total_steps": 735, "loss": 0.039, "lr": 1.0813359090045412e-06, "epoch": 2.4195519348268837, "percentage": 80.82, "elapsed_time": "1:21:08", "remaining_time": "0:19:15"} +{"current_steps": 595, "total_steps": 735, "loss": 0.0353, "lr": 1.0666204864674263e-06, "epoch": 2.423625254582485, "percentage": 80.95, "elapsed_time": "1:21:17", "remaining_time": "0:19:07"} +{"current_steps": 596, "total_steps": 735, "loss": 0.0439, "lr": 1.0519939148362667e-06, "epoch": 2.4276985743380854, "percentage": 81.09, "elapsed_time": "1:21:25", "remaining_time": "0:18:59"} +{"current_steps": 597, "total_steps": 735, "loss": 0.0365, "lr": 1.0374565245099328e-06, "epoch": 2.4317718940936865, "percentage": 81.22, "elapsed_time": "1:21:32", "remaining_time": "0:18:50"} +{"current_steps": 598, "total_steps": 735, "loss": 0.0426, "lr": 1.0230086438727771e-06, "epoch": 2.435845213849287, "percentage": 81.36, "elapsed_time": "1:21:45", "remaining_time": "0:18:43"} +{"current_steps": 599, "total_steps": 735, "loss": 0.0425, "lr": 1.0086505992872304e-06, "epoch": 2.439918533604888, "percentage": 81.5, "elapsed_time": "1:21:54", "remaining_time": "0:18:35"} +{"current_steps": 600, "total_steps": 735, "loss": 0.0387, "lr": 9.943827150864143e-07, "epoch": 2.443991853360489, "percentage": 81.63, "elapsed_time": "1:22:01", "remaining_time": "0:18:27"} +{"current_steps": 601, "total_steps": 735, "loss": 0.0405, "lr": 9.80205313566827e-07, "epoch": 
2.4480651731160896, "percentage": 81.77, "elapsed_time": "1:22:08", "remaining_time": "0:18:18"} +{"current_steps": 602, "total_steps": 735, "loss": 0.0446, "lr": 9.66118714981058e-07, "epoch": 2.4521384928716903, "percentage": 81.9, "elapsed_time": "1:22:16", "remaining_time": "0:18:10"} +{"current_steps": 603, "total_steps": 735, "loss": 0.0399, "lr": 9.521232375305494e-07, "epoch": 2.4562118126272914, "percentage": 82.04, "elapsed_time": "1:22:27", "remaining_time": "0:18:03"} +{"current_steps": 604, "total_steps": 735, "loss": 0.03, "lr": 9.382191973584193e-07, "epoch": 2.460285132382892, "percentage": 82.18, "elapsed_time": "1:22:34", "remaining_time": "0:17:54"} +{"current_steps": 605, "total_steps": 735, "loss": 0.0498, "lr": 9.244069085423074e-07, "epoch": 2.4643584521384927, "percentage": 82.31, "elapsed_time": "1:22:44", "remaining_time": "0:17:46"} +{"current_steps": 606, "total_steps": 735, "loss": 0.0339, "lr": 9.106866830872929e-07, "epoch": 2.468431771894094, "percentage": 82.45, "elapsed_time": "1:22:51", "remaining_time": "0:17:38"} +{"current_steps": 607, "total_steps": 735, "loss": 0.043, "lr": 8.970588309188343e-07, "epoch": 2.4725050916496945, "percentage": 82.59, "elapsed_time": "1:22:58", "remaining_time": "0:17:29"} +{"current_steps": 608, "total_steps": 735, "loss": 0.0428, "lr": 8.835236598757796e-07, "epoch": 2.4765784114052956, "percentage": 82.72, "elapsed_time": "1:23:06", "remaining_time": "0:17:21"} +{"current_steps": 609, "total_steps": 735, "loss": 0.0347, "lr": 8.70081475703406e-07, "epoch": 2.480651731160896, "percentage": 82.86, "elapsed_time": "1:23:14", "remaining_time": "0:17:13"} +{"current_steps": 610, "total_steps": 735, "loss": 0.0495, "lr": 8.567325820465156e-07, "epoch": 2.484725050916497, "percentage": 82.99, "elapsed_time": "1:23:21", "remaining_time": "0:17:04"} +{"current_steps": 611, "total_steps": 735, "loss": 0.0408, "lr": 8.434772804425734e-07, "epoch": 2.4887983706720975, "percentage": 83.13, "elapsed_time": 
"1:23:29", "remaining_time": "0:16:56"} +{"current_steps": 612, "total_steps": 735, "loss": 0.0411, "lr": 8.303158703149023e-07, "epoch": 2.4928716904276986, "percentage": 83.27, "elapsed_time": "1:23:36", "remaining_time": "0:16:48"} +{"current_steps": 613, "total_steps": 735, "loss": 0.0393, "lr": 8.172486489659115e-07, "epoch": 2.4969450101832993, "percentage": 83.4, "elapsed_time": "1:23:44", "remaining_time": "0:16:39"} +{"current_steps": 614, "total_steps": 735, "loss": 0.0412, "lr": 8.042759115703891e-07, "epoch": 2.5010183299389004, "percentage": 83.54, "elapsed_time": "1:23:53", "remaining_time": "0:16:31"} +{"current_steps": 615, "total_steps": 735, "loss": 0.0409, "lr": 7.913979511688252e-07, "epoch": 2.505091649694501, "percentage": 83.67, "elapsed_time": "1:24:00", "remaining_time": "0:16:23"} +{"current_steps": 616, "total_steps": 735, "loss": 0.0477, "lr": 7.78615058660801e-07, "epoch": 2.5091649694501017, "percentage": 83.81, "elapsed_time": "1:24:11", "remaining_time": "0:16:15"} +{"current_steps": 617, "total_steps": 735, "loss": 0.0379, "lr": 7.659275227984142e-07, "epoch": 2.513238289205703, "percentage": 83.95, "elapsed_time": "1:24:18", "remaining_time": "0:16:07"} +{"current_steps": 618, "total_steps": 735, "loss": 0.0398, "lr": 7.533356301797523e-07, "epoch": 2.5173116089613035, "percentage": 84.08, "elapsed_time": "1:24:26", "remaining_time": "0:15:59"} +{"current_steps": 619, "total_steps": 735, "loss": 0.0345, "lr": 7.408396652424271e-07, "epoch": 2.521384928716904, "percentage": 84.22, "elapsed_time": "1:24:33", "remaining_time": "0:15:50"} +{"current_steps": 620, "total_steps": 735, "loss": 0.053, "lr": 7.28439910257141e-07, "epoch": 2.525458248472505, "percentage": 84.35, "elapsed_time": "1:24:41", "remaining_time": "0:15:42"} +{"current_steps": 621, "total_steps": 735, "loss": 0.0382, "lr": 7.161366453213181e-07, "epoch": 2.529531568228106, "percentage": 84.49, "elapsed_time": "1:24:47", "remaining_time": "0:15:33"} +{"current_steps": 
622, "total_steps": 735, "loss": 0.041, "lr": 7.03930148352771e-07, "epoch": 2.5336048879837065, "percentage": 84.63, "elapsed_time": "1:24:57", "remaining_time": "0:15:26"} +{"current_steps": 623, "total_steps": 735, "loss": 0.0433, "lr": 6.918206950834283e-07, "epoch": 2.5376782077393076, "percentage": 84.76, "elapsed_time": "1:25:08", "remaining_time": "0:15:18"} +{"current_steps": 624, "total_steps": 735, "loss": 0.0403, "lr": 6.798085590531012e-07, "epoch": 2.5417515274949083, "percentage": 84.9, "elapsed_time": "1:25:15", "remaining_time": "0:15:10"} +{"current_steps": 625, "total_steps": 735, "loss": 0.0307, "lr": 6.678940116033095e-07, "epoch": 2.5458248472505094, "percentage": 85.03, "elapsed_time": "1:25:23", "remaining_time": "0:15:01"} +{"current_steps": 626, "total_steps": 735, "loss": 0.0387, "lr": 6.560773218711458e-07, "epoch": 2.54989816700611, "percentage": 85.17, "elapsed_time": "1:25:30", "remaining_time": "0:14:53"} +{"current_steps": 627, "total_steps": 735, "loss": 0.0364, "lr": 6.443587567832044e-07, "epoch": 2.5539714867617107, "percentage": 85.31, "elapsed_time": "1:25:38", "remaining_time": "0:14:45"} +{"current_steps": 628, "total_steps": 735, "loss": 0.0396, "lr": 6.327385810495423e-07, "epoch": 2.5580448065173114, "percentage": 85.44, "elapsed_time": "1:25:45", "remaining_time": "0:14:36"} +{"current_steps": 629, "total_steps": 735, "loss": 0.0433, "lr": 6.212170571577087e-07, "epoch": 2.5621181262729125, "percentage": 85.58, "elapsed_time": "1:25:52", "remaining_time": "0:14:28"} +{"current_steps": 630, "total_steps": 735, "loss": 0.0398, "lr": 6.097944453668081e-07, "epoch": 2.566191446028513, "percentage": 85.71, "elapsed_time": "1:26:00", "remaining_time": "0:14:20"} +{"current_steps": 631, "total_steps": 735, "loss": 0.036, "lr": 5.984710037016267e-07, "epoch": 2.5702647657841142, "percentage": 85.85, "elapsed_time": "1:26:07", "remaining_time": "0:14:11"} +{"current_steps": 632, "total_steps": 735, "loss": 0.051, "lr": 
5.872469879468024e-07, "epoch": 2.574338085539715, "percentage": 85.99, "elapsed_time": "1:26:17", "remaining_time": "0:14:03"} +{"current_steps": 633, "total_steps": 735, "loss": 0.0336, "lr": 5.761226516410434e-07, "epoch": 2.5784114052953155, "percentage": 86.12, "elapsed_time": "1:26:25", "remaining_time": "0:13:55"} +{"current_steps": 634, "total_steps": 735, "loss": 0.0424, "lr": 5.650982460714083e-07, "epoch": 2.5824847250509166, "percentage": 86.26, "elapsed_time": "1:26:33", "remaining_time": "0:13:47"} +{"current_steps": 635, "total_steps": 735, "loss": 0.0416, "lr": 5.54174020267621e-07, "epoch": 2.5865580448065173, "percentage": 86.39, "elapsed_time": "1:26:39", "remaining_time": "0:13:38"} +{"current_steps": 636, "total_steps": 735, "loss": 0.0386, "lr": 5.433502209964531e-07, "epoch": 2.5906313645621184, "percentage": 86.53, "elapsed_time": "1:26:47", "remaining_time": "0:13:30"} +{"current_steps": 637, "total_steps": 735, "loss": 0.0433, "lr": 5.326270927561444e-07, "epoch": 2.594704684317719, "percentage": 86.67, "elapsed_time": "1:26:54", "remaining_time": "0:13:22"} +{"current_steps": 638, "total_steps": 735, "loss": 0.0406, "lr": 5.22004877770883e-07, "epoch": 2.5987780040733197, "percentage": 86.8, "elapsed_time": "1:27:02", "remaining_time": "0:13:14"} +{"current_steps": 639, "total_steps": 735, "loss": 0.0413, "lr": 5.114838159853336e-07, "epoch": 2.6028513238289204, "percentage": 86.94, "elapsed_time": "1:27:09", "remaining_time": "0:13:05"} +{"current_steps": 640, "total_steps": 735, "loss": 0.0424, "lr": 5.010641450592158e-07, "epoch": 2.6069246435845215, "percentage": 87.07, "elapsed_time": "1:27:19", "remaining_time": "0:12:57"} +{"current_steps": 641, "total_steps": 735, "loss": 0.0457, "lr": 4.907461003619346e-07, "epoch": 2.610997963340122, "percentage": 87.21, "elapsed_time": "1:27:27", "remaining_time": "0:12:49"} +{"current_steps": 642, "total_steps": 735, "loss": 0.0409, "lr": 4.805299149672682e-07, "epoch": 2.6150712830957232, 
"percentage": 87.35, "elapsed_time": "1:27:35", "remaining_time": "0:12:41"} +{"current_steps": 643, "total_steps": 735, "loss": 0.0375, "lr": 4.7041581964809733e-07, "epoch": 2.619144602851324, "percentage": 87.48, "elapsed_time": "1:27:43", "remaining_time": "0:12:33"} +{"current_steps": 644, "total_steps": 735, "loss": 0.0425, "lr": 4.6040404287119924e-07, "epoch": 2.6232179226069245, "percentage": 87.62, "elapsed_time": "1:27:49", "remaining_time": "0:12:24"} +{"current_steps": 645, "total_steps": 735, "loss": 0.037, "lr": 4.504948107920781e-07, "epoch": 2.627291242362525, "percentage": 87.76, "elapsed_time": "1:27:57", "remaining_time": "0:12:16"} +{"current_steps": 646, "total_steps": 735, "loss": 0.0404, "lr": 4.4068834724986466e-07, "epoch": 2.6313645621181263, "percentage": 87.89, "elapsed_time": "1:28:04", "remaining_time": "0:12:08"} +{"current_steps": 647, "total_steps": 735, "loss": 0.0405, "lr": 4.309848737622568e-07, "epoch": 2.635437881873727, "percentage": 88.03, "elapsed_time": "1:28:14", "remaining_time": "0:12:00"} +{"current_steps": 648, "total_steps": 735, "loss": 0.037, "lr": 4.213846095205126e-07, "epoch": 2.639511201629328, "percentage": 88.16, "elapsed_time": "1:28:21", "remaining_time": "0:11:51"} +{"current_steps": 649, "total_steps": 735, "loss": 0.0635, "lr": 4.1188777138450487e-07, "epoch": 2.6435845213849287, "percentage": 88.3, "elapsed_time": "1:28:28", "remaining_time": "0:11:43"} +{"current_steps": 650, "total_steps": 735, "loss": 0.045, "lr": 4.024945738778163e-07, "epoch": 2.6476578411405294, "percentage": 88.44, "elapsed_time": "1:28:37", "remaining_time": "0:11:35"} +{"current_steps": 651, "total_steps": 735, "loss": 0.0427, "lr": 3.9320522918289973e-07, "epoch": 2.6517311608961305, "percentage": 88.57, "elapsed_time": "1:28:45", "remaining_time": "0:11:27"} +{"current_steps": 652, "total_steps": 735, "loss": 0.0444, "lr": 3.8401994713628044e-07, "epoch": 2.655804480651731, "percentage": 88.71, "elapsed_time": "1:28:53", 
"remaining_time": "0:11:18"} +{"current_steps": 653, "total_steps": 735, "loss": 0.0368, "lr": 3.7493893522381866e-07, "epoch": 2.6598778004073322, "percentage": 88.84, "elapsed_time": "1:29:01", "remaining_time": "0:11:10"} +{"current_steps": 654, "total_steps": 735, "loss": 0.0448, "lr": 3.6596239857602136e-07, "epoch": 2.663951120162933, "percentage": 88.98, "elapsed_time": "1:29:10", "remaining_time": "0:11:02"} +{"current_steps": 655, "total_steps": 735, "loss": 0.0381, "lr": 3.570905399634111e-07, "epoch": 2.6680244399185336, "percentage": 89.12, "elapsed_time": "1:29:19", "remaining_time": "0:10:54"} +{"current_steps": 656, "total_steps": 735, "loss": 0.0377, "lr": 3.483235597919404e-07, "epoch": 2.672097759674134, "percentage": 89.25, "elapsed_time": "1:29:29", "remaining_time": "0:10:46"} +{"current_steps": 657, "total_steps": 735, "loss": 0.0351, "lr": 3.396616560984711e-07, "epoch": 2.6761710794297353, "percentage": 89.39, "elapsed_time": "1:29:35", "remaining_time": "0:10:38"} +{"current_steps": 658, "total_steps": 735, "loss": 0.036, "lr": 3.31105024546296e-07, "epoch": 2.680244399185336, "percentage": 89.52, "elapsed_time": "1:29:44", "remaining_time": "0:10:30"} +{"current_steps": 659, "total_steps": 735, "loss": 0.0459, "lr": 3.226538584207228e-07, "epoch": 2.684317718940937, "percentage": 89.66, "elapsed_time": "1:29:53", "remaining_time": "0:10:22"} +{"current_steps": 660, "total_steps": 735, "loss": 0.0554, "lr": 3.1430834862470395e-07, "epoch": 2.6883910386965377, "percentage": 89.8, "elapsed_time": "1:30:02", "remaining_time": "0:10:13"} +{"current_steps": 661, "total_steps": 735, "loss": 0.0561, "lr": 3.0606868367452746e-07, "epoch": 2.6924643584521384, "percentage": 89.93, "elapsed_time": "1:30:10", "remaining_time": "0:10:05"} +{"current_steps": 662, "total_steps": 735, "loss": 0.0433, "lr": 2.9793504969555965e-07, "epoch": 2.696537678207739, "percentage": 90.07, "elapsed_time": "1:30:19", "remaining_time": "0:09:57"} +{"current_steps": 663, 
"total_steps": 735, "loss": 0.0393, "lr": 2.899076304180348e-07, "epoch": 2.70061099796334, "percentage": 90.2, "elapsed_time": "1:30:25", "remaining_time": "0:09:49"} +{"current_steps": 664, "total_steps": 735, "loss": 0.0571, "lr": 2.819866071729127e-07, "epoch": 2.704684317718941, "percentage": 90.34, "elapsed_time": "1:30:34", "remaining_time": "0:09:41"} +{"current_steps": 665, "total_steps": 735, "loss": 0.0367, "lr": 2.7417215888777493e-07, "epoch": 2.708757637474542, "percentage": 90.48, "elapsed_time": "1:30:42", "remaining_time": "0:09:32"} +{"current_steps": 666, "total_steps": 735, "loss": 0.0373, "lr": 2.6646446208279054e-07, "epoch": 2.7128309572301426, "percentage": 90.61, "elapsed_time": "1:30:50", "remaining_time": "0:09:24"} +{"current_steps": 667, "total_steps": 735, "loss": 0.0423, "lr": 2.5886369086672193e-07, "epoch": 2.716904276985743, "percentage": 90.75, "elapsed_time": "1:30:58", "remaining_time": "0:09:16"} +{"current_steps": 668, "total_steps": 735, "loss": 0.0365, "lr": 2.513700169329963e-07, "epoch": 2.7209775967413443, "percentage": 90.88, "elapsed_time": "1:31:05", "remaining_time": "0:09:08"} +{"current_steps": 669, "total_steps": 735, "loss": 0.045, "lr": 2.439836095558262e-07, "epoch": 2.725050916496945, "percentage": 91.02, "elapsed_time": "1:31:12", "remaining_time": "0:08:59"} +{"current_steps": 670, "total_steps": 735, "loss": 0.0369, "lr": 2.3670463558638556e-07, "epoch": 2.729124236252546, "percentage": 91.16, "elapsed_time": "1:31:21", "remaining_time": "0:08:51"} +{"current_steps": 671, "total_steps": 735, "loss": 0.0368, "lr": 2.2953325944903848e-07, "epoch": 2.7331975560081467, "percentage": 91.29, "elapsed_time": "1:31:28", "remaining_time": "0:08:43"} +{"current_steps": 672, "total_steps": 735, "loss": 0.0406, "lr": 2.2246964313763053e-07, "epoch": 2.7372708757637474, "percentage": 91.43, "elapsed_time": "1:31:36", "remaining_time": "0:08:35"} +{"current_steps": 673, "total_steps": 735, "loss": 0.0413, "lr": 
2.1551394621182277e-07, "epoch": 2.741344195519348, "percentage": 91.56, "elapsed_time": "1:31:42", "remaining_time": "0:08:26"} +{"current_steps": 674, "total_steps": 735, "loss": 0.0414, "lr": 2.08666325793494e-07, "epoch": 2.745417515274949, "percentage": 91.7, "elapsed_time": "1:31:49", "remaining_time": "0:08:18"} +{"current_steps": 675, "total_steps": 735, "loss": 0.0416, "lr": 2.0192693656318597e-07, "epoch": 2.74949083503055, "percentage": 91.84, "elapsed_time": "1:31:56", "remaining_time": "0:08:10"} +{"current_steps": 676, "total_steps": 735, "loss": 0.0412, "lr": 1.9529593075661267e-07, "epoch": 2.753564154786151, "percentage": 91.97, "elapsed_time": "1:32:07", "remaining_time": "0:08:02"} +{"current_steps": 677, "total_steps": 735, "loss": 0.038, "lr": 1.8877345816122162e-07, "epoch": 2.7576374745417516, "percentage": 92.11, "elapsed_time": "1:32:15", "remaining_time": "0:07:54"} +{"current_steps": 678, "total_steps": 735, "loss": 0.039, "lr": 1.8235966611280687e-07, "epoch": 2.7617107942973522, "percentage": 92.24, "elapsed_time": "1:32:23", "remaining_time": "0:07:46"} +{"current_steps": 679, "total_steps": 735, "loss": 0.0389, "lr": 1.760546994921858e-07, "epoch": 2.765784114052953, "percentage": 92.38, "elapsed_time": "1:32:32", "remaining_time": "0:07:37"} +{"current_steps": 680, "total_steps": 735, "loss": 0.0452, "lr": 1.6985870072192156e-07, "epoch": 2.769857433808554, "percentage": 92.52, "elapsed_time": "1:32:40", "remaining_time": "0:07:29"} +{"current_steps": 681, "total_steps": 735, "loss": 0.0419, "lr": 1.6377180976310968e-07, "epoch": 2.7739307535641546, "percentage": 92.65, "elapsed_time": "1:32:47", "remaining_time": "0:07:21"} +{"current_steps": 682, "total_steps": 735, "loss": 0.0384, "lr": 1.5779416411221437e-07, "epoch": 2.7780040733197557, "percentage": 92.79, "elapsed_time": "1:32:55", "remaining_time": "0:07:13"} +{"current_steps": 683, "total_steps": 735, "loss": 0.0356, "lr": 1.5192589879796383e-07, "epoch": 2.7820773930753564, 
"percentage": 92.93, "elapsed_time": "1:33:03", "remaining_time": "0:07:05"} +{"current_steps": 684, "total_steps": 735, "loss": 0.0389, "lr": 1.4616714637829822e-07, "epoch": 2.786150712830957, "percentage": 93.06, "elapsed_time": "1:33:11", "remaining_time": "0:06:56"} +{"current_steps": 685, "total_steps": 735, "loss": 0.0403, "lr": 1.4051803693737876e-07, "epoch": 2.790224032586558, "percentage": 93.2, "elapsed_time": "1:33:21", "remaining_time": "0:06:48"} +{"current_steps": 686, "total_steps": 735, "loss": 0.0626, "lr": 1.3497869808264453e-07, "epoch": 2.794297352342159, "percentage": 93.33, "elapsed_time": "1:33:28", "remaining_time": "0:06:40"} +{"current_steps": 687, "total_steps": 735, "loss": 0.0395, "lr": 1.2954925494193472e-07, "epoch": 2.79837067209776, "percentage": 93.47, "elapsed_time": "1:33:38", "remaining_time": "0:06:32"} +{"current_steps": 688, "total_steps": 735, "loss": 0.0371, "lr": 1.2422983016065816e-07, "epoch": 2.8024439918533606, "percentage": 93.61, "elapsed_time": "1:33:45", "remaining_time": "0:06:24"} +{"current_steps": 689, "total_steps": 735, "loss": 0.0606, "lr": 1.1902054389902662e-07, "epoch": 2.8065173116089612, "percentage": 93.74, "elapsed_time": "1:33:52", "remaining_time": "0:06:16"} +{"current_steps": 690, "total_steps": 735, "loss": 0.0368, "lr": 1.1392151382933647e-07, "epoch": 2.810590631364562, "percentage": 93.88, "elapsed_time": "1:34:00", "remaining_time": "0:06:07"} +{"current_steps": 691, "total_steps": 735, "loss": 0.0447, "lr": 1.0893285513331353e-07, "epoch": 2.814663951120163, "percentage": 94.01, "elapsed_time": "1:34:07", "remaining_time": "0:05:59"} +{"current_steps": 692, "total_steps": 735, "loss": 0.0392, "lr": 1.0405468049951184e-07, "epoch": 2.8187372708757636, "percentage": 94.15, "elapsed_time": "1:34:15", "remaining_time": "0:05:51"} +{"current_steps": 693, "total_steps": 735, "loss": 0.0451, "lr": 9.928710012076404e-08, "epoch": 2.8228105906313647, "percentage": 94.29, "elapsed_time": "1:34:27", 
"remaining_time": "0:05:43"} +{"current_steps": 694, "total_steps": 735, "loss": 0.0399, "lr": 9.463022169169666e-08, "epoch": 2.8268839103869654, "percentage": 94.42, "elapsed_time": "1:34:35", "remaining_time": "0:05:35"} +{"current_steps": 695, "total_steps": 735, "loss": 0.0353, "lr": 9.008415040629548e-08, "epoch": 2.830957230142566, "percentage": 94.56, "elapsed_time": "1:34:42", "remaining_time": "0:05:27"} +{"current_steps": 696, "total_steps": 735, "loss": 0.0464, "lr": 8.564898895552843e-08, "epoch": 2.835030549898167, "percentage": 94.69, "elapsed_time": "1:34:50", "remaining_time": "0:05:18"} +{"current_steps": 697, "total_steps": 735, "loss": 0.0309, "lr": 8.132483752502806e-08, "epoch": 2.839103869653768, "percentage": 94.83, "elapsed_time": "1:34:56", "remaining_time": "0:05:10"} +{"current_steps": 698, "total_steps": 735, "loss": 0.0427, "lr": 7.711179379282674e-08, "epoch": 2.8431771894093685, "percentage": 94.97, "elapsed_time": "1:35:07", "remaining_time": "0:05:02"} +{"current_steps": 699, "total_steps": 735, "loss": 0.0503, "lr": 7.300995292715107e-08, "epoch": 2.8472505091649696, "percentage": 95.1, "elapsed_time": "1:35:14", "remaining_time": "0:04:54"} +{"current_steps": 700, "total_steps": 735, "loss": 0.0496, "lr": 6.901940758427206e-08, "epoch": 2.8513238289205702, "percentage": 95.24, "elapsed_time": "1:35:25", "remaining_time": "0:04:46"} +{"current_steps": 701, "total_steps": 735, "loss": 0.0324, "lr": 6.514024790641116e-08, "epoch": 2.855397148676171, "percentage": 95.37, "elapsed_time": "1:35:32", "remaining_time": "0:04:38"} +{"current_steps": 702, "total_steps": 735, "loss": 0.0358, "lr": 6.137256151970583e-08, "epoch": 2.859470468431772, "percentage": 95.51, "elapsed_time": "1:35:39", "remaining_time": "0:04:29"} +{"current_steps": 703, "total_steps": 735, "loss": 0.0399, "lr": 5.771643353222778e-08, "epoch": 2.8635437881873727, "percentage": 95.65, "elapsed_time": "1:35:46", "remaining_time": "0:04:21"} +{"current_steps": 704, 
"total_steps": 735, "loss": 0.0396, "lr": 5.417194653206337e-08, "epoch": 2.8676171079429738, "percentage": 95.78, "elapsed_time": "1:35:52", "remaining_time": "0:04:13"} +{"current_steps": 705, "total_steps": 735, "loss": 0.0406, "lr": 5.073918058544458e-08, "epoch": 2.8716904276985744, "percentage": 95.92, "elapsed_time": "1:35:59", "remaining_time": "0:04:05"} +{"current_steps": 706, "total_steps": 735, "loss": 0.0371, "lr": 4.741821323494489e-08, "epoch": 2.875763747454175, "percentage": 96.05, "elapsed_time": "1:36:07", "remaining_time": "0:03:56"} +{"current_steps": 707, "total_steps": 735, "loss": 0.0347, "lr": 4.4209119497722883e-08, "epoch": 2.8798370672097757, "percentage": 96.19, "elapsed_time": "1:36:16", "remaining_time": "0:03:48"} +{"current_steps": 708, "total_steps": 735, "loss": 0.0373, "lr": 4.1111971863830866e-08, "epoch": 2.883910386965377, "percentage": 96.33, "elapsed_time": "1:36:22", "remaining_time": "0:03:40"} +{"current_steps": 709, "total_steps": 735, "loss": 0.0385, "lr": 3.812684029457614e-08, "epoch": 2.8879837067209775, "percentage": 96.46, "elapsed_time": "1:36:29", "remaining_time": "0:03:32"} +{"current_steps": 710, "total_steps": 735, "loss": 0.0368, "lr": 3.525379222094061e-08, "epoch": 2.8920570264765786, "percentage": 96.6, "elapsed_time": "1:36:36", "remaining_time": "0:03:24"} +{"current_steps": 711, "total_steps": 735, "loss": 0.0387, "lr": 3.249289254205867e-08, "epoch": 2.8961303462321792, "percentage": 96.73, "elapsed_time": "1:36:44", "remaining_time": "0:03:15"} +{"current_steps": 712, "total_steps": 735, "loss": 0.0374, "lr": 2.984420362375007e-08, "epoch": 2.90020366598778, "percentage": 96.87, "elapsed_time": "1:36:53", "remaining_time": "0:03:07"} +{"current_steps": 713, "total_steps": 735, "loss": 0.0361, "lr": 2.7307785297111533e-08, "epoch": 2.904276985743381, "percentage": 97.01, "elapsed_time": "1:37:01", "remaining_time": "0:02:59"} +{"current_steps": 714, "total_steps": 735, "loss": 0.0396, "lr": 
2.488369485716513e-08, "epoch": 2.9083503054989817, "percentage": 97.14, "elapsed_time": "1:37:12", "remaining_time": "0:02:51"} +{"current_steps": 715, "total_steps": 735, "loss": 0.0349, "lr": 2.2571987061564827e-08, "epoch": 2.9124236252545828, "percentage": 97.28, "elapsed_time": "1:37:17", "remaining_time": "0:02:43"} +{"current_steps": 716, "total_steps": 735, "loss": 0.0405, "lr": 2.0372714129356375e-08, "epoch": 2.9164969450101834, "percentage": 97.41, "elapsed_time": "1:37:27", "remaining_time": "0:02:35"} +{"current_steps": 717, "total_steps": 735, "loss": 0.036, "lr": 1.8285925739803812e-08, "epoch": 2.920570264765784, "percentage": 97.55, "elapsed_time": "1:37:34", "remaining_time": "0:02:26"} +{"current_steps": 718, "total_steps": 735, "loss": 0.0434, "lr": 1.631166903126147e-08, "epoch": 2.9246435845213847, "percentage": 97.69, "elapsed_time": "1:37:42", "remaining_time": "0:02:18"} +{"current_steps": 719, "total_steps": 735, "loss": 0.0495, "lr": 1.4449988600111486e-08, "epoch": 2.928716904276986, "percentage": 97.82, "elapsed_time": "1:37:50", "remaining_time": "0:02:10"} +{"current_steps": 720, "total_steps": 735, "loss": 0.0405, "lr": 1.2700926499756295e-08, "epoch": 2.9327902240325865, "percentage": 97.96, "elapsed_time": "1:37:59", "remaining_time": "0:02:02"} +{"current_steps": 721, "total_steps": 735, "loss": 0.0339, "lr": 1.1064522239669916e-08, "epoch": 2.9368635437881876, "percentage": 98.1, "elapsed_time": "1:38:05", "remaining_time": "0:01:54"} +{"current_steps": 722, "total_steps": 735, "loss": 0.0497, "lr": 9.54081278450314e-09, "epoch": 2.9409368635437882, "percentage": 98.23, "elapsed_time": "1:38:12", "remaining_time": "0:01:46"} +{"current_steps": 723, "total_steps": 735, "loss": 0.0393, "lr": 8.129832553249173e-09, "epoch": 2.945010183299389, "percentage": 98.37, "elapsed_time": "1:38:19", "remaining_time": "0:01:37"} +{"current_steps": 724, "total_steps": 735, "loss": 0.0445, "lr": 6.831613418468163e-09, "epoch": 
2.9490835030549896, "percentage": 98.5, "elapsed_time": "1:38:27", "remaining_time": "0:01:29"} +{"current_steps": 725, "total_steps": 735, "loss": 0.0325, "lr": 5.646184705563884e-09, "epoch": 2.9531568228105907, "percentage": 98.64, "elapsed_time": "1:38:35", "remaining_time": "0:01:21"} +{"current_steps": 726, "total_steps": 735, "loss": 0.0356, "lr": 4.573573192125369e-09, "epoch": 2.9572301425661913, "percentage": 98.78, "elapsed_time": "1:38:42", "remaining_time": "0:01:13"} +{"current_steps": 727, "total_steps": 735, "loss": 0.0463, "lr": 3.613803107317959e-09, "epoch": 2.9613034623217924, "percentage": 98.91, "elapsed_time": "1:38:53", "remaining_time": "0:01:05"} +{"current_steps": 728, "total_steps": 735, "loss": 0.0377, "lr": 2.7668961313376263e-09, "epoch": 2.965376782077393, "percentage": 99.05, "elapsed_time": "1:39:01", "remaining_time": "0:00:57"} +{"current_steps": 729, "total_steps": 735, "loss": 0.0466, "lr": 2.0328713949230304e-09, "epoch": 2.9694501018329937, "percentage": 99.18, "elapsed_time": "1:39:09", "remaining_time": "0:00:48"} +{"current_steps": 730, "total_steps": 735, "loss": 0.0427, "lr": 1.4117454789208673e-09, "epoch": 2.973523421588595, "percentage": 99.32, "elapsed_time": "1:39:15", "remaining_time": "0:00:40"} +{"current_steps": 731, "total_steps": 735, "loss": 0.0424, "lr": 9.03532413911723e-10, "epoch": 2.9775967413441955, "percentage": 99.46, "elapsed_time": "1:39:21", "remaining_time": "0:00:32"} +{"current_steps": 732, "total_steps": 735, "loss": 0.0305, "lr": 5.08243679894771e-10, "epoch": 2.9816700610997966, "percentage": 99.59, "elapsed_time": "1:39:29", "remaining_time": "0:00:24"} +{"current_steps": 733, "total_steps": 735, "loss": 0.047, "lr": 2.2588820602631457e-10, "epoch": 2.9857433808553973, "percentage": 99.73, "elapsed_time": "1:39:36", "remaining_time": "0:00:16"} +{"current_steps": 734, "total_steps": 735, "loss": 0.045, "lr": 5.6472370419391464e-11, "epoch": 2.989816700610998, "percentage": 99.86, 
"elapsed_time": "1:39:43", "remaining_time": "0:00:08"} +{"current_steps": 735, "total_steps": 735, "loss": 0.044, "lr": 0.0, "epoch": 2.9938900203665986, "percentage": 100.0, "elapsed_time": "1:39:51", "remaining_time": "0:00:00"} +{"current_steps": 735, "total_steps": 735, "epoch": 2.9938900203665986, "percentage": 100.0, "elapsed_time": "1:40:54", "remaining_time": "0:00:00"} diff --git a/trainer_state.json b/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..b42536e00495f036a5f5ee8712126387291917be --- /dev/null +++ b/trainer_state.json @@ -0,0 +1,5187 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.9938900203665986, + "eval_steps": 500, + "global_step": 735, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.004073319755600814, + "grad_norm": 14.739597738910254, + "learning_rate": 1.3513513513513515e-07, + "loss": 0.3301, + "step": 1 + }, + { + "epoch": 0.008146639511201629, + "grad_norm": 12.452379475537562, + "learning_rate": 2.702702702702703e-07, + "loss": 0.3036, + "step": 2 + }, + { + "epoch": 0.012219959266802444, + "grad_norm": 12.86836901323053, + "learning_rate": 4.0540540540540546e-07, + "loss": 0.3424, + "step": 3 + }, + { + "epoch": 0.016293279022403257, + "grad_norm": 13.43090696417895, + "learning_rate": 5.405405405405406e-07, + "loss": 0.3184, + "step": 4 + }, + { + "epoch": 0.020366598778004074, + "grad_norm": 14.809270827070751, + "learning_rate": 6.756756756756758e-07, + "loss": 0.3138, + "step": 5 + }, + { + "epoch": 0.024439918533604887, + "grad_norm": 12.082295065079254, + "learning_rate": 8.108108108108109e-07, + "loss": 0.2982, + "step": 6 + }, + { + "epoch": 0.028513238289205704, + "grad_norm": 8.711027825602685, + "learning_rate": 9.459459459459461e-07, + "loss": 0.3063, + "step": 7 + }, + { + "epoch": 0.032586558044806514, + "grad_norm": 7.652261107879573, + "learning_rate": 
1.0810810810810812e-06, + "loss": 0.2749, + "step": 8 + }, + { + "epoch": 0.03665987780040733, + "grad_norm": 7.726905307446712, + "learning_rate": 1.2162162162162164e-06, + "loss": 0.2718, + "step": 9 + }, + { + "epoch": 0.04073319755600815, + "grad_norm": 3.4676017287243446, + "learning_rate": 1.3513513513513515e-06, + "loss": 0.2238, + "step": 10 + }, + { + "epoch": 0.04480651731160896, + "grad_norm": 4.29492237739491, + "learning_rate": 1.4864864864864868e-06, + "loss": 0.2513, + "step": 11 + }, + { + "epoch": 0.048879837067209775, + "grad_norm": 3.5844077505877707, + "learning_rate": 1.6216216216216219e-06, + "loss": 0.2327, + "step": 12 + }, + { + "epoch": 0.05295315682281059, + "grad_norm": 7.031113851112456, + "learning_rate": 1.756756756756757e-06, + "loss": 0.239, + "step": 13 + }, + { + "epoch": 0.05702647657841141, + "grad_norm": 7.210450292846676, + "learning_rate": 1.8918918918918922e-06, + "loss": 0.277, + "step": 14 + }, + { + "epoch": 0.06109979633401222, + "grad_norm": 6.526062178388152, + "learning_rate": 2.0270270270270273e-06, + "loss": 0.2495, + "step": 15 + }, + { + "epoch": 0.06517311608961303, + "grad_norm": 5.157530413977274, + "learning_rate": 2.1621621621621623e-06, + "loss": 0.2425, + "step": 16 + }, + { + "epoch": 0.06924643584521385, + "grad_norm": 3.242500698401516, + "learning_rate": 2.297297297297298e-06, + "loss": 0.1985, + "step": 17 + }, + { + "epoch": 0.07331975560081466, + "grad_norm": 2.554097363562634, + "learning_rate": 2.432432432432433e-06, + "loss": 0.1822, + "step": 18 + }, + { + "epoch": 0.07739307535641547, + "grad_norm": 2.6535341941648047, + "learning_rate": 2.5675675675675675e-06, + "loss": 0.2252, + "step": 19 + }, + { + "epoch": 0.0814663951120163, + "grad_norm": 2.5516965886789076, + "learning_rate": 2.702702702702703e-06, + "loss": 0.1862, + "step": 20 + }, + { + "epoch": 0.0855397148676171, + "grad_norm": 2.6017363031504264, + "learning_rate": 2.837837837837838e-06, + "loss": 0.2098, + "step": 21 + }, + { + 
"epoch": 0.08961303462321792, + "grad_norm": 2.0129035331912184, + "learning_rate": 2.9729729729729736e-06, + "loss": 0.1699, + "step": 22 + }, + { + "epoch": 0.09368635437881874, + "grad_norm": 1.849938821231628, + "learning_rate": 3.1081081081081082e-06, + "loss": 0.1868, + "step": 23 + }, + { + "epoch": 0.09775967413441955, + "grad_norm": 1.764863585345639, + "learning_rate": 3.2432432432432437e-06, + "loss": 0.1669, + "step": 24 + }, + { + "epoch": 0.10183299389002037, + "grad_norm": 1.8494325730642949, + "learning_rate": 3.3783783783783788e-06, + "loss": 0.1507, + "step": 25 + }, + { + "epoch": 0.10590631364562118, + "grad_norm": 1.6304914383856781, + "learning_rate": 3.513513513513514e-06, + "loss": 0.1534, + "step": 26 + }, + { + "epoch": 0.109979633401222, + "grad_norm": 1.4579913516119778, + "learning_rate": 3.648648648648649e-06, + "loss": 0.1387, + "step": 27 + }, + { + "epoch": 0.11405295315682282, + "grad_norm": 1.6082049119577289, + "learning_rate": 3.7837837837837844e-06, + "loss": 0.1406, + "step": 28 + }, + { + "epoch": 0.11812627291242363, + "grad_norm": 1.4819461102449234, + "learning_rate": 3.918918918918919e-06, + "loss": 0.1323, + "step": 29 + }, + { + "epoch": 0.12219959266802444, + "grad_norm": 1.7009165157092279, + "learning_rate": 4.0540540540540545e-06, + "loss": 0.1577, + "step": 30 + }, + { + "epoch": 0.12627291242362526, + "grad_norm": 1.5721048343370418, + "learning_rate": 4.189189189189189e-06, + "loss": 0.1345, + "step": 31 + }, + { + "epoch": 0.13034623217922606, + "grad_norm": 1.5868902144082508, + "learning_rate": 4.324324324324325e-06, + "loss": 0.1641, + "step": 32 + }, + { + "epoch": 0.13441955193482688, + "grad_norm": 1.367409491711825, + "learning_rate": 4.45945945945946e-06, + "loss": 0.1568, + "step": 33 + }, + { + "epoch": 0.1384928716904277, + "grad_norm": 1.2082341226617432, + "learning_rate": 4.594594594594596e-06, + "loss": 0.1158, + "step": 34 + }, + { + "epoch": 0.1425661914460285, + "grad_norm": 1.2834846670425744, 
+ "learning_rate": 4.72972972972973e-06, + "loss": 0.1553, + "step": 35 + }, + { + "epoch": 0.14663951120162932, + "grad_norm": 1.4278045526468992, + "learning_rate": 4.864864864864866e-06, + "loss": 0.1472, + "step": 36 + }, + { + "epoch": 0.15071283095723015, + "grad_norm": 1.1433863309324082, + "learning_rate": 5e-06, + "loss": 0.1216, + "step": 37 + }, + { + "epoch": 0.15478615071283094, + "grad_norm": 1.2556861775151085, + "learning_rate": 5.135135135135135e-06, + "loss": 0.1383, + "step": 38 + }, + { + "epoch": 0.15885947046843177, + "grad_norm": 1.1940515610624718, + "learning_rate": 5.2702702702702705e-06, + "loss": 0.1488, + "step": 39 + }, + { + "epoch": 0.1629327902240326, + "grad_norm": 1.365361196469323, + "learning_rate": 5.405405405405406e-06, + "loss": 0.1658, + "step": 40 + }, + { + "epoch": 0.1670061099796334, + "grad_norm": 1.6375597676471598, + "learning_rate": 5.540540540540541e-06, + "loss": 0.1244, + "step": 41 + }, + { + "epoch": 0.1710794297352342, + "grad_norm": 1.058410205986207, + "learning_rate": 5.675675675675676e-06, + "loss": 0.1129, + "step": 42 + }, + { + "epoch": 0.17515274949083504, + "grad_norm": 1.468616447672182, + "learning_rate": 5.810810810810811e-06, + "loss": 0.176, + "step": 43 + }, + { + "epoch": 0.17922606924643583, + "grad_norm": 1.1292066998688302, + "learning_rate": 5.945945945945947e-06, + "loss": 0.1235, + "step": 44 + }, + { + "epoch": 0.18329938900203666, + "grad_norm": 1.1790440780653373, + "learning_rate": 6.081081081081082e-06, + "loss": 0.1352, + "step": 45 + }, + { + "epoch": 0.18737270875763748, + "grad_norm": 1.144770740193701, + "learning_rate": 6.2162162162162164e-06, + "loss": 0.1375, + "step": 46 + }, + { + "epoch": 0.19144602851323828, + "grad_norm": 1.3169675540020822, + "learning_rate": 6.351351351351351e-06, + "loss": 0.1451, + "step": 47 + }, + { + "epoch": 0.1955193482688391, + "grad_norm": 1.1364743430386761, + "learning_rate": 6.486486486486487e-06, + "loss": 0.1073, + "step": 48 + }, + { + 
"epoch": 0.19959266802443992, + "grad_norm": 1.3532964160734307, + "learning_rate": 6.621621621621622e-06, + "loss": 0.1502, + "step": 49 + }, + { + "epoch": 0.20366598778004075, + "grad_norm": 1.1049371458723167, + "learning_rate": 6.7567567567567575e-06, + "loss": 0.116, + "step": 50 + }, + { + "epoch": 0.20773930753564154, + "grad_norm": 1.0634720045604809, + "learning_rate": 6.891891891891892e-06, + "loss": 0.1438, + "step": 51 + }, + { + "epoch": 0.21181262729124237, + "grad_norm": 1.1677623232682453, + "learning_rate": 7.027027027027028e-06, + "loss": 0.1143, + "step": 52 + }, + { + "epoch": 0.2158859470468432, + "grad_norm": 1.2552603959178812, + "learning_rate": 7.162162162162163e-06, + "loss": 0.1443, + "step": 53 + }, + { + "epoch": 0.219959266802444, + "grad_norm": 1.1556616828254782, + "learning_rate": 7.297297297297298e-06, + "loss": 0.1341, + "step": 54 + }, + { + "epoch": 0.2240325865580448, + "grad_norm": 1.1241236805182522, + "learning_rate": 7.4324324324324324e-06, + "loss": 0.1283, + "step": 55 + }, + { + "epoch": 0.22810590631364563, + "grad_norm": 0.9867741809756463, + "learning_rate": 7.567567567567569e-06, + "loss": 0.1302, + "step": 56 + }, + { + "epoch": 0.23217922606924643, + "grad_norm": 1.0672327000495885, + "learning_rate": 7.702702702702704e-06, + "loss": 0.113, + "step": 57 + }, + { + "epoch": 0.23625254582484725, + "grad_norm": 1.0659735135074857, + "learning_rate": 7.837837837837838e-06, + "loss": 0.1293, + "step": 58 + }, + { + "epoch": 0.24032586558044808, + "grad_norm": 1.2422197356017706, + "learning_rate": 7.972972972972974e-06, + "loss": 0.164, + "step": 59 + }, + { + "epoch": 0.24439918533604887, + "grad_norm": 1.3538609671806645, + "learning_rate": 8.108108108108109e-06, + "loss": 0.1548, + "step": 60 + }, + { + "epoch": 0.2484725050916497, + "grad_norm": 1.0759558101958346, + "learning_rate": 8.243243243243245e-06, + "loss": 0.1225, + "step": 61 + }, + { + "epoch": 0.2525458248472505, + "grad_norm": 1.1244956381449198, + 
"learning_rate": 8.378378378378378e-06, + "loss": 0.1175, + "step": 62 + }, + { + "epoch": 0.25661914460285135, + "grad_norm": 1.171629685706723, + "learning_rate": 8.513513513513514e-06, + "loss": 0.1204, + "step": 63 + }, + { + "epoch": 0.2606924643584521, + "grad_norm": 1.2905585681894916, + "learning_rate": 8.64864864864865e-06, + "loss": 0.1253, + "step": 64 + }, + { + "epoch": 0.26476578411405294, + "grad_norm": 1.3979008428570314, + "learning_rate": 8.783783783783785e-06, + "loss": 0.191, + "step": 65 + }, + { + "epoch": 0.26883910386965376, + "grad_norm": 1.226756333773235, + "learning_rate": 8.91891891891892e-06, + "loss": 0.1287, + "step": 66 + }, + { + "epoch": 0.2729124236252546, + "grad_norm": 1.2835470528218054, + "learning_rate": 9.054054054054054e-06, + "loss": 0.138, + "step": 67 + }, + { + "epoch": 0.2769857433808554, + "grad_norm": 1.1622195270679896, + "learning_rate": 9.189189189189191e-06, + "loss": 0.1259, + "step": 68 + }, + { + "epoch": 0.28105906313645623, + "grad_norm": 1.1512666578576678, + "learning_rate": 9.324324324324325e-06, + "loss": 0.1292, + "step": 69 + }, + { + "epoch": 0.285132382892057, + "grad_norm": 0.9695391815507838, + "learning_rate": 9.45945945945946e-06, + "loss": 0.1142, + "step": 70 + }, + { + "epoch": 0.2892057026476578, + "grad_norm": 1.1262409828408337, + "learning_rate": 9.594594594594594e-06, + "loss": 0.1188, + "step": 71 + }, + { + "epoch": 0.29327902240325865, + "grad_norm": 0.9820966211674147, + "learning_rate": 9.729729729729732e-06, + "loss": 0.1052, + "step": 72 + }, + { + "epoch": 0.2973523421588595, + "grad_norm": 1.1058230077470572, + "learning_rate": 9.864864864864865e-06, + "loss": 0.1246, + "step": 73 + }, + { + "epoch": 0.3014256619144603, + "grad_norm": 1.3891942844370528, + "learning_rate": 1e-05, + "loss": 0.1651, + "step": 74 + }, + { + "epoch": 0.3054989816700611, + "grad_norm": 1.1373599847305171, + "learning_rate": 9.99994352762958e-06, + "loss": 0.1259, + "step": 75 + }, + { + "epoch": 
0.3095723014256619, + "grad_norm": 1.0803757941511039, + "learning_rate": 9.999774111793974e-06, + "loss": 0.1485, + "step": 76 + }, + { + "epoch": 0.3136456211812627, + "grad_norm": 1.509987566205336, + "learning_rate": 9.999491756320105e-06, + "loss": 0.1708, + "step": 77 + }, + { + "epoch": 0.31771894093686354, + "grad_norm": 1.3769318827034491, + "learning_rate": 9.99909646758609e-06, + "loss": 0.1483, + "step": 78 + }, + { + "epoch": 0.32179226069246436, + "grad_norm": 0.9995516357476201, + "learning_rate": 9.99858825452108e-06, + "loss": 0.1124, + "step": 79 + }, + { + "epoch": 0.3258655804480652, + "grad_norm": 1.4328593788226842, + "learning_rate": 9.997967128605078e-06, + "loss": 0.1849, + "step": 80 + }, + { + "epoch": 0.329938900203666, + "grad_norm": 1.0397129864144867, + "learning_rate": 9.997233103868664e-06, + "loss": 0.1199, + "step": 81 + }, + { + "epoch": 0.3340122199592668, + "grad_norm": 1.3312975796955133, + "learning_rate": 9.996386196892683e-06, + "loss": 0.1748, + "step": 82 + }, + { + "epoch": 0.3380855397148676, + "grad_norm": 1.2070448028045222, + "learning_rate": 9.995426426807875e-06, + "loss": 0.1449, + "step": 83 + }, + { + "epoch": 0.3421588594704684, + "grad_norm": 0.9786604342473315, + "learning_rate": 9.994353815294438e-06, + "loss": 0.1349, + "step": 84 + }, + { + "epoch": 0.34623217922606925, + "grad_norm": 1.16279378070579, + "learning_rate": 9.993168386581533e-06, + "loss": 0.1111, + "step": 85 + }, + { + "epoch": 0.35030549898167007, + "grad_norm": 1.0832386326974766, + "learning_rate": 9.991870167446751e-06, + "loss": 0.1271, + "step": 86 + }, + { + "epoch": 0.3543788187372709, + "grad_norm": 1.076044536856832, + "learning_rate": 9.990459187215498e-06, + "loss": 0.122, + "step": 87 + }, + { + "epoch": 0.35845213849287166, + "grad_norm": 1.1390626595350608, + "learning_rate": 9.98893547776033e-06, + "loss": 0.1429, + "step": 88 + }, + { + "epoch": 0.3625254582484725, + "grad_norm": 1.2799324833393828, + "learning_rate": 
9.987299073500245e-06, + "loss": 0.1789, + "step": 89 + }, + { + "epoch": 0.3665987780040733, + "grad_norm": 1.0088789278468007, + "learning_rate": 9.985550011399889e-06, + "loss": 0.1217, + "step": 90 + }, + { + "epoch": 0.37067209775967414, + "grad_norm": 1.0635380396962304, + "learning_rate": 9.98368833096874e-06, + "loss": 0.1517, + "step": 91 + }, + { + "epoch": 0.37474541751527496, + "grad_norm": 1.1149195586496816, + "learning_rate": 9.981714074260196e-06, + "loss": 0.1648, + "step": 92 + }, + { + "epoch": 0.3788187372708758, + "grad_norm": 0.9770064004740078, + "learning_rate": 9.979627285870644e-06, + "loss": 0.1173, + "step": 93 + }, + { + "epoch": 0.38289205702647655, + "grad_norm": 1.5786545324573935, + "learning_rate": 9.977428012938437e-06, + "loss": 0.2148, + "step": 94 + }, + { + "epoch": 0.3869653767820774, + "grad_norm": 0.9445672697637628, + "learning_rate": 9.975116305142836e-06, + "loss": 0.1272, + "step": 95 + }, + { + "epoch": 0.3910386965376782, + "grad_norm": 0.832092882135511, + "learning_rate": 9.97269221470289e-06, + "loss": 0.1149, + "step": 96 + }, + { + "epoch": 0.395112016293279, + "grad_norm": 0.8009975217381654, + "learning_rate": 9.97015579637625e-06, + "loss": 0.1081, + "step": 97 + }, + { + "epoch": 0.39918533604887985, + "grad_norm": 0.909000272396086, + "learning_rate": 9.967507107457942e-06, + "loss": 0.1249, + "step": 98 + }, + { + "epoch": 0.40325865580448067, + "grad_norm": 0.9894702747295367, + "learning_rate": 9.96474620777906e-06, + "loss": 0.1404, + "step": 99 + }, + { + "epoch": 0.4073319755600815, + "grad_norm": 1.1517905886733883, + "learning_rate": 9.961873159705426e-06, + "loss": 0.1433, + "step": 100 + }, + { + "epoch": 0.41140529531568226, + "grad_norm": 1.2806427058824508, + "learning_rate": 9.95888802813617e-06, + "loss": 0.1723, + "step": 101 + }, + { + "epoch": 0.4154786150712831, + "grad_norm": 0.919332585767889, + "learning_rate": 9.955790880502278e-06, + "loss": 0.1219, + "step": 102 + }, + { + "epoch": 
0.4195519348268839, + "grad_norm": 0.8901964293186232, + "learning_rate": 9.952581786765057e-06, + "loss": 0.1157, + "step": 103 + }, + { + "epoch": 0.42362525458248473, + "grad_norm": 1.3877972822654616, + "learning_rate": 9.949260819414557e-06, + "loss": 0.1642, + "step": 104 + }, + { + "epoch": 0.42769857433808556, + "grad_norm": 0.9602184939318458, + "learning_rate": 9.945828053467939e-06, + "loss": 0.1224, + "step": 105 + }, + { + "epoch": 0.4317718940936864, + "grad_norm": 1.230791876608231, + "learning_rate": 9.942283566467773e-06, + "loss": 0.1596, + "step": 106 + }, + { + "epoch": 0.43584521384928715, + "grad_norm": 1.1454248942159495, + "learning_rate": 9.938627438480295e-06, + "loss": 0.1541, + "step": 107 + }, + { + "epoch": 0.439918533604888, + "grad_norm": 1.0873300186194603, + "learning_rate": 9.93485975209359e-06, + "loss": 0.1533, + "step": 108 + }, + { + "epoch": 0.4439918533604888, + "grad_norm": 0.9668569607934798, + "learning_rate": 9.930980592415728e-06, + "loss": 0.1539, + "step": 109 + }, + { + "epoch": 0.4480651731160896, + "grad_norm": 1.487429443095859, + "learning_rate": 9.926990047072849e-06, + "loss": 0.2379, + "step": 110 + }, + { + "epoch": 0.45213849287169044, + "grad_norm": 1.036501582869458, + "learning_rate": 9.922888206207174e-06, + "loss": 0.1181, + "step": 111 + }, + { + "epoch": 0.45621181262729127, + "grad_norm": 0.9427386345315173, + "learning_rate": 9.918675162474974e-06, + "loss": 0.1157, + "step": 112 + }, + { + "epoch": 0.46028513238289204, + "grad_norm": 1.1671785006625848, + "learning_rate": 9.914351011044472e-06, + "loss": 0.1671, + "step": 113 + }, + { + "epoch": 0.46435845213849286, + "grad_norm": 0.8485104800209154, + "learning_rate": 9.909915849593705e-06, + "loss": 0.1094, + "step": 114 + }, + { + "epoch": 0.4684317718940937, + "grad_norm": 0.895507646361391, + "learning_rate": 9.905369778308304e-06, + "loss": 0.1205, + "step": 115 + }, + { + "epoch": 0.4725050916496945, + "grad_norm": 1.1024237478073182, + 
"learning_rate": 9.900712899879237e-06, + "loss": 0.1551, + "step": 116 + }, + { + "epoch": 0.47657841140529533, + "grad_norm": 1.0811464118865846, + "learning_rate": 9.895945319500488e-06, + "loss": 0.1402, + "step": 117 + }, + { + "epoch": 0.48065173116089616, + "grad_norm": 0.9829410685047446, + "learning_rate": 9.891067144866687e-06, + "loss": 0.1381, + "step": 118 + }, + { + "epoch": 0.4847250509164969, + "grad_norm": 0.8855824729064482, + "learning_rate": 9.886078486170665e-06, + "loss": 0.1038, + "step": 119 + }, + { + "epoch": 0.48879837067209775, + "grad_norm": 1.1091690462920576, + "learning_rate": 9.880979456100974e-06, + "loss": 0.1372, + "step": 120 + }, + { + "epoch": 0.49287169042769857, + "grad_norm": 0.907049897730717, + "learning_rate": 9.875770169839343e-06, + "loss": 0.1322, + "step": 121 + }, + { + "epoch": 0.4969450101832994, + "grad_norm": 1.0224824312976686, + "learning_rate": 9.870450745058066e-06, + "loss": 0.1257, + "step": 122 + }, + { + "epoch": 0.5010183299389002, + "grad_norm": 1.0439109698157967, + "learning_rate": 9.865021301917358e-06, + "loss": 0.1317, + "step": 123 + }, + { + "epoch": 0.505091649694501, + "grad_norm": 0.8972366065592501, + "learning_rate": 9.859481963062623e-06, + "loss": 0.1104, + "step": 124 + }, + { + "epoch": 0.5091649694501018, + "grad_norm": 0.916952485621608, + "learning_rate": 9.853832853621703e-06, + "loss": 0.124, + "step": 125 + }, + { + "epoch": 0.5132382892057027, + "grad_norm": 0.7586835858660547, + "learning_rate": 9.848074101202037e-06, + "loss": 0.1191, + "step": 126 + }, + { + "epoch": 0.5173116089613035, + "grad_norm": 0.9149593226270635, + "learning_rate": 9.842205835887785e-06, + "loss": 0.1188, + "step": 127 + }, + { + "epoch": 0.5213849287169042, + "grad_norm": 0.9483144871900878, + "learning_rate": 9.836228190236892e-06, + "loss": 0.1392, + "step": 128 + }, + { + "epoch": 0.5254582484725051, + "grad_norm": 1.1137009286811568, + "learning_rate": 9.83014129927808e-06, + "loss": 0.1331, + 
"step": 129 + }, + { + "epoch": 0.5295315682281059, + "grad_norm": 1.0049886812823983, + "learning_rate": 9.823945300507815e-06, + "loss": 0.1393, + "step": 130 + }, + { + "epoch": 0.5336048879837068, + "grad_norm": 1.0017821694016227, + "learning_rate": 9.817640333887194e-06, + "loss": 0.1376, + "step": 131 + }, + { + "epoch": 0.5376782077393075, + "grad_norm": 0.8770993451067021, + "learning_rate": 9.81122654183878e-06, + "loss": 0.1075, + "step": 132 + }, + { + "epoch": 0.5417515274949084, + "grad_norm": 0.8112662923925413, + "learning_rate": 9.804704069243389e-06, + "loss": 0.1149, + "step": 133 + }, + { + "epoch": 0.5458248472505092, + "grad_norm": 0.7783508225595258, + "learning_rate": 9.798073063436815e-06, + "loss": 0.1077, + "step": 134 + }, + { + "epoch": 0.5498981670061099, + "grad_norm": 1.6671316247114485, + "learning_rate": 9.791333674206507e-06, + "loss": 0.1892, + "step": 135 + }, + { + "epoch": 0.5539714867617108, + "grad_norm": 0.8856245620297392, + "learning_rate": 9.784486053788179e-06, + "loss": 0.1075, + "step": 136 + }, + { + "epoch": 0.5580448065173116, + "grad_norm": 2.0578900491298824, + "learning_rate": 9.77753035686237e-06, + "loss": 0.1472, + "step": 137 + }, + { + "epoch": 0.5621181262729125, + "grad_norm": 1.148525636808097, + "learning_rate": 9.770466740550963e-06, + "loss": 0.1598, + "step": 138 + }, + { + "epoch": 0.5661914460285132, + "grad_norm": 0.8665254831769179, + "learning_rate": 9.763295364413616e-06, + "loss": 0.1186, + "step": 139 + }, + { + "epoch": 0.570264765784114, + "grad_norm": 1.0970826186220186, + "learning_rate": 9.756016390444174e-06, + "loss": 0.1386, + "step": 140 + }, + { + "epoch": 0.5743380855397149, + "grad_norm": 0.9530034310899396, + "learning_rate": 9.748629983067004e-06, + "loss": 0.1282, + "step": 141 + }, + { + "epoch": 0.5784114052953157, + "grad_norm": 1.2706893271757027, + "learning_rate": 9.741136309133279e-06, + "loss": 0.1754, + "step": 142 + }, + { + "epoch": 0.5824847250509165, + "grad_norm": 
0.9703463762849697, + "learning_rate": 9.733535537917211e-06, + "loss": 0.1194, + "step": 143 + }, + { + "epoch": 0.5865580448065173, + "grad_norm": 0.8038414888371753, + "learning_rate": 9.725827841112226e-06, + "loss": 0.1162, + "step": 144 + }, + { + "epoch": 0.5906313645621182, + "grad_norm": 0.9411283645508486, + "learning_rate": 9.718013392827087e-06, + "loss": 0.1121, + "step": 145 + }, + { + "epoch": 0.594704684317719, + "grad_norm": 1.501666156048829, + "learning_rate": 9.710092369581966e-06, + "loss": 0.16, + "step": 146 + }, + { + "epoch": 0.5987780040733197, + "grad_norm": 0.9141719119872903, + "learning_rate": 9.702064950304442e-06, + "loss": 0.1211, + "step": 147 + }, + { + "epoch": 0.6028513238289206, + "grad_norm": 0.8652675727574004, + "learning_rate": 9.693931316325473e-06, + "loss": 0.0946, + "step": 148 + }, + { + "epoch": 0.6069246435845214, + "grad_norm": 0.7377787499846402, + "learning_rate": 9.685691651375297e-06, + "loss": 0.1016, + "step": 149 + }, + { + "epoch": 0.6109979633401222, + "grad_norm": 0.7630312206018969, + "learning_rate": 9.677346141579277e-06, + "loss": 0.1014, + "step": 150 + }, + { + "epoch": 0.615071283095723, + "grad_norm": 0.9718289359974593, + "learning_rate": 9.668894975453705e-06, + "loss": 0.1562, + "step": 151 + }, + { + "epoch": 0.6191446028513238, + "grad_norm": 1.004301729468449, + "learning_rate": 9.66033834390153e-06, + "loss": 0.1372, + "step": 152 + }, + { + "epoch": 0.6232179226069247, + "grad_norm": 0.9350824611493259, + "learning_rate": 9.65167644020806e-06, + "loss": 0.1254, + "step": 153 + }, + { + "epoch": 0.6272912423625254, + "grad_norm": 0.7612329276402703, + "learning_rate": 9.64290946003659e-06, + "loss": 0.0989, + "step": 154 + }, + { + "epoch": 0.6313645621181263, + "grad_norm": 0.7706614538086551, + "learning_rate": 9.63403760142398e-06, + "loss": 0.1013, + "step": 155 + }, + { + "epoch": 0.6354378818737271, + "grad_norm": 1.0210499034582712, + "learning_rate": 9.625061064776183e-06, + "loss": 
0.1134, + "step": 156 + }, + { + "epoch": 0.639511201629328, + "grad_norm": 0.7560805642981956, + "learning_rate": 9.61598005286372e-06, + "loss": 0.0939, + "step": 157 + }, + { + "epoch": 0.6435845213849287, + "grad_norm": 1.0834289937869723, + "learning_rate": 9.606794770817102e-06, + "loss": 0.1785, + "step": 158 + }, + { + "epoch": 0.6476578411405295, + "grad_norm": 1.0611196002268826, + "learning_rate": 9.597505426122184e-06, + "loss": 0.1571, + "step": 159 + }, + { + "epoch": 0.6517311608961304, + "grad_norm": 1.0914261737532949, + "learning_rate": 9.588112228615495e-06, + "loss": 0.1745, + "step": 160 + }, + { + "epoch": 0.6558044806517311, + "grad_norm": 0.953948451978483, + "learning_rate": 9.57861539047949e-06, + "loss": 0.1353, + "step": 161 + }, + { + "epoch": 0.659877800407332, + "grad_norm": 1.2562247665468482, + "learning_rate": 9.569015126237744e-06, + "loss": 0.1521, + "step": 162 + }, + { + "epoch": 0.6639511201629328, + "grad_norm": 0.8283783602425362, + "learning_rate": 9.559311652750135e-06, + "loss": 0.1161, + "step": 163 + }, + { + "epoch": 0.6680244399185336, + "grad_norm": 0.7823509791751794, + "learning_rate": 9.549505189207924e-06, + "loss": 0.0976, + "step": 164 + }, + { + "epoch": 0.6720977596741344, + "grad_norm": 1.118258806444578, + "learning_rate": 9.539595957128803e-06, + "loss": 0.171, + "step": 165 + }, + { + "epoch": 0.6761710794297352, + "grad_norm": 0.7563799438807557, + "learning_rate": 9.529584180351902e-06, + "loss": 0.1159, + "step": 166 + }, + { + "epoch": 0.6802443991853361, + "grad_norm": 1.0059732424782886, + "learning_rate": 9.519470085032733e-06, + "loss": 0.1278, + "step": 167 + }, + { + "epoch": 0.6843177189409368, + "grad_norm": 0.8261325503708756, + "learning_rate": 9.509253899638066e-06, + "loss": 0.104, + "step": 168 + }, + { + "epoch": 0.6883910386965377, + "grad_norm": 1.1918252125330613, + "learning_rate": 9.498935854940785e-06, + "loss": 0.1682, + "step": 169 + }, + { + "epoch": 0.6924643584521385, + 
"grad_norm": 0.7216709177105455, + "learning_rate": 9.488516184014667e-06, + "loss": 0.1089, + "step": 170 + }, + { + "epoch": 0.6965376782077393, + "grad_norm": 0.8952054280934858, + "learning_rate": 9.477995122229117e-06, + "loss": 0.1521, + "step": 171 + }, + { + "epoch": 0.7006109979633401, + "grad_norm": 0.6538828419017942, + "learning_rate": 9.467372907243858e-06, + "loss": 0.1012, + "step": 172 + }, + { + "epoch": 0.7046843177189409, + "grad_norm": 0.840723056036209, + "learning_rate": 9.456649779003548e-06, + "loss": 0.117, + "step": 173 + }, + { + "epoch": 0.7087576374745418, + "grad_norm": 0.7652580794490056, + "learning_rate": 9.44582597973238e-06, + "loss": 0.1284, + "step": 174 + }, + { + "epoch": 0.7128309572301426, + "grad_norm": 0.9696904154678632, + "learning_rate": 9.434901753928593e-06, + "loss": 0.1429, + "step": 175 + }, + { + "epoch": 0.7169042769857433, + "grad_norm": 0.7509027450046076, + "learning_rate": 9.423877348358956e-06, + "loss": 0.1006, + "step": 176 + }, + { + "epoch": 0.7209775967413442, + "grad_norm": 0.6942112976471692, + "learning_rate": 9.4127530120532e-06, + "loss": 0.1042, + "step": 177 + }, + { + "epoch": 0.725050916496945, + "grad_norm": 1.4641902043350905, + "learning_rate": 9.401528996298375e-06, + "loss": 0.1676, + "step": 178 + }, + { + "epoch": 0.7291242362525459, + "grad_norm": 0.7418396518869238, + "learning_rate": 9.390205554633193e-06, + "loss": 0.1082, + "step": 179 + }, + { + "epoch": 0.7331975560081466, + "grad_norm": 1.2074617530849705, + "learning_rate": 9.378782942842292e-06, + "loss": 0.1401, + "step": 180 + }, + { + "epoch": 0.7372708757637475, + "grad_norm": 1.2938802390610347, + "learning_rate": 9.367261418950459e-06, + "loss": 0.1855, + "step": 181 + }, + { + "epoch": 0.7413441955193483, + "grad_norm": 1.225757248706894, + "learning_rate": 9.355641243216798e-06, + "loss": 0.1729, + "step": 182 + }, + { + "epoch": 0.745417515274949, + "grad_norm": 1.1483380054973364, + "learning_rate": 
9.343922678128854e-06, + "loss": 0.1078, + "step": 183 + }, + { + "epoch": 0.7494908350305499, + "grad_norm": 0.8222440765781929, + "learning_rate": 9.332105988396692e-06, + "loss": 0.1239, + "step": 184 + }, + { + "epoch": 0.7535641547861507, + "grad_norm": 0.9655962832595171, + "learning_rate": 9.3201914409469e-06, + "loss": 0.1309, + "step": 185 + }, + { + "epoch": 0.7576374745417516, + "grad_norm": 0.8060791719318856, + "learning_rate": 9.308179304916573e-06, + "loss": 0.1159, + "step": 186 + }, + { + "epoch": 0.7617107942973523, + "grad_norm": 0.7357782726661909, + "learning_rate": 9.29606985164723e-06, + "loss": 0.1052, + "step": 187 + }, + { + "epoch": 0.7657841140529531, + "grad_norm": 0.9536045205176826, + "learning_rate": 9.283863354678683e-06, + "loss": 0.1351, + "step": 188 + }, + { + "epoch": 0.769857433808554, + "grad_norm": 0.8771938059672718, + "learning_rate": 9.27156008974286e-06, + "loss": 0.1304, + "step": 189 + }, + { + "epoch": 0.7739307535641547, + "grad_norm": 0.7232888469506753, + "learning_rate": 9.259160334757575e-06, + "loss": 0.1054, + "step": 190 + }, + { + "epoch": 0.7780040733197556, + "grad_norm": 0.8295211262810136, + "learning_rate": 9.246664369820249e-06, + "loss": 0.1323, + "step": 191 + }, + { + "epoch": 0.7820773930753564, + "grad_norm": 1.546126242212441, + "learning_rate": 9.234072477201588e-06, + "loss": 0.2385, + "step": 192 + }, + { + "epoch": 0.7861507128309573, + "grad_norm": 1.3189210288828541, + "learning_rate": 9.2213849413392e-06, + "loss": 0.1312, + "step": 193 + }, + { + "epoch": 0.790224032586558, + "grad_norm": 0.6640416710388396, + "learning_rate": 9.208602048831176e-06, + "loss": 0.1032, + "step": 194 + }, + { + "epoch": 0.7942973523421588, + "grad_norm": 0.7975892776697048, + "learning_rate": 9.195724088429611e-06, + "loss": 0.1089, + "step": 195 + }, + { + "epoch": 0.7983706720977597, + "grad_norm": 0.706905690575772, + "learning_rate": 9.18275135103409e-06, + "loss": 0.1166, + "step": 196 + }, + { + 
"epoch": 0.8024439918533605, + "grad_norm": 0.8769448196441653, + "learning_rate": 9.169684129685099e-06, + "loss": 0.1317, + "step": 197 + }, + { + "epoch": 0.8065173116089613, + "grad_norm": 1.3681899543939136, + "learning_rate": 9.156522719557428e-06, + "loss": 0.1892, + "step": 198 + }, + { + "epoch": 0.8105906313645621, + "grad_norm": 1.0165895452906009, + "learning_rate": 9.143267417953486e-06, + "loss": 0.1526, + "step": 199 + }, + { + "epoch": 0.814663951120163, + "grad_norm": 0.9252869599364745, + "learning_rate": 9.129918524296596e-06, + "loss": 0.1791, + "step": 200 + }, + { + "epoch": 0.8187372708757638, + "grad_norm": 0.7566289195807724, + "learning_rate": 9.11647634012422e-06, + "loss": 0.1018, + "step": 201 + }, + { + "epoch": 0.8228105906313645, + "grad_norm": 0.7097020344942068, + "learning_rate": 9.102941169081167e-06, + "loss": 0.1174, + "step": 202 + }, + { + "epoch": 0.8268839103869654, + "grad_norm": 0.8335131746923946, + "learning_rate": 9.089313316912708e-06, + "loss": 0.14, + "step": 203 + }, + { + "epoch": 0.8309572301425662, + "grad_norm": 0.7934600650652943, + "learning_rate": 9.075593091457692e-06, + "loss": 0.1208, + "step": 204 + }, + { + "epoch": 0.835030549898167, + "grad_norm": 0.7614374059129773, + "learning_rate": 9.061780802641582e-06, + "loss": 0.1166, + "step": 205 + }, + { + "epoch": 0.8391038696537678, + "grad_norm": 0.7158974362347166, + "learning_rate": 9.047876762469451e-06, + "loss": 0.1046, + "step": 206 + }, + { + "epoch": 0.8431771894093686, + "grad_norm": 0.676023527010282, + "learning_rate": 9.033881285018945e-06, + "loss": 0.1049, + "step": 207 + }, + { + "epoch": 0.8472505091649695, + "grad_norm": 1.0542817712970116, + "learning_rate": 9.019794686433174e-06, + "loss": 0.1605, + "step": 208 + }, + { + "epoch": 0.8513238289205702, + "grad_norm": 0.791238316768574, + "learning_rate": 9.005617284913586e-06, + "loss": 0.1008, + "step": 209 + }, + { + "epoch": 0.8553971486761711, + "grad_norm": 1.3679274286147247, + 
"learning_rate": 8.991349400712772e-06, + "loss": 0.1174, + "step": 210 + }, + { + "epoch": 0.8594704684317719, + "grad_norm": 0.8904165376343479, + "learning_rate": 8.976991356127225e-06, + "loss": 0.1252, + "step": 211 + }, + { + "epoch": 0.8635437881873728, + "grad_norm": 0.6365058101639782, + "learning_rate": 8.962543475490068e-06, + "loss": 0.1054, + "step": 212 + }, + { + "epoch": 0.8676171079429735, + "grad_norm": 0.6899915324730952, + "learning_rate": 8.948006085163735e-06, + "loss": 0.1059, + "step": 213 + }, + { + "epoch": 0.8716904276985743, + "grad_norm": 0.7033665303348221, + "learning_rate": 8.933379513532575e-06, + "loss": 0.1055, + "step": 214 + }, + { + "epoch": 0.8757637474541752, + "grad_norm": 0.7051229848942461, + "learning_rate": 8.91866409099546e-06, + "loss": 0.1047, + "step": 215 + }, + { + "epoch": 0.879837067209776, + "grad_norm": 0.7365152922519815, + "learning_rate": 8.903860149958308e-06, + "loss": 0.1028, + "step": 216 + }, + { + "epoch": 0.8839103869653768, + "grad_norm": 0.8798834115379963, + "learning_rate": 8.888968024826575e-06, + "loss": 0.131, + "step": 217 + }, + { + "epoch": 0.8879837067209776, + "grad_norm": 0.8127281754244611, + "learning_rate": 8.873988051997702e-06, + "loss": 0.1014, + "step": 218 + }, + { + "epoch": 0.8920570264765784, + "grad_norm": 0.841292566312256, + "learning_rate": 8.85892056985352e-06, + "loss": 0.1335, + "step": 219 + }, + { + "epoch": 0.8961303462321792, + "grad_norm": 1.3435689868107352, + "learning_rate": 8.8437659187526e-06, + "loss": 0.2286, + "step": 220 + }, + { + "epoch": 0.90020366598778, + "grad_norm": 1.8444300521677208, + "learning_rate": 8.828524441022575e-06, + "loss": 0.1827, + "step": 221 + }, + { + "epoch": 0.9042769857433809, + "grad_norm": 0.7545922474592645, + "learning_rate": 8.813196480952393e-06, + "loss": 0.1027, + "step": 222 + }, + { + "epoch": 0.9083503054989817, + "grad_norm": 0.75537983489465, + "learning_rate": 8.797782384784549e-06, + "loss": 0.1198, + "step": 223 + 
}, + { + "epoch": 0.9124236252545825, + "grad_norm": 0.8104999041705286, + "learning_rate": 8.782282500707262e-06, + "loss": 0.1029, + "step": 224 + }, + { + "epoch": 0.9164969450101833, + "grad_norm": 0.8405282400775482, + "learning_rate": 8.766697178846611e-06, + "loss": 0.1241, + "step": 225 + }, + { + "epoch": 0.9205702647657841, + "grad_norm": 1.013551552697806, + "learning_rate": 8.751026771258622e-06, + "loss": 0.1343, + "step": 226 + }, + { + "epoch": 0.924643584521385, + "grad_norm": 0.6728989996123187, + "learning_rate": 8.735271631921322e-06, + "loss": 0.1058, + "step": 227 + }, + { + "epoch": 0.9287169042769857, + "grad_norm": 0.8690442261224494, + "learning_rate": 8.719432116726738e-06, + "loss": 0.1332, + "step": 228 + }, + { + "epoch": 0.9327902240325866, + "grad_norm": 0.9449187305589617, + "learning_rate": 8.703508583472855e-06, + "loss": 0.1451, + "step": 229 + }, + { + "epoch": 0.9368635437881874, + "grad_norm": 0.8067318337898685, + "learning_rate": 8.68750139185554e-06, + "loss": 0.1248, + "step": 230 + }, + { + "epoch": 0.9409368635437881, + "grad_norm": 0.7905017587261095, + "learning_rate": 8.671410903460416e-06, + "loss": 0.119, + "step": 231 + }, + { + "epoch": 0.945010183299389, + "grad_norm": 1.1238154965476772, + "learning_rate": 8.65523748175469e-06, + "loss": 0.1559, + "step": 232 + }, + { + "epoch": 0.9490835030549898, + "grad_norm": 1.1027211644152675, + "learning_rate": 8.63898149207895e-06, + "loss": 0.1693, + "step": 233 + }, + { + "epoch": 0.9531568228105907, + "grad_norm": 0.9411765578825619, + "learning_rate": 8.622643301638902e-06, + "loss": 0.1346, + "step": 234 + }, + { + "epoch": 0.9572301425661914, + "grad_norm": 0.6884466751221227, + "learning_rate": 8.606223279497081e-06, + "loss": 0.0968, + "step": 235 + }, + { + "epoch": 0.9613034623217923, + "grad_norm": 0.7219918781543078, + "learning_rate": 8.589721796564521e-06, + "loss": 0.0966, + "step": 236 + }, + { + "epoch": 0.9653767820773931, + "grad_norm": 
0.7967809896092082, + "learning_rate": 8.57313922559236e-06, + "loss": 0.1201, + "step": 237 + }, + { + "epoch": 0.9694501018329938, + "grad_norm": 0.8113807921190012, + "learning_rate": 8.556475941163436e-06, + "loss": 0.1097, + "step": 238 + }, + { + "epoch": 0.9735234215885947, + "grad_norm": 1.0943551126152973, + "learning_rate": 8.539732319683817e-06, + "loss": 0.1552, + "step": 239 + }, + { + "epoch": 0.9775967413441955, + "grad_norm": 0.7854046329247982, + "learning_rate": 8.5229087393743e-06, + "loss": 0.1138, + "step": 240 + }, + { + "epoch": 0.9816700610997964, + "grad_norm": 1.1720562073286809, + "learning_rate": 8.506005580261872e-06, + "loss": 0.1525, + "step": 241 + }, + { + "epoch": 0.9857433808553971, + "grad_norm": 0.718895289386658, + "learning_rate": 8.489023224171114e-06, + "loss": 0.1082, + "step": 242 + }, + { + "epoch": 0.9898167006109979, + "grad_norm": 0.613834884154541, + "learning_rate": 8.47196205471559e-06, + "loss": 0.0877, + "step": 243 + }, + { + "epoch": 0.9938900203665988, + "grad_norm": 0.9789990123927295, + "learning_rate": 8.45482245728917e-06, + "loss": 0.1675, + "step": 244 + }, + { + "epoch": 0.9979633401221996, + "grad_norm": 1.5580291175140415, + "learning_rate": 8.437604819057336e-06, + "loss": 0.15, + "step": 245 + }, + { + "epoch": 1.0020366598778003, + "grad_norm": 0.7685763736473359, + "learning_rate": 8.420309528948422e-06, + "loss": 0.1072, + "step": 246 + }, + { + "epoch": 1.0061099796334012, + "grad_norm": 0.6434124354999965, + "learning_rate": 8.40293697764484e-06, + "loss": 0.0844, + "step": 247 + }, + { + "epoch": 1.010183299389002, + "grad_norm": 0.5841852692369695, + "learning_rate": 8.385487557574253e-06, + "loss": 0.0859, + "step": 248 + }, + { + "epoch": 1.0142566191446027, + "grad_norm": 0.6061435282600086, + "learning_rate": 8.367961662900704e-06, + "loss": 0.0809, + "step": 249 + }, + { + "epoch": 1.0183299389002036, + "grad_norm": 0.8866327026089017, + "learning_rate": 8.35035968951572e-06, + "loss": 
0.0996, + "step": 250 + }, + { + "epoch": 1.0224032586558045, + "grad_norm": 0.789311514275454, + "learning_rate": 8.33268203502937e-06, + "loss": 0.0999, + "step": 251 + }, + { + "epoch": 1.0264765784114054, + "grad_norm": 0.7470915493623619, + "learning_rate": 8.314929098761268e-06, + "loss": 0.0836, + "step": 252 + }, + { + "epoch": 1.030549898167006, + "grad_norm": 0.7275329446393578, + "learning_rate": 8.297101281731576e-06, + "loss": 0.0866, + "step": 253 + }, + { + "epoch": 1.034623217922607, + "grad_norm": 0.7227258514093932, + "learning_rate": 8.279198986651925e-06, + "loss": 0.0901, + "step": 254 + }, + { + "epoch": 1.0386965376782078, + "grad_norm": 0.6146754288814568, + "learning_rate": 8.261222617916335e-06, + "loss": 0.0789, + "step": 255 + }, + { + "epoch": 1.0427698574338085, + "grad_norm": 0.8514917967475527, + "learning_rate": 8.243172581592066e-06, + "loss": 0.1017, + "step": 256 + }, + { + "epoch": 1.0468431771894093, + "grad_norm": 0.7579530053794002, + "learning_rate": 8.22504928541045e-06, + "loss": 0.085, + "step": 257 + }, + { + "epoch": 1.0509164969450102, + "grad_norm": 0.6252945360785674, + "learning_rate": 8.206853138757687e-06, + "loss": 0.0777, + "step": 258 + }, + { + "epoch": 1.054989816700611, + "grad_norm": 0.7706416070190195, + "learning_rate": 8.188584552665592e-06, + "loss": 0.0833, + "step": 259 + }, + { + "epoch": 1.0590631364562118, + "grad_norm": 0.7980656602534597, + "learning_rate": 8.17024393980231e-06, + "loss": 0.0968, + "step": 260 + }, + { + "epoch": 1.0631364562118126, + "grad_norm": 0.6040157250127779, + "learning_rate": 8.15183171446299e-06, + "loss": 0.0632, + "step": 261 + }, + { + "epoch": 1.0672097759674135, + "grad_norm": 0.6247841753185668, + "learning_rate": 8.133348292560442e-06, + "loss": 0.073, + "step": 262 + }, + { + "epoch": 1.0712830957230142, + "grad_norm": 0.9705375898534241, + "learning_rate": 8.114794091615718e-06, + "loss": 0.1158, + "step": 263 + }, + { + "epoch": 1.075356415478615, + 
"grad_norm": 0.5868673627641846, + "learning_rate": 8.096169530748708e-06, + "loss": 0.0616, + "step": 264 + }, + { + "epoch": 1.079429735234216, + "grad_norm": 0.6511676677923491, + "learning_rate": 8.077475030668647e-06, + "loss": 0.0924, + "step": 265 + }, + { + "epoch": 1.0835030549898166, + "grad_norm": 0.8628274720451822, + "learning_rate": 8.058711013664633e-06, + "loss": 0.0841, + "step": 266 + }, + { + "epoch": 1.0875763747454175, + "grad_norm": 0.6366922861725464, + "learning_rate": 8.039877903596069e-06, + "loss": 0.0781, + "step": 267 + }, + { + "epoch": 1.0916496945010183, + "grad_norm": 0.6679013936452773, + "learning_rate": 8.020976125883105e-06, + "loss": 0.074, + "step": 268 + }, + { + "epoch": 1.0957230142566192, + "grad_norm": 0.7115218188251841, + "learning_rate": 8.002006107497018e-06, + "loss": 0.0909, + "step": 269 + }, + { + "epoch": 1.0997963340122199, + "grad_norm": 0.6044721294592456, + "learning_rate": 7.982968276950568e-06, + "loss": 0.0682, + "step": 270 + }, + { + "epoch": 1.1038696537678208, + "grad_norm": 1.1198941616515987, + "learning_rate": 7.963863064288326e-06, + "loss": 0.1067, + "step": 271 + }, + { + "epoch": 1.1079429735234216, + "grad_norm": 0.5209754431453612, + "learning_rate": 7.944690901076949e-06, + "loss": 0.0743, + "step": 272 + }, + { + "epoch": 1.1120162932790225, + "grad_norm": 0.7595155294604947, + "learning_rate": 7.925452220395436e-06, + "loss": 0.0903, + "step": 273 + }, + { + "epoch": 1.1160896130346232, + "grad_norm": 0.692986942378061, + "learning_rate": 7.906147456825349e-06, + "loss": 0.0835, + "step": 274 + }, + { + "epoch": 1.120162932790224, + "grad_norm": 0.6090247595888676, + "learning_rate": 7.886777046440993e-06, + "loss": 0.0739, + "step": 275 + }, + { + "epoch": 1.124236252545825, + "grad_norm": 1.042879657433014, + "learning_rate": 7.867341426799562e-06, + "loss": 0.1219, + "step": 276 + }, + { + "epoch": 1.1283095723014256, + "grad_norm": 0.7996350912985092, + "learning_rate": 
7.847841036931263e-06, + "loss": 0.0913, + "step": 277 + }, + { + "epoch": 1.1323828920570265, + "grad_norm": 0.5332223023774978, + "learning_rate": 7.828276317329388e-06, + "loss": 0.0757, + "step": 278 + }, + { + "epoch": 1.1364562118126273, + "grad_norm": 0.5635521535562537, + "learning_rate": 7.80864770994038e-06, + "loss": 0.0641, + "step": 279 + }, + { + "epoch": 1.140529531568228, + "grad_norm": 0.6987390220817623, + "learning_rate": 7.788955658153829e-06, + "loss": 0.0821, + "step": 280 + }, + { + "epoch": 1.1446028513238289, + "grad_norm": 0.6962375028972171, + "learning_rate": 7.769200606792476e-06, + "loss": 0.0686, + "step": 281 + }, + { + "epoch": 1.1486761710794298, + "grad_norm": 1.3654151723987282, + "learning_rate": 7.749383002102147e-06, + "loss": 0.0959, + "step": 282 + }, + { + "epoch": 1.1527494908350306, + "grad_norm": 0.8307371553295552, + "learning_rate": 7.72950329174169e-06, + "loss": 0.0859, + "step": 283 + }, + { + "epoch": 1.1568228105906313, + "grad_norm": 0.9349410963502993, + "learning_rate": 7.709561924772855e-06, + "loss": 0.0874, + "step": 284 + }, + { + "epoch": 1.1608961303462322, + "grad_norm": 1.1074351510282887, + "learning_rate": 7.689559351650142e-06, + "loss": 0.1012, + "step": 285 + }, + { + "epoch": 1.164969450101833, + "grad_norm": 0.9416925430977862, + "learning_rate": 7.66949602421064e-06, + "loss": 0.0923, + "step": 286 + }, + { + "epoch": 1.1690427698574337, + "grad_norm": 0.6905374196650557, + "learning_rate": 7.649372395663816e-06, + "loss": 0.0676, + "step": 287 + }, + { + "epoch": 1.1731160896130346, + "grad_norm": 0.7723265248527, + "learning_rate": 7.629188920581267e-06, + "loss": 0.0812, + "step": 288 + }, + { + "epoch": 1.1771894093686355, + "grad_norm": 0.815712697347911, + "learning_rate": 7.608946054886468e-06, + "loss": 0.0755, + "step": 289 + }, + { + "epoch": 1.1812627291242364, + "grad_norm": 0.8142685931991146, + "learning_rate": 7.588644255844464e-06, + "loss": 0.097, + "step": 290 + }, + { + 
"epoch": 1.185336048879837, + "grad_norm": 0.6448741497851783, + "learning_rate": 7.568283982051538e-06, + "loss": 0.0719, + "step": 291 + }, + { + "epoch": 1.189409368635438, + "grad_norm": 0.5839738850266732, + "learning_rate": 7.5478656934248626e-06, + "loss": 0.0715, + "step": 292 + }, + { + "epoch": 1.1934826883910388, + "grad_norm": 0.6469439075383099, + "learning_rate": 7.527389851192099e-06, + "loss": 0.0745, + "step": 293 + }, + { + "epoch": 1.1975560081466394, + "grad_norm": 0.7083801028247904, + "learning_rate": 7.506856917880989e-06, + "loss": 0.077, + "step": 294 + }, + { + "epoch": 1.2016293279022403, + "grad_norm": 0.5584165288788803, + "learning_rate": 7.486267357308896e-06, + "loss": 0.0757, + "step": 295 + }, + { + "epoch": 1.2057026476578412, + "grad_norm": 0.7093198997979762, + "learning_rate": 7.465621634572336e-06, + "loss": 0.0821, + "step": 296 + }, + { + "epoch": 1.2097759674134418, + "grad_norm": 0.8170300802168915, + "learning_rate": 7.444920216036473e-06, + "loss": 0.0857, + "step": 297 + }, + { + "epoch": 1.2138492871690427, + "grad_norm": 0.6052440895883001, + "learning_rate": 7.4241635693245766e-06, + "loss": 0.0706, + "step": 298 + }, + { + "epoch": 1.2179226069246436, + "grad_norm": 0.5358799947933865, + "learning_rate": 7.40335216330746e-06, + "loss": 0.0698, + "step": 299 + }, + { + "epoch": 1.2219959266802445, + "grad_norm": 0.57379599749216, + "learning_rate": 7.382486468092899e-06, + "loss": 0.0778, + "step": 300 + }, + { + "epoch": 1.2260692464358451, + "grad_norm": 1.9428087979785686, + "learning_rate": 7.361566955014999e-06, + "loss": 0.0881, + "step": 301 + }, + { + "epoch": 1.230142566191446, + "grad_norm": 0.8052892598402892, + "learning_rate": 7.340594096623559e-06, + "loss": 0.0807, + "step": 302 + }, + { + "epoch": 1.234215885947047, + "grad_norm": 0.5697887043869344, + "learning_rate": 7.319568366673389e-06, + "loss": 0.0761, + "step": 303 + }, + { + "epoch": 1.2382892057026478, + "grad_norm": 0.8103909158109461, + 
"learning_rate": 7.2984902401136115e-06, + "loss": 0.084, + "step": 304 + }, + { + "epoch": 1.2423625254582484, + "grad_norm": 0.7842393785164873, + "learning_rate": 7.277360193076936e-06, + "loss": 0.0762, + "step": 305 + }, + { + "epoch": 1.2464358452138493, + "grad_norm": 0.5722519256754022, + "learning_rate": 7.256178702868899e-06, + "loss": 0.0723, + "step": 306 + }, + { + "epoch": 1.2505091649694502, + "grad_norm": 0.5772908994522038, + "learning_rate": 7.234946247957087e-06, + "loss": 0.0809, + "step": 307 + }, + { + "epoch": 1.2545824847250509, + "grad_norm": 0.7436038482880873, + "learning_rate": 7.213663307960321e-06, + "loss": 0.0822, + "step": 308 + }, + { + "epoch": 1.2586558044806517, + "grad_norm": 0.9056276676473372, + "learning_rate": 7.192330363637832e-06, + "loss": 0.1005, + "step": 309 + }, + { + "epoch": 1.2627291242362526, + "grad_norm": 0.6358405282433213, + "learning_rate": 7.170947896878392e-06, + "loss": 0.0737, + "step": 310 + }, + { + "epoch": 1.2668024439918533, + "grad_norm": 0.5954677442268389, + "learning_rate": 7.149516390689433e-06, + "loss": 0.0694, + "step": 311 + }, + { + "epoch": 1.2708757637474541, + "grad_norm": 0.6088987642638558, + "learning_rate": 7.12803632918614e-06, + "loss": 0.0712, + "step": 312 + }, + { + "epoch": 1.274949083503055, + "grad_norm": 0.6327020826012894, + "learning_rate": 7.1065081975805086e-06, + "loss": 0.0709, + "step": 313 + }, + { + "epoch": 1.2790224032586557, + "grad_norm": 0.8052023527305602, + "learning_rate": 7.084932482170385e-06, + "loss": 0.0884, + "step": 314 + }, + { + "epoch": 1.2830957230142566, + "grad_norm": 0.6196057921350728, + "learning_rate": 7.063309670328491e-06, + "loss": 0.0791, + "step": 315 + }, + { + "epoch": 1.2871690427698574, + "grad_norm": 0.6874689974891509, + "learning_rate": 7.041640250491398e-06, + "loss": 0.0725, + "step": 316 + }, + { + "epoch": 1.2912423625254583, + "grad_norm": 0.5750839664752618, + "learning_rate": 7.019924712148511e-06, + "loss": 0.0676, + 
"step": 317 + }, + { + "epoch": 1.2953156822810592, + "grad_norm": 0.7742197007819996, + "learning_rate": 6.998163545830998e-06, + "loss": 0.0734, + "step": 318 + }, + { + "epoch": 1.2993890020366599, + "grad_norm": 0.6195282494860945, + "learning_rate": 6.976357243100718e-06, + "loss": 0.0775, + "step": 319 + }, + { + "epoch": 1.3034623217922607, + "grad_norm": 0.6875577568472137, + "learning_rate": 6.954506296539112e-06, + "loss": 0.0878, + "step": 320 + }, + { + "epoch": 1.3075356415478616, + "grad_norm": 0.7320126566140536, + "learning_rate": 6.9326111997360775e-06, + "loss": 0.081, + "step": 321 + }, + { + "epoch": 1.3116089613034623, + "grad_norm": 0.843149661393896, + "learning_rate": 6.910672447278827e-06, + "loss": 0.0918, + "step": 322 + }, + { + "epoch": 1.3156822810590632, + "grad_norm": 0.6249054893219663, + "learning_rate": 6.8886905347406985e-06, + "loss": 0.0859, + "step": 323 + }, + { + "epoch": 1.319755600814664, + "grad_norm": 0.6921145932385908, + "learning_rate": 6.866665958669976e-06, + "loss": 0.0916, + "step": 324 + }, + { + "epoch": 1.3238289205702647, + "grad_norm": 0.5550915898105676, + "learning_rate": 6.844599216578667e-06, + "loss": 0.0755, + "step": 325 + }, + { + "epoch": 1.3279022403258656, + "grad_norm": 0.8923390816851975, + "learning_rate": 6.822490806931262e-06, + "loss": 0.0903, + "step": 326 + }, + { + "epoch": 1.3319755600814664, + "grad_norm": 0.6246266762679324, + "learning_rate": 6.800341229133486e-06, + "loss": 0.0833, + "step": 327 + }, + { + "epoch": 1.336048879837067, + "grad_norm": 0.8145107114030108, + "learning_rate": 6.778150983520999e-06, + "loss": 0.1131, + "step": 328 + }, + { + "epoch": 1.340122199592668, + "grad_norm": 0.6593824730687659, + "learning_rate": 6.755920571348111e-06, + "loss": 0.0783, + "step": 329 + }, + { + "epoch": 1.3441955193482689, + "grad_norm": 0.7871250490461288, + "learning_rate": 6.73365049477645e-06, + "loss": 0.0791, + "step": 330 + }, + { + "epoch": 1.3482688391038695, + "grad_norm": 
1.5668561976011524, + "learning_rate": 6.711341256863623e-06, + "loss": 0.116, + "step": 331 + }, + { + "epoch": 1.3523421588594704, + "grad_norm": 0.6238740583787221, + "learning_rate": 6.688993361551847e-06, + "loss": 0.079, + "step": 332 + }, + { + "epoch": 1.3564154786150713, + "grad_norm": 0.7619408457347728, + "learning_rate": 6.66660731365657e-06, + "loss": 0.0876, + "step": 333 + }, + { + "epoch": 1.3604887983706722, + "grad_norm": 0.5260527244857047, + "learning_rate": 6.64418361885507e-06, + "loss": 0.08, + "step": 334 + }, + { + "epoch": 1.364562118126273, + "grad_norm": 0.7273285421050715, + "learning_rate": 6.621722783675024e-06, + "loss": 0.0803, + "step": 335 + }, + { + "epoch": 1.3686354378818737, + "grad_norm": 0.5976232642003504, + "learning_rate": 6.599225315483076e-06, + "loss": 0.0773, + "step": 336 + }, + { + "epoch": 1.3727087576374746, + "grad_norm": 0.5354838304438007, + "learning_rate": 6.576691722473368e-06, + "loss": 0.0699, + "step": 337 + }, + { + "epoch": 1.3767820773930755, + "grad_norm": 0.7164176676979538, + "learning_rate": 6.554122513656065e-06, + "loss": 0.0898, + "step": 338 + }, + { + "epoch": 1.3808553971486761, + "grad_norm": 0.694148331721139, + "learning_rate": 6.531518198845854e-06, + "loss": 0.0796, + "step": 339 + }, + { + "epoch": 1.384928716904277, + "grad_norm": 0.8668103406807447, + "learning_rate": 6.508879288650431e-06, + "loss": 0.0864, + "step": 340 + }, + { + "epoch": 1.3890020366598779, + "grad_norm": 0.905281439053181, + "learning_rate": 6.486206294458966e-06, + "loss": 0.1032, + "step": 341 + }, + { + "epoch": 1.3930753564154785, + "grad_norm": 0.6090466800578089, + "learning_rate": 6.463499728430549e-06, + "loss": 0.0739, + "step": 342 + }, + { + "epoch": 1.3971486761710794, + "grad_norm": 0.870879688990723, + "learning_rate": 6.4407601034826225e-06, + "loss": 0.0911, + "step": 343 + }, + { + "epoch": 1.4012219959266803, + "grad_norm": 0.5520211139425468, + "learning_rate": 6.417987933279397e-06, + "loss": 
0.0763, + "step": 344 + }, + { + "epoch": 1.405295315682281, + "grad_norm": 0.567715626333358, + "learning_rate": 6.395183732220242e-06, + "loss": 0.0685, + "step": 345 + }, + { + "epoch": 1.4093686354378818, + "grad_norm": 0.6211209151590639, + "learning_rate": 6.372348015428077e-06, + "loss": 0.0763, + "step": 346 + }, + { + "epoch": 1.4134419551934827, + "grad_norm": 0.6300074755034071, + "learning_rate": 6.349481298737723e-06, + "loss": 0.0811, + "step": 347 + }, + { + "epoch": 1.4175152749490836, + "grad_norm": 0.5995929997342367, + "learning_rate": 6.32658409868426e-06, + "loss": 0.0651, + "step": 348 + }, + { + "epoch": 1.4215885947046842, + "grad_norm": 0.6049082049048853, + "learning_rate": 6.303656932491349e-06, + "loss": 0.0763, + "step": 349 + }, + { + "epoch": 1.4256619144602851, + "grad_norm": 0.9822040378006914, + "learning_rate": 6.280700318059563e-06, + "loss": 0.1054, + "step": 350 + }, + { + "epoch": 1.429735234215886, + "grad_norm": 0.8449953491667241, + "learning_rate": 6.257714773954674e-06, + "loss": 0.102, + "step": 351 + }, + { + "epoch": 1.4338085539714869, + "grad_norm": 0.5999494361508966, + "learning_rate": 6.234700819395946e-06, + "loss": 0.0813, + "step": 352 + }, + { + "epoch": 1.4378818737270875, + "grad_norm": 0.6512734204457252, + "learning_rate": 6.211658974244407e-06, + "loss": 0.0829, + "step": 353 + }, + { + "epoch": 1.4419551934826884, + "grad_norm": 0.616189473053879, + "learning_rate": 6.1885897589911e-06, + "loss": 0.0782, + "step": 354 + }, + { + "epoch": 1.4460285132382893, + "grad_norm": 0.9515071645483372, + "learning_rate": 6.1654936947453355e-06, + "loss": 0.0975, + "step": 355 + }, + { + "epoch": 1.45010183299389, + "grad_norm": 0.6021477200299695, + "learning_rate": 6.142371303222909e-06, + "loss": 0.071, + "step": 356 + }, + { + "epoch": 1.4541751527494908, + "grad_norm": 0.678653399667039, + "learning_rate": 6.119223106734328e-06, + "loss": 0.0812, + "step": 357 + }, + { + "epoch": 1.4582484725050917, + 
"grad_norm": 0.6291449966527708, + "learning_rate": 6.0960496281729995e-06, + "loss": 0.0689, + "step": 358 + }, + { + "epoch": 1.4623217922606924, + "grad_norm": 0.5932315331898775, + "learning_rate": 6.072851391003432e-06, + "loss": 0.075, + "step": 359 + }, + { + "epoch": 1.4663951120162932, + "grad_norm": 0.6492759903008403, + "learning_rate": 6.0496289192494e-06, + "loss": 0.0851, + "step": 360 + }, + { + "epoch": 1.4704684317718941, + "grad_norm": 0.8227230707733793, + "learning_rate": 6.026382737482116e-06, + "loss": 0.0939, + "step": 361 + }, + { + "epoch": 1.4745417515274948, + "grad_norm": 0.7553836500415501, + "learning_rate": 6.003113370808375e-06, + "loss": 0.089, + "step": 362 + }, + { + "epoch": 1.4786150712830957, + "grad_norm": 0.8341775236686235, + "learning_rate": 5.979821344858695e-06, + "loss": 0.1087, + "step": 363 + }, + { + "epoch": 1.4826883910386965, + "grad_norm": 0.7407455393175063, + "learning_rate": 5.956507185775441e-06, + "loss": 0.0875, + "step": 364 + }, + { + "epoch": 1.4867617107942974, + "grad_norm": 0.6302976593180085, + "learning_rate": 5.933171420200946e-06, + "loss": 0.074, + "step": 365 + }, + { + "epoch": 1.4908350305498983, + "grad_norm": 0.6017971359417691, + "learning_rate": 5.909814575265609e-06, + "loss": 0.0771, + "step": 366 + }, + { + "epoch": 1.494908350305499, + "grad_norm": 0.5327101770243312, + "learning_rate": 5.88643717857599e-06, + "loss": 0.064, + "step": 367 + }, + { + "epoch": 1.4989816700610998, + "grad_norm": 0.5530940520217251, + "learning_rate": 5.863039758202889e-06, + "loss": 0.0732, + "step": 368 + }, + { + "epoch": 1.5030549898167007, + "grad_norm": 0.575697348750185, + "learning_rate": 5.839622842669423e-06, + "loss": 0.0794, + "step": 369 + }, + { + "epoch": 1.5071283095723014, + "grad_norm": 0.7879263375060825, + "learning_rate": 5.816186960939084e-06, + "loss": 0.0873, + "step": 370 + }, + { + "epoch": 1.5112016293279023, + "grad_norm": 0.6477317557359421, + "learning_rate": 
5.7927326424037875e-06, + "loss": 0.0808, + "step": 371 + }, + { + "epoch": 1.5152749490835031, + "grad_norm": 0.6424367902201409, + "learning_rate": 5.7692604168719225e-06, + "loss": 0.0743, + "step": 372 + }, + { + "epoch": 1.5193482688391038, + "grad_norm": 0.5205522605427637, + "learning_rate": 5.745770814556373e-06, + "loss": 0.0737, + "step": 373 + }, + { + "epoch": 1.5234215885947047, + "grad_norm": 0.8437328046734178, + "learning_rate": 5.722264366062549e-06, + "loss": 0.1075, + "step": 374 + }, + { + "epoch": 1.5274949083503055, + "grad_norm": 0.705710140903758, + "learning_rate": 5.698741602376395e-06, + "loss": 0.0854, + "step": 375 + }, + { + "epoch": 1.5315682281059062, + "grad_norm": 0.5479078071724787, + "learning_rate": 5.675203054852403e-06, + "loss": 0.0735, + "step": 376 + }, + { + "epoch": 1.535641547861507, + "grad_norm": 0.628783685599969, + "learning_rate": 5.651649255201603e-06, + "loss": 0.0893, + "step": 377 + }, + { + "epoch": 1.539714867617108, + "grad_norm": 0.6393301256468628, + "learning_rate": 5.628080735479553e-06, + "loss": 0.0808, + "step": 378 + }, + { + "epoch": 1.5437881873727086, + "grad_norm": 0.5419627427127232, + "learning_rate": 5.604498028074323e-06, + "loss": 0.0693, + "step": 379 + }, + { + "epoch": 1.5478615071283097, + "grad_norm": 0.5585947140971957, + "learning_rate": 5.580901665694471e-06, + "loss": 0.0708, + "step": 380 + }, + { + "epoch": 1.5519348268839104, + "grad_norm": 0.7630225067115134, + "learning_rate": 5.557292181357003e-06, + "loss": 0.0916, + "step": 381 + }, + { + "epoch": 1.556008146639511, + "grad_norm": 0.7132672152748122, + "learning_rate": 5.533670108375334e-06, + "loss": 0.075, + "step": 382 + }, + { + "epoch": 1.5600814663951121, + "grad_norm": 0.6317003602484598, + "learning_rate": 5.510035980347249e-06, + "loss": 0.0629, + "step": 383 + }, + { + "epoch": 1.5641547861507128, + "grad_norm": 1.0965759213987818, + "learning_rate": 5.486390331142841e-06, + "loss": 0.129, + "step": 384 + }, + { + 
"epoch": 1.5682281059063137, + "grad_norm": 0.9961574441023341, + "learning_rate": 5.462733694892452e-06, + "loss": 0.0994, + "step": 385 + }, + { + "epoch": 1.5723014256619146, + "grad_norm": 0.55138988687004, + "learning_rate": 5.439066605974615e-06, + "loss": 0.0884, + "step": 386 + }, + { + "epoch": 1.5763747454175152, + "grad_norm": 0.642013542309746, + "learning_rate": 5.415389599003972e-06, + "loss": 0.0758, + "step": 387 + }, + { + "epoch": 1.580448065173116, + "grad_norm": 0.7003801753473788, + "learning_rate": 5.391703208819209e-06, + "loss": 0.0822, + "step": 388 + }, + { + "epoch": 1.584521384928717, + "grad_norm": 0.641290834723853, + "learning_rate": 5.368007970470964e-06, + "loss": 0.0794, + "step": 389 + }, + { + "epoch": 1.5885947046843176, + "grad_norm": 0.7220121980133926, + "learning_rate": 5.344304419209748e-06, + "loss": 0.0908, + "step": 390 + }, + { + "epoch": 1.5926680244399185, + "grad_norm": 0.5338391641253294, + "learning_rate": 5.3205930904738544e-06, + "loss": 0.065, + "step": 391 + }, + { + "epoch": 1.5967413441955194, + "grad_norm": 0.8220572260271829, + "learning_rate": 5.296874519877256e-06, + "loss": 0.1063, + "step": 392 + }, + { + "epoch": 1.60081466395112, + "grad_norm": 0.6948752546108359, + "learning_rate": 5.273149243197517e-06, + "loss": 0.092, + "step": 393 + }, + { + "epoch": 1.6048879837067211, + "grad_norm": 0.5973741327673737, + "learning_rate": 5.2494177963636785e-06, + "loss": 0.0781, + "step": 394 + }, + { + "epoch": 1.6089613034623218, + "grad_norm": 0.7082285511834933, + "learning_rate": 5.225680715444168e-06, + "loss": 0.0844, + "step": 395 + }, + { + "epoch": 1.6130346232179225, + "grad_norm": 0.5776337469174156, + "learning_rate": 5.201938536634674e-06, + "loss": 0.0755, + "step": 396 + }, + { + "epoch": 1.6171079429735236, + "grad_norm": 0.6439617862748772, + "learning_rate": 5.178191796246043e-06, + "loss": 0.0692, + "step": 397 + }, + { + "epoch": 1.6211812627291242, + "grad_norm": 0.693976310427531, + 
"learning_rate": 5.154441030692162e-06, + "loss": 0.0844, + "step": 398 + }, + { + "epoch": 1.625254582484725, + "grad_norm": 0.6310937269228004, + "learning_rate": 5.1306867764778445e-06, + "loss": 0.0605, + "step": 399 + }, + { + "epoch": 1.629327902240326, + "grad_norm": 0.5557882712930212, + "learning_rate": 5.106929570186706e-06, + "loss": 0.0616, + "step": 400 + }, + { + "epoch": 1.6334012219959266, + "grad_norm": 0.7105865341669582, + "learning_rate": 5.083169948469049e-06, + "loss": 0.0888, + "step": 401 + }, + { + "epoch": 1.6374745417515275, + "grad_norm": 0.785914280291214, + "learning_rate": 5.059408448029737e-06, + "loss": 0.0924, + "step": 402 + }, + { + "epoch": 1.6415478615071284, + "grad_norm": 0.760854086300454, + "learning_rate": 5.0356456056160715e-06, + "loss": 0.0899, + "step": 403 + }, + { + "epoch": 1.645621181262729, + "grad_norm": 0.5160083716123763, + "learning_rate": 5.0118819580056686e-06, + "loss": 0.0676, + "step": 404 + }, + { + "epoch": 1.64969450101833, + "grad_norm": 0.5926381494692319, + "learning_rate": 4.988118041994332e-06, + "loss": 0.0719, + "step": 405 + }, + { + "epoch": 1.6537678207739308, + "grad_norm": 0.5387161542722511, + "learning_rate": 4.964354394383929e-06, + "loss": 0.0757, + "step": 406 + }, + { + "epoch": 1.6578411405295315, + "grad_norm": 0.5028703565858573, + "learning_rate": 4.940591551970264e-06, + "loss": 0.066, + "step": 407 + }, + { + "epoch": 1.6619144602851323, + "grad_norm": 0.5736039189545704, + "learning_rate": 4.9168300515309515e-06, + "loss": 0.0724, + "step": 408 + }, + { + "epoch": 1.6659877800407332, + "grad_norm": 0.984691517709554, + "learning_rate": 4.8930704298132965e-06, + "loss": 0.1022, + "step": 409 + }, + { + "epoch": 1.6700610997963339, + "grad_norm": 0.5927643518511072, + "learning_rate": 4.869313223522159e-06, + "loss": 0.0718, + "step": 410 + }, + { + "epoch": 1.674134419551935, + "grad_norm": 0.566899954027869, + "learning_rate": 4.845558969307839e-06, + "loss": 0.0707, + "step": 
411 + }, + { + "epoch": 1.6782077393075356, + "grad_norm": 0.5788045225856518, + "learning_rate": 4.821808203753959e-06, + "loss": 0.0785, + "step": 412 + }, + { + "epoch": 1.6822810590631363, + "grad_norm": 0.5224303081188956, + "learning_rate": 4.798061463365327e-06, + "loss": 0.0755, + "step": 413 + }, + { + "epoch": 1.6863543788187374, + "grad_norm": 0.5256289426946346, + "learning_rate": 4.774319284555833e-06, + "loss": 0.0725, + "step": 414 + }, + { + "epoch": 1.690427698574338, + "grad_norm": 0.5514388613040209, + "learning_rate": 4.7505822036363214e-06, + "loss": 0.0698, + "step": 415 + }, + { + "epoch": 1.694501018329939, + "grad_norm": 0.7507069842501244, + "learning_rate": 4.726850756802486e-06, + "loss": 0.0779, + "step": 416 + }, + { + "epoch": 1.6985743380855398, + "grad_norm": 0.5535879129510451, + "learning_rate": 4.703125480122747e-06, + "loss": 0.0677, + "step": 417 + }, + { + "epoch": 1.7026476578411405, + "grad_norm": 0.7586101007852933, + "learning_rate": 4.679406909526147e-06, + "loss": 0.0959, + "step": 418 + }, + { + "epoch": 1.7067209775967414, + "grad_norm": 0.5235003242969455, + "learning_rate": 4.655695580790254e-06, + "loss": 0.0782, + "step": 419 + }, + { + "epoch": 1.7107942973523422, + "grad_norm": 0.8957116693381422, + "learning_rate": 4.631992029529037e-06, + "loss": 0.104, + "step": 420 + }, + { + "epoch": 1.7148676171079429, + "grad_norm": 0.5397767746056776, + "learning_rate": 4.608296791180793e-06, + "loss": 0.0794, + "step": 421 + }, + { + "epoch": 1.7189409368635438, + "grad_norm": 0.5510200443982937, + "learning_rate": 4.584610400996028e-06, + "loss": 0.0727, + "step": 422 + }, + { + "epoch": 1.7230142566191446, + "grad_norm": 0.7218539657297133, + "learning_rate": 4.560933394025386e-06, + "loss": 0.0812, + "step": 423 + }, + { + "epoch": 1.7270875763747453, + "grad_norm": 0.6450718218339647, + "learning_rate": 4.537266305107549e-06, + "loss": 0.0857, + "step": 424 + }, + { + "epoch": 1.7311608961303462, + "grad_norm": 
0.5240562658243174, + "learning_rate": 4.513609668857162e-06, + "loss": 0.0658, + "step": 425 + }, + { + "epoch": 1.735234215885947, + "grad_norm": 0.5203639231556227, + "learning_rate": 4.489964019652752e-06, + "loss": 0.0637, + "step": 426 + }, + { + "epoch": 1.7393075356415477, + "grad_norm": 0.7157158022653508, + "learning_rate": 4.4663298916246665e-06, + "loss": 0.0878, + "step": 427 + }, + { + "epoch": 1.7433808553971488, + "grad_norm": 0.6919816756435726, + "learning_rate": 4.442707818642999e-06, + "loss": 0.0801, + "step": 428 + }, + { + "epoch": 1.7474541751527495, + "grad_norm": 0.7381380434766637, + "learning_rate": 4.419098334305529e-06, + "loss": 0.0831, + "step": 429 + }, + { + "epoch": 1.7515274949083504, + "grad_norm": 0.720540642558599, + "learning_rate": 4.395501971925677e-06, + "loss": 0.0782, + "step": 430 + }, + { + "epoch": 1.7556008146639512, + "grad_norm": 0.6824501618323201, + "learning_rate": 4.371919264520449e-06, + "loss": 0.0795, + "step": 431 + }, + { + "epoch": 1.759674134419552, + "grad_norm": 0.7572127655566931, + "learning_rate": 4.348350744798399e-06, + "loss": 0.0798, + "step": 432 + }, + { + "epoch": 1.7637474541751528, + "grad_norm": 0.5634906535431049, + "learning_rate": 4.324796945147598e-06, + "loss": 0.0728, + "step": 433 + }, + { + "epoch": 1.7678207739307537, + "grad_norm": 0.7686272783650896, + "learning_rate": 4.301258397623606e-06, + "loss": 0.0782, + "step": 434 + }, + { + "epoch": 1.7718940936863543, + "grad_norm": 0.712370232236961, + "learning_rate": 4.2777356339374526e-06, + "loss": 0.0856, + "step": 435 + }, + { + "epoch": 1.7759674134419552, + "grad_norm": 0.9290726298620688, + "learning_rate": 4.254229185443628e-06, + "loss": 0.0783, + "step": 436 + }, + { + "epoch": 1.780040733197556, + "grad_norm": 0.5716455342190312, + "learning_rate": 4.230739583128078e-06, + "loss": 0.0701, + "step": 437 + }, + { + "epoch": 1.7841140529531567, + "grad_norm": 1.1820087104502222, + "learning_rate": 4.2072673575962125e-06, + 
"loss": 0.0977, + "step": 438 + }, + { + "epoch": 1.7881873727087576, + "grad_norm": 0.935231069436012, + "learning_rate": 4.183813039060919e-06, + "loss": 0.1103, + "step": 439 + }, + { + "epoch": 1.7922606924643585, + "grad_norm": 0.5110831181130548, + "learning_rate": 4.160377157330579e-06, + "loss": 0.0787, + "step": 440 + }, + { + "epoch": 1.7963340122199591, + "grad_norm": 0.6036766363233732, + "learning_rate": 4.136960241797113e-06, + "loss": 0.0648, + "step": 441 + }, + { + "epoch": 1.8004073319755602, + "grad_norm": 0.6253574899226054, + "learning_rate": 4.113562821424012e-06, + "loss": 0.0856, + "step": 442 + }, + { + "epoch": 1.804480651731161, + "grad_norm": 0.5737047935293089, + "learning_rate": 4.090185424734392e-06, + "loss": 0.0769, + "step": 443 + }, + { + "epoch": 1.8085539714867616, + "grad_norm": 0.6675899229386297, + "learning_rate": 4.066828579799054e-06, + "loss": 0.0761, + "step": 444 + }, + { + "epoch": 1.8126272912423627, + "grad_norm": 0.5292920337446484, + "learning_rate": 4.043492814224559e-06, + "loss": 0.0684, + "step": 445 + }, + { + "epoch": 1.8167006109979633, + "grad_norm": 0.7551251294419339, + "learning_rate": 4.020178655141307e-06, + "loss": 0.0792, + "step": 446 + }, + { + "epoch": 1.8207739307535642, + "grad_norm": 0.6230513981956748, + "learning_rate": 3.9968866291916254e-06, + "loss": 0.082, + "step": 447 + }, + { + "epoch": 1.824847250509165, + "grad_norm": 0.5934762051634166, + "learning_rate": 3.973617262517886e-06, + "loss": 0.0638, + "step": 448 + }, + { + "epoch": 1.8289205702647657, + "grad_norm": 0.675289075760882, + "learning_rate": 3.950371080750602e-06, + "loss": 0.0795, + "step": 449 + }, + { + "epoch": 1.8329938900203666, + "grad_norm": 0.9147897851217298, + "learning_rate": 3.927148608996569e-06, + "loss": 0.1063, + "step": 450 + }, + { + "epoch": 1.8370672097759675, + "grad_norm": 1.1105124935695379, + "learning_rate": 3.903950371827001e-06, + "loss": 0.087, + "step": 451 + }, + { + "epoch": 
1.8411405295315681, + "grad_norm": 0.5889228060416761, + "learning_rate": 3.880776893265673e-06, + "loss": 0.0767, + "step": 452 + }, + { + "epoch": 1.845213849287169, + "grad_norm": 0.5032013275393701, + "learning_rate": 3.85762869677709e-06, + "loss": 0.0575, + "step": 453 + }, + { + "epoch": 1.84928716904277, + "grad_norm": 0.6559816525725005, + "learning_rate": 3.834506305254667e-06, + "loss": 0.0896, + "step": 454 + }, + { + "epoch": 1.8533604887983706, + "grad_norm": 0.7532212211298266, + "learning_rate": 3.811410241008902e-06, + "loss": 0.0856, + "step": 455 + }, + { + "epoch": 1.8574338085539714, + "grad_norm": 0.5738261899311171, + "learning_rate": 3.788341025755595e-06, + "loss": 0.0733, + "step": 456 + }, + { + "epoch": 1.8615071283095723, + "grad_norm": 0.7208121405309541, + "learning_rate": 3.765299180604055e-06, + "loss": 0.0829, + "step": 457 + }, + { + "epoch": 1.865580448065173, + "grad_norm": 0.5296431964709345, + "learning_rate": 3.7422852260453274e-06, + "loss": 0.0704, + "step": 458 + }, + { + "epoch": 1.869653767820774, + "grad_norm": 0.6719188744341698, + "learning_rate": 3.719299681940437e-06, + "loss": 0.09, + "step": 459 + }, + { + "epoch": 1.8737270875763747, + "grad_norm": 0.5867743083933351, + "learning_rate": 3.696343067508651e-06, + "loss": 0.0816, + "step": 460 + }, + { + "epoch": 1.8778004073319754, + "grad_norm": 0.49214441808140924, + "learning_rate": 3.673415901315743e-06, + "loss": 0.0648, + "step": 461 + }, + { + "epoch": 1.8818737270875765, + "grad_norm": 0.5871371037925425, + "learning_rate": 3.650518701262278e-06, + "loss": 0.0732, + "step": 462 + }, + { + "epoch": 1.8859470468431772, + "grad_norm": 0.6415787500205045, + "learning_rate": 3.6276519845719237e-06, + "loss": 0.0731, + "step": 463 + }, + { + "epoch": 1.890020366598778, + "grad_norm": 0.7505406685838056, + "learning_rate": 3.6048162677797595e-06, + "loss": 0.0847, + "step": 464 + }, + { + "epoch": 1.894093686354379, + "grad_norm": 0.5203285332468817, + 
"learning_rate": 3.582012066720605e-06, + "loss": 0.0666, + "step": 465 + }, + { + "epoch": 1.8981670061099796, + "grad_norm": 0.5664388898818371, + "learning_rate": 3.559239896517379e-06, + "loss": 0.0814, + "step": 466 + }, + { + "epoch": 1.9022403258655805, + "grad_norm": 0.9911644175743987, + "learning_rate": 3.536500271569452e-06, + "loss": 0.0766, + "step": 467 + }, + { + "epoch": 1.9063136456211813, + "grad_norm": 0.5541020704419577, + "learning_rate": 3.5137937055410343e-06, + "loss": 0.0677, + "step": 468 + }, + { + "epoch": 1.910386965376782, + "grad_norm": 0.7733672524931661, + "learning_rate": 3.4911207113495703e-06, + "loss": 0.0883, + "step": 469 + }, + { + "epoch": 1.9144602851323829, + "grad_norm": 0.5994378228763769, + "learning_rate": 3.4684818011541484e-06, + "loss": 0.0779, + "step": 470 + }, + { + "epoch": 1.9185336048879837, + "grad_norm": 0.7925341371043554, + "learning_rate": 3.4458774863439366e-06, + "loss": 0.0882, + "step": 471 + }, + { + "epoch": 1.9226069246435844, + "grad_norm": 0.8007592704584606, + "learning_rate": 3.423308277526633e-06, + "loss": 0.088, + "step": 472 + }, + { + "epoch": 1.9266802443991853, + "grad_norm": 0.6130809497009315, + "learning_rate": 3.4007746845169253e-06, + "loss": 0.0836, + "step": 473 + }, + { + "epoch": 1.9307535641547862, + "grad_norm": 0.6462482272234705, + "learning_rate": 3.3782772163249767e-06, + "loss": 0.0671, + "step": 474 + }, + { + "epoch": 1.9348268839103868, + "grad_norm": 0.4781126271398999, + "learning_rate": 3.3558163811449317e-06, + "loss": 0.0694, + "step": 475 + }, + { + "epoch": 1.938900203665988, + "grad_norm": 0.7328378092623428, + "learning_rate": 3.3333926863434317e-06, + "loss": 0.0847, + "step": 476 + }, + { + "epoch": 1.9429735234215886, + "grad_norm": 0.6002434419865161, + "learning_rate": 3.311006638448155e-06, + "loss": 0.0724, + "step": 477 + }, + { + "epoch": 1.9470468431771895, + "grad_norm": 0.6276269450961816, + "learning_rate": 3.288658743136378e-06, + "loss": 0.066, 
+ "step": 478 + }, + { + "epoch": 1.9511201629327903, + "grad_norm": 0.8586874267355604, + "learning_rate": 3.2663495052235505e-06, + "loss": 0.1077, + "step": 479 + }, + { + "epoch": 1.955193482688391, + "grad_norm": 0.6160197884131686, + "learning_rate": 3.2440794286518896e-06, + "loss": 0.085, + "step": 480 + }, + { + "epoch": 1.9592668024439919, + "grad_norm": 0.5494876667673338, + "learning_rate": 3.2218490164790015e-06, + "loss": 0.0656, + "step": 481 + }, + { + "epoch": 1.9633401221995928, + "grad_norm": 0.5953585828338879, + "learning_rate": 3.199658770866515e-06, + "loss": 0.0754, + "step": 482 + }, + { + "epoch": 1.9674134419551934, + "grad_norm": 0.5401626625339145, + "learning_rate": 3.1775091930687374e-06, + "loss": 0.0668, + "step": 483 + }, + { + "epoch": 1.9714867617107943, + "grad_norm": 0.5111554952361316, + "learning_rate": 3.1554007834213357e-06, + "loss": 0.0686, + "step": 484 + }, + { + "epoch": 1.9755600814663952, + "grad_norm": 0.722625027505199, + "learning_rate": 3.1333340413300263e-06, + "loss": 0.0848, + "step": 485 + }, + { + "epoch": 1.9796334012219958, + "grad_norm": 0.5959140201333295, + "learning_rate": 3.1113094652593023e-06, + "loss": 0.0701, + "step": 486 + }, + { + "epoch": 1.9837067209775967, + "grad_norm": 0.8208975505384392, + "learning_rate": 3.0893275527211742e-06, + "loss": 0.1013, + "step": 487 + }, + { + "epoch": 1.9877800407331976, + "grad_norm": 0.6224896038998268, + "learning_rate": 3.067388800263923e-06, + "loss": 0.0832, + "step": 488 + }, + { + "epoch": 1.9918533604887982, + "grad_norm": 0.6241176624569225, + "learning_rate": 3.04549370346089e-06, + "loss": 0.0678, + "step": 489 + }, + { + "epoch": 1.9959266802443993, + "grad_norm": 0.6061475364205144, + "learning_rate": 3.0236427568992845e-06, + "loss": 0.0768, + "step": 490 + }, + { + "epoch": 2.0, + "grad_norm": 0.7233218000939327, + "learning_rate": 3.0018364541690048e-06, + "loss": 0.0861, + "step": 491 + }, + { + "epoch": 2.0040733197556007, + "grad_norm": 
0.4042479382149737, + "learning_rate": 2.9800752878514903e-06, + "loss": 0.0466, + "step": 492 + }, + { + "epoch": 2.0081466395112018, + "grad_norm": 0.5139407106351861, + "learning_rate": 2.958359749508603e-06, + "loss": 0.0515, + "step": 493 + }, + { + "epoch": 2.0122199592668024, + "grad_norm": 0.46115082125552953, + "learning_rate": 2.936690329671511e-06, + "loss": 0.0435, + "step": 494 + }, + { + "epoch": 2.016293279022403, + "grad_norm": 0.4268534345955416, + "learning_rate": 2.915067517829615e-06, + "loss": 0.0455, + "step": 495 + }, + { + "epoch": 2.020366598778004, + "grad_norm": 0.4635470622803733, + "learning_rate": 2.893491802419492e-06, + "loss": 0.0476, + "step": 496 + }, + { + "epoch": 2.024439918533605, + "grad_norm": 0.411133073158324, + "learning_rate": 2.871963670813861e-06, + "loss": 0.0383, + "step": 497 + }, + { + "epoch": 2.0285132382892055, + "grad_norm": 0.5300632118826186, + "learning_rate": 2.850483609310567e-06, + "loss": 0.0429, + "step": 498 + }, + { + "epoch": 2.0325865580448066, + "grad_norm": 0.5473407298179113, + "learning_rate": 2.829052103121611e-06, + "loss": 0.0461, + "step": 499 + }, + { + "epoch": 2.0366598778004072, + "grad_norm": 0.5221255489838956, + "learning_rate": 2.807669636362169e-06, + "loss": 0.0468, + "step": 500 + }, + { + "epoch": 2.0407331975560083, + "grad_norm": 1.1657623948850715, + "learning_rate": 2.7863366920396805e-06, + "loss": 0.0661, + "step": 501 + }, + { + "epoch": 2.044806517311609, + "grad_norm": 0.6686933034288756, + "learning_rate": 2.765053752042915e-06, + "loss": 0.0528, + "step": 502 + }, + { + "epoch": 2.0488798370672097, + "grad_norm": 0.570328535871806, + "learning_rate": 2.7438212971311016e-06, + "loss": 0.0445, + "step": 503 + }, + { + "epoch": 2.0529531568228108, + "grad_norm": 0.514036454560942, + "learning_rate": 2.722639806923066e-06, + "loss": 0.0377, + "step": 504 + }, + { + "epoch": 2.0570264765784114, + "grad_norm": 0.7454897039779705, + "learning_rate": 2.7015097598863906e-06, + 
"loss": 0.0493, + "step": 505 + }, + { + "epoch": 2.061099796334012, + "grad_norm": 0.6127243032686627, + "learning_rate": 2.680431633326614e-06, + "loss": 0.0397, + "step": 506 + }, + { + "epoch": 2.065173116089613, + "grad_norm": 0.5755519569317435, + "learning_rate": 2.659405903376442e-06, + "loss": 0.0485, + "step": 507 + }, + { + "epoch": 2.069246435845214, + "grad_norm": 0.5575385047600512, + "learning_rate": 2.6384330449850028e-06, + "loss": 0.0479, + "step": 508 + }, + { + "epoch": 2.0733197556008145, + "grad_norm": 0.5392806277095619, + "learning_rate": 2.617513531907103e-06, + "loss": 0.0466, + "step": 509 + }, + { + "epoch": 2.0773930753564156, + "grad_norm": 0.671579294280409, + "learning_rate": 2.5966478366925406e-06, + "loss": 0.0472, + "step": 510 + }, + { + "epoch": 2.0814663951120163, + "grad_norm": 0.6268395862195464, + "learning_rate": 2.5758364306754247e-06, + "loss": 0.0448, + "step": 511 + }, + { + "epoch": 2.085539714867617, + "grad_norm": 0.7629937873111097, + "learning_rate": 2.5550797839635283e-06, + "loss": 0.0448, + "step": 512 + }, + { + "epoch": 2.089613034623218, + "grad_norm": 0.5361361289284211, + "learning_rate": 2.5343783654276644e-06, + "loss": 0.0446, + "step": 513 + }, + { + "epoch": 2.0936863543788187, + "grad_norm": 0.5890396253795798, + "learning_rate": 2.5137326426911067e-06, + "loss": 0.0433, + "step": 514 + }, + { + "epoch": 2.0977596741344193, + "grad_norm": 0.6330309017862886, + "learning_rate": 2.493143082119013e-06, + "loss": 0.048, + "step": 515 + }, + { + "epoch": 2.1018329938900204, + "grad_norm": 0.6503134273714366, + "learning_rate": 2.472610148807903e-06, + "loss": 0.0478, + "step": 516 + }, + { + "epoch": 2.105906313645621, + "grad_norm": 0.6422107457849697, + "learning_rate": 2.452134306575139e-06, + "loss": 0.0464, + "step": 517 + }, + { + "epoch": 2.109979633401222, + "grad_norm": 0.5877663930261545, + "learning_rate": 2.431716017948462e-06, + "loss": 0.0535, + "step": 518 + }, + { + "epoch": 
2.114052953156823, + "grad_norm": 0.5447537397982213, + "learning_rate": 2.4113557441555384e-06, + "loss": 0.0374, + "step": 519 + }, + { + "epoch": 2.1181262729124235, + "grad_norm": 0.5207073923132541, + "learning_rate": 2.391053945113533e-06, + "loss": 0.0344, + "step": 520 + }, + { + "epoch": 2.1221995926680246, + "grad_norm": 0.5610363989876352, + "learning_rate": 2.370811079418735e-06, + "loss": 0.0452, + "step": 521 + }, + { + "epoch": 2.1262729124236253, + "grad_norm": 0.6673691208587358, + "learning_rate": 2.350627604336186e-06, + "loss": 0.0491, + "step": 522 + }, + { + "epoch": 2.130346232179226, + "grad_norm": 0.6073812740579083, + "learning_rate": 2.330503975789361e-06, + "loss": 0.0522, + "step": 523 + }, + { + "epoch": 2.134419551934827, + "grad_norm": 0.5565972854137461, + "learning_rate": 2.3104406483498593e-06, + "loss": 0.0433, + "step": 524 + }, + { + "epoch": 2.1384928716904277, + "grad_norm": 0.6165041221340056, + "learning_rate": 2.290438075227146e-06, + "loss": 0.0431, + "step": 525 + }, + { + "epoch": 2.1425661914460283, + "grad_norm": 0.5010290577858647, + "learning_rate": 2.270496708258309e-06, + "loss": 0.045, + "step": 526 + }, + { + "epoch": 2.1466395112016294, + "grad_norm": 0.55185695474535, + "learning_rate": 2.2506169978978543e-06, + "loss": 0.041, + "step": 527 + }, + { + "epoch": 2.15071283095723, + "grad_norm": 0.610147889264061, + "learning_rate": 2.230799393207526e-06, + "loss": 0.0453, + "step": 528 + }, + { + "epoch": 2.1547861507128308, + "grad_norm": 0.503191638472138, + "learning_rate": 2.2110443418461723e-06, + "loss": 0.0333, + "step": 529 + }, + { + "epoch": 2.158859470468432, + "grad_norm": 0.5012816515718463, + "learning_rate": 2.191352290059621e-06, + "loss": 0.041, + "step": 530 + }, + { + "epoch": 2.1629327902240325, + "grad_norm": 0.6375299907541431, + "learning_rate": 2.171723682670613e-06, + "loss": 0.0518, + "step": 531 + }, + { + "epoch": 2.167006109979633, + "grad_norm": 0.6386194779538906, + 
"learning_rate": 2.152158963068739e-06, + "loss": 0.0413, + "step": 532 + }, + { + "epoch": 2.1710794297352343, + "grad_norm": 0.5082368802837912, + "learning_rate": 2.1326585732004384e-06, + "loss": 0.0407, + "step": 533 + }, + { + "epoch": 2.175152749490835, + "grad_norm": 0.6010063459701015, + "learning_rate": 2.1132229535590092e-06, + "loss": 0.047, + "step": 534 + }, + { + "epoch": 2.179226069246436, + "grad_norm": 0.5342559384940562, + "learning_rate": 2.093852543174652e-06, + "loss": 0.0442, + "step": 535 + }, + { + "epoch": 2.1832993890020367, + "grad_norm": 0.577105364648582, + "learning_rate": 2.0745477796045664e-06, + "loss": 0.0409, + "step": 536 + }, + { + "epoch": 2.1873727087576373, + "grad_norm": 0.5567485551750603, + "learning_rate": 2.0553090989230527e-06, + "loss": 0.0455, + "step": 537 + }, + { + "epoch": 2.1914460285132384, + "grad_norm": 0.5438386031817741, + "learning_rate": 2.036136935711674e-06, + "loss": 0.0362, + "step": 538 + }, + { + "epoch": 2.195519348268839, + "grad_norm": 0.5315263327187112, + "learning_rate": 2.017031723049432e-06, + "loss": 0.034, + "step": 539 + }, + { + "epoch": 2.1995926680244398, + "grad_norm": 0.673612549921327, + "learning_rate": 1.997993892502983e-06, + "loss": 0.0456, + "step": 540 + }, + { + "epoch": 2.203665987780041, + "grad_norm": 0.5852681942169125, + "learning_rate": 1.979023874116895e-06, + "loss": 0.0472, + "step": 541 + }, + { + "epoch": 2.2077393075356415, + "grad_norm": 0.5437231347482441, + "learning_rate": 1.9601220964039324e-06, + "loss": 0.0417, + "step": 542 + }, + { + "epoch": 2.211812627291242, + "grad_norm": 0.5680092663795543, + "learning_rate": 1.9412889863353683e-06, + "loss": 0.0439, + "step": 543 + }, + { + "epoch": 2.2158859470468433, + "grad_norm": 0.5311280129610669, + "learning_rate": 1.9225249693313547e-06, + "loss": 0.0403, + "step": 544 + }, + { + "epoch": 2.219959266802444, + "grad_norm": 0.5412826132559947, + "learning_rate": 1.9038304692512943e-06, + "loss": 0.0447, + 
"step": 545 + }, + { + "epoch": 2.224032586558045, + "grad_norm": 0.5452801648176083, + "learning_rate": 1.8852059083842838e-06, + "loss": 0.043, + "step": 546 + }, + { + "epoch": 2.2281059063136457, + "grad_norm": 0.5596216040559803, + "learning_rate": 1.8666517074395607e-06, + "loss": 0.0503, + "step": 547 + }, + { + "epoch": 2.2321792260692463, + "grad_norm": 0.45337689088312033, + "learning_rate": 1.8481682855370098e-06, + "loss": 0.034, + "step": 548 + }, + { + "epoch": 2.2362525458248474, + "grad_norm": 0.5750769024644237, + "learning_rate": 1.829756060197692e-06, + "loss": 0.0405, + "step": 549 + }, + { + "epoch": 2.240325865580448, + "grad_norm": 0.6084440583738112, + "learning_rate": 1.8114154473344081e-06, + "loss": 0.0473, + "step": 550 + }, + { + "epoch": 2.2443991853360488, + "grad_norm": 0.4803812820375662, + "learning_rate": 1.7931468612423142e-06, + "loss": 0.0378, + "step": 551 + }, + { + "epoch": 2.24847250509165, + "grad_norm": 0.51784716618696, + "learning_rate": 1.7749507145895518e-06, + "loss": 0.0379, + "step": 552 + }, + { + "epoch": 2.2525458248472505, + "grad_norm": 0.7122683699927368, + "learning_rate": 1.756827418407936e-06, + "loss": 0.0461, + "step": 553 + }, + { + "epoch": 2.256619144602851, + "grad_norm": 0.6243772081300375, + "learning_rate": 1.7387773820836668e-06, + "loss": 0.0405, + "step": 554 + }, + { + "epoch": 2.2606924643584523, + "grad_norm": 0.6228491364800817, + "learning_rate": 1.7208010133480751e-06, + "loss": 0.0398, + "step": 555 + }, + { + "epoch": 2.264765784114053, + "grad_norm": 0.5651946448346165, + "learning_rate": 1.7028987182684248e-06, + "loss": 0.0445, + "step": 556 + }, + { + "epoch": 2.2688391038696536, + "grad_norm": 0.503547616132502, + "learning_rate": 1.6850709012387328e-06, + "loss": 0.0407, + "step": 557 + }, + { + "epoch": 2.2729124236252547, + "grad_norm": 0.5555445714555036, + "learning_rate": 1.6673179649706312e-06, + "loss": 0.04, + "step": 558 + }, + { + "epoch": 2.2769857433808554, + 
"grad_norm": 0.5874178542752004, + "learning_rate": 1.64964031048428e-06, + "loss": 0.0399, + "step": 559 + }, + { + "epoch": 2.281059063136456, + "grad_norm": 0.5336372195671713, + "learning_rate": 1.632038337099297e-06, + "loss": 0.042, + "step": 560 + }, + { + "epoch": 2.285132382892057, + "grad_norm": 0.5383594089044018, + "learning_rate": 1.6145124424257497e-06, + "loss": 0.0396, + "step": 561 + }, + { + "epoch": 2.2892057026476578, + "grad_norm": 0.5590885541277442, + "learning_rate": 1.5970630223551614e-06, + "loss": 0.0411, + "step": 562 + }, + { + "epoch": 2.293279022403259, + "grad_norm": 0.5009898528517284, + "learning_rate": 1.5796904710515792e-06, + "loss": 0.0371, + "step": 563 + }, + { + "epoch": 2.2973523421588595, + "grad_norm": 0.548643352778303, + "learning_rate": 1.5623951809426663e-06, + "loss": 0.0443, + "step": 564 + }, + { + "epoch": 2.30142566191446, + "grad_norm": 0.6669365745322423, + "learning_rate": 1.5451775427108302e-06, + "loss": 0.0377, + "step": 565 + }, + { + "epoch": 2.3054989816700613, + "grad_norm": 0.5016854380779077, + "learning_rate": 1.5280379452844124e-06, + "loss": 0.0392, + "step": 566 + }, + { + "epoch": 2.309572301425662, + "grad_norm": 0.5047140084164453, + "learning_rate": 1.510976775828887e-06, + "loss": 0.0371, + "step": 567 + }, + { + "epoch": 2.3136456211812626, + "grad_norm": 0.579908909538212, + "learning_rate": 1.493994419738129e-06, + "loss": 0.0414, + "step": 568 + }, + { + "epoch": 2.3177189409368637, + "grad_norm": 0.5387115270421546, + "learning_rate": 1.4770912606257003e-06, + "loss": 0.0481, + "step": 569 + }, + { + "epoch": 2.3217922606924644, + "grad_norm": 0.7385149697772013, + "learning_rate": 1.4602676803161842e-06, + "loss": 0.045, + "step": 570 + }, + { + "epoch": 2.325865580448065, + "grad_norm": 0.5947860407063731, + "learning_rate": 1.4435240588365645e-06, + "loss": 0.0382, + "step": 571 + }, + { + "epoch": 2.329938900203666, + "grad_norm": 0.5512237269241814, + "learning_rate": 
1.4268607744076419e-06, + "loss": 0.0388, + "step": 572 + }, + { + "epoch": 2.3340122199592668, + "grad_norm": 0.5946964883265247, + "learning_rate": 1.41027820343548e-06, + "loss": 0.0453, + "step": 573 + }, + { + "epoch": 2.3380855397148674, + "grad_norm": 0.6664453909137862, + "learning_rate": 1.3937767205029196e-06, + "loss": 0.0506, + "step": 574 + }, + { + "epoch": 2.3421588594704685, + "grad_norm": 0.5836986263456211, + "learning_rate": 1.3773566983610992e-06, + "loss": 0.048, + "step": 575 + }, + { + "epoch": 2.346232179226069, + "grad_norm": 0.6465467628151139, + "learning_rate": 1.3610185079210514e-06, + "loss": 0.0372, + "step": 576 + }, + { + "epoch": 2.35030549898167, + "grad_norm": 0.6252291597352545, + "learning_rate": 1.34476251824531e-06, + "loss": 0.0418, + "step": 577 + }, + { + "epoch": 2.354378818737271, + "grad_norm": 0.6247058605044977, + "learning_rate": 1.3285890965395853e-06, + "loss": 0.0448, + "step": 578 + }, + { + "epoch": 2.3584521384928716, + "grad_norm": 0.5412768116067983, + "learning_rate": 1.3124986081444625e-06, + "loss": 0.0459, + "step": 579 + }, + { + "epoch": 2.3625254582484727, + "grad_norm": 0.5791251838362595, + "learning_rate": 1.296491416527147e-06, + "loss": 0.0424, + "step": 580 + }, + { + "epoch": 2.3665987780040734, + "grad_norm": 0.593745342304442, + "learning_rate": 1.2805678832732627e-06, + "loss": 0.0522, + "step": 581 + }, + { + "epoch": 2.370672097759674, + "grad_norm": 0.8494211302879079, + "learning_rate": 1.264728368078678e-06, + "loss": 0.0403, + "step": 582 + }, + { + "epoch": 2.374745417515275, + "grad_norm": 0.5337776817823944, + "learning_rate": 1.248973228741378e-06, + "loss": 0.0435, + "step": 583 + }, + { + "epoch": 2.378818737270876, + "grad_norm": 0.585415744482421, + "learning_rate": 1.2333028211533916e-06, + "loss": 0.0399, + "step": 584 + }, + { + "epoch": 2.3828920570264764, + "grad_norm": 0.6610414798056414, + "learning_rate": 1.21771749929274e-06, + "loss": 0.0457, + "step": 585 + }, + { + 
"epoch": 2.3869653767820775, + "grad_norm": 0.542889748912829, + "learning_rate": 1.2022176152154525e-06, + "loss": 0.0422, + "step": 586 + }, + { + "epoch": 2.391038696537678, + "grad_norm": 0.5789981235953507, + "learning_rate": 1.1868035190476085e-06, + "loss": 0.0408, + "step": 587 + }, + { + "epoch": 2.395112016293279, + "grad_norm": 0.5350956934163298, + "learning_rate": 1.1714755589774252e-06, + "loss": 0.0374, + "step": 588 + }, + { + "epoch": 2.39918533604888, + "grad_norm": 0.49474503763151906, + "learning_rate": 1.1562340812474004e-06, + "loss": 0.0331, + "step": 589 + }, + { + "epoch": 2.4032586558044806, + "grad_norm": 0.49516175923003436, + "learning_rate": 1.1410794301464817e-06, + "loss": 0.0381, + "step": 590 + }, + { + "epoch": 2.4073319755600817, + "grad_norm": 0.512692253376879, + "learning_rate": 1.1260119480023008e-06, + "loss": 0.0338, + "step": 591 + }, + { + "epoch": 2.4114052953156824, + "grad_norm": 0.5273075654992884, + "learning_rate": 1.1110319751734271e-06, + "loss": 0.0382, + "step": 592 + }, + { + "epoch": 2.415478615071283, + "grad_norm": 0.5525265566643786, + "learning_rate": 1.0961398500416926e-06, + "loss": 0.0388, + "step": 593 + }, + { + "epoch": 2.4195519348268837, + "grad_norm": 0.5075084748842459, + "learning_rate": 1.0813359090045412e-06, + "loss": 0.039, + "step": 594 + }, + { + "epoch": 2.423625254582485, + "grad_norm": 0.5542848034343792, + "learning_rate": 1.0666204864674263e-06, + "loss": 0.0353, + "step": 595 + }, + { + "epoch": 2.4276985743380854, + "grad_norm": 0.5987619949589811, + "learning_rate": 1.0519939148362667e-06, + "loss": 0.0439, + "step": 596 + }, + { + "epoch": 2.4317718940936865, + "grad_norm": 0.5077117076298409, + "learning_rate": 1.0374565245099328e-06, + "loss": 0.0365, + "step": 597 + }, + { + "epoch": 2.435845213849287, + "grad_norm": 0.5983086948318251, + "learning_rate": 1.0230086438727771e-06, + "loss": 0.0426, + "step": 598 + }, + { + "epoch": 2.439918533604888, + "grad_norm": 
0.531303529426792, + "learning_rate": 1.0086505992872304e-06, + "loss": 0.0425, + "step": 599 + }, + { + "epoch": 2.443991853360489, + "grad_norm": 0.5287789149426023, + "learning_rate": 9.943827150864143e-07, + "loss": 0.0387, + "step": 600 + }, + { + "epoch": 2.4480651731160896, + "grad_norm": 0.5413705197589561, + "learning_rate": 9.80205313566827e-07, + "loss": 0.0405, + "step": 601 + }, + { + "epoch": 2.4521384928716903, + "grad_norm": 0.5254335377173296, + "learning_rate": 9.66118714981058e-07, + "loss": 0.0446, + "step": 602 + }, + { + "epoch": 2.4562118126272914, + "grad_norm": 0.4973949287110427, + "learning_rate": 9.521232375305494e-07, + "loss": 0.0399, + "step": 603 + }, + { + "epoch": 2.460285132382892, + "grad_norm": 0.5774952931364324, + "learning_rate": 9.382191973584193e-07, + "loss": 0.03, + "step": 604 + }, + { + "epoch": 2.4643584521384927, + "grad_norm": 0.6662418655190413, + "learning_rate": 9.244069085423074e-07, + "loss": 0.0498, + "step": 605 + }, + { + "epoch": 2.468431771894094, + "grad_norm": 0.5094732488489113, + "learning_rate": 9.106866830872929e-07, + "loss": 0.0339, + "step": 606 + }, + { + "epoch": 2.4725050916496945, + "grad_norm": 0.5411020588710564, + "learning_rate": 8.970588309188343e-07, + "loss": 0.043, + "step": 607 + }, + { + "epoch": 2.4765784114052956, + "grad_norm": 0.7698681919411557, + "learning_rate": 8.835236598757796e-07, + "loss": 0.0428, + "step": 608 + }, + { + "epoch": 2.480651731160896, + "grad_norm": 0.46481272931573697, + "learning_rate": 8.70081475703406e-07, + "loss": 0.0347, + "step": 609 + }, + { + "epoch": 2.484725050916497, + "grad_norm": 0.8419803303836032, + "learning_rate": 8.567325820465156e-07, + "loss": 0.0495, + "step": 610 + }, + { + "epoch": 2.4887983706720975, + "grad_norm": 0.5599968776361592, + "learning_rate": 8.434772804425734e-07, + "loss": 0.0408, + "step": 611 + }, + { + "epoch": 2.4928716904276986, + "grad_norm": 0.5857539950345456, + "learning_rate": 8.303158703149023e-07, + "loss": 
0.0411, + "step": 612 + }, + { + "epoch": 2.4969450101832993, + "grad_norm": 0.5261597215598826, + "learning_rate": 8.172486489659115e-07, + "loss": 0.0393, + "step": 613 + }, + { + "epoch": 2.5010183299389004, + "grad_norm": 0.5676439893059966, + "learning_rate": 8.042759115703891e-07, + "loss": 0.0412, + "step": 614 + }, + { + "epoch": 2.505091649694501, + "grad_norm": 0.6917881182955395, + "learning_rate": 7.913979511688252e-07, + "loss": 0.0409, + "step": 615 + }, + { + "epoch": 2.5091649694501017, + "grad_norm": 0.6445730147266382, + "learning_rate": 7.78615058660801e-07, + "loss": 0.0477, + "step": 616 + }, + { + "epoch": 2.513238289205703, + "grad_norm": 0.51653497568893, + "learning_rate": 7.659275227984142e-07, + "loss": 0.0379, + "step": 617 + }, + { + "epoch": 2.5173116089613035, + "grad_norm": 0.49200602033604846, + "learning_rate": 7.533356301797523e-07, + "loss": 0.0398, + "step": 618 + }, + { + "epoch": 2.521384928716904, + "grad_norm": 0.5668774352962834, + "learning_rate": 7.408396652424271e-07, + "loss": 0.0345, + "step": 619 + }, + { + "epoch": 2.525458248472505, + "grad_norm": 0.6916666283952536, + "learning_rate": 7.28439910257141e-07, + "loss": 0.053, + "step": 620 + }, + { + "epoch": 2.529531568228106, + "grad_norm": 0.5926967728678049, + "learning_rate": 7.161366453213181e-07, + "loss": 0.0382, + "step": 621 + }, + { + "epoch": 2.5336048879837065, + "grad_norm": 0.5473156833255498, + "learning_rate": 7.03930148352771e-07, + "loss": 0.041, + "step": 622 + }, + { + "epoch": 2.5376782077393076, + "grad_norm": 0.5346824810255936, + "learning_rate": 6.918206950834283e-07, + "loss": 0.0433, + "step": 623 + }, + { + "epoch": 2.5417515274949083, + "grad_norm": 0.5930343664045405, + "learning_rate": 6.798085590531012e-07, + "loss": 0.0403, + "step": 624 + }, + { + "epoch": 2.5458248472505094, + "grad_norm": 0.6064677518833592, + "learning_rate": 6.678940116033095e-07, + "loss": 0.0307, + "step": 625 + }, + { + "epoch": 2.54989816700611, + 
"grad_norm": 0.5224836625520722, + "learning_rate": 6.560773218711458e-07, + "loss": 0.0387, + "step": 626 + }, + { + "epoch": 2.5539714867617107, + "grad_norm": 0.59765107128643, + "learning_rate": 6.443587567832044e-07, + "loss": 0.0364, + "step": 627 + }, + { + "epoch": 2.5580448065173114, + "grad_norm": 0.6354038537233366, + "learning_rate": 6.327385810495423e-07, + "loss": 0.0396, + "step": 628 + }, + { + "epoch": 2.5621181262729125, + "grad_norm": 0.5543781150256897, + "learning_rate": 6.212170571577087e-07, + "loss": 0.0433, + "step": 629 + }, + { + "epoch": 2.566191446028513, + "grad_norm": 0.5478340831952899, + "learning_rate": 6.097944453668081e-07, + "loss": 0.0398, + "step": 630 + }, + { + "epoch": 2.5702647657841142, + "grad_norm": 0.4730479933130945, + "learning_rate": 5.984710037016267e-07, + "loss": 0.036, + "step": 631 + }, + { + "epoch": 2.574338085539715, + "grad_norm": 0.6518340293036718, + "learning_rate": 5.872469879468024e-07, + "loss": 0.051, + "step": 632 + }, + { + "epoch": 2.5784114052953155, + "grad_norm": 0.525283427975801, + "learning_rate": 5.761226516410434e-07, + "loss": 0.0336, + "step": 633 + }, + { + "epoch": 2.5824847250509166, + "grad_norm": 0.6611576813326997, + "learning_rate": 5.650982460714083e-07, + "loss": 0.0424, + "step": 634 + }, + { + "epoch": 2.5865580448065173, + "grad_norm": 0.6300996132757367, + "learning_rate": 5.54174020267621e-07, + "loss": 0.0416, + "step": 635 + }, + { + "epoch": 2.5906313645621184, + "grad_norm": 0.48987769259317776, + "learning_rate": 5.433502209964531e-07, + "loss": 0.0386, + "step": 636 + }, + { + "epoch": 2.594704684317719, + "grad_norm": 0.550269517656816, + "learning_rate": 5.326270927561444e-07, + "loss": 0.0433, + "step": 637 + }, + { + "epoch": 2.5987780040733197, + "grad_norm": 0.5416789018992982, + "learning_rate": 5.22004877770883e-07, + "loss": 0.0406, + "step": 638 + }, + { + "epoch": 2.6028513238289204, + "grad_norm": 0.5636474353072044, + "learning_rate": 
5.114838159853336e-07, + "loss": 0.0413, + "step": 639 + }, + { + "epoch": 2.6069246435845215, + "grad_norm": 0.5254864379504877, + "learning_rate": 5.010641450592158e-07, + "loss": 0.0424, + "step": 640 + }, + { + "epoch": 2.610997963340122, + "grad_norm": 0.6691497595163663, + "learning_rate": 4.907461003619346e-07, + "loss": 0.0457, + "step": 641 + }, + { + "epoch": 2.6150712830957232, + "grad_norm": 0.6033224692953435, + "learning_rate": 4.805299149672682e-07, + "loss": 0.0409, + "step": 642 + }, + { + "epoch": 2.619144602851324, + "grad_norm": 0.5548154773351458, + "learning_rate": 4.7041581964809733e-07, + "loss": 0.0375, + "step": 643 + }, + { + "epoch": 2.6232179226069245, + "grad_norm": 0.5302610063387178, + "learning_rate": 4.6040404287119924e-07, + "loss": 0.0425, + "step": 644 + }, + { + "epoch": 2.627291242362525, + "grad_norm": 0.5475760159123029, + "learning_rate": 4.504948107920781e-07, + "loss": 0.037, + "step": 645 + }, + { + "epoch": 2.6313645621181263, + "grad_norm": 0.5384431678073536, + "learning_rate": 4.4068834724986466e-07, + "loss": 0.0404, + "step": 646 + }, + { + "epoch": 2.635437881873727, + "grad_norm": 0.5960889282149573, + "learning_rate": 4.309848737622568e-07, + "loss": 0.0405, + "step": 647 + }, + { + "epoch": 2.639511201629328, + "grad_norm": 0.5392965352716715, + "learning_rate": 4.213846095205126e-07, + "loss": 0.037, + "step": 648 + }, + { + "epoch": 2.6435845213849287, + "grad_norm": 0.8274526363011494, + "learning_rate": 4.1188777138450487e-07, + "loss": 0.0635, + "step": 649 + }, + { + "epoch": 2.6476578411405294, + "grad_norm": 0.5633117328220467, + "learning_rate": 4.024945738778163e-07, + "loss": 0.045, + "step": 650 + }, + { + "epoch": 2.6517311608961305, + "grad_norm": 0.5325073134146674, + "learning_rate": 3.9320522918289973e-07, + "loss": 0.0427, + "step": 651 + }, + { + "epoch": 2.655804480651731, + "grad_norm": 0.5506079099112636, + "learning_rate": 3.8401994713628044e-07, + "loss": 0.0444, + "step": 652 + }, + { + 
"epoch": 2.6598778004073322, + "grad_norm": 0.5182068970747546, + "learning_rate": 3.7493893522381866e-07, + "loss": 0.0368, + "step": 653 + }, + { + "epoch": 2.663951120162933, + "grad_norm": 0.5922018474747655, + "learning_rate": 3.6596239857602136e-07, + "loss": 0.0448, + "step": 654 + }, + { + "epoch": 2.6680244399185336, + "grad_norm": 0.6568887443509075, + "learning_rate": 3.570905399634111e-07, + "loss": 0.0381, + "step": 655 + }, + { + "epoch": 2.672097759674134, + "grad_norm": 0.5388479916650084, + "learning_rate": 3.483235597919404e-07, + "loss": 0.0377, + "step": 656 + }, + { + "epoch": 2.6761710794297353, + "grad_norm": 0.5081067207399704, + "learning_rate": 3.396616560984711e-07, + "loss": 0.0351, + "step": 657 + }, + { + "epoch": 2.680244399185336, + "grad_norm": 0.48450121917494543, + "learning_rate": 3.31105024546296e-07, + "loss": 0.036, + "step": 658 + }, + { + "epoch": 2.684317718940937, + "grad_norm": 0.647050948858888, + "learning_rate": 3.226538584207228e-07, + "loss": 0.0459, + "step": 659 + }, + { + "epoch": 2.6883910386965377, + "grad_norm": 0.6890279836440739, + "learning_rate": 3.1430834862470395e-07, + "loss": 0.0554, + "step": 660 + }, + { + "epoch": 2.6924643584521384, + "grad_norm": 1.8140245687384717, + "learning_rate": 3.0606868367452746e-07, + "loss": 0.0561, + "step": 661 + }, + { + "epoch": 2.696537678207739, + "grad_norm": 0.512535359667785, + "learning_rate": 2.9793504969555965e-07, + "loss": 0.0433, + "step": 662 + }, + { + "epoch": 2.70061099796334, + "grad_norm": 0.5146434179799297, + "learning_rate": 2.899076304180348e-07, + "loss": 0.0393, + "step": 663 + }, + { + "epoch": 2.704684317718941, + "grad_norm": 0.7613608910074268, + "learning_rate": 2.819866071729127e-07, + "loss": 0.0571, + "step": 664 + }, + { + "epoch": 2.708757637474542, + "grad_norm": 0.5788433559178309, + "learning_rate": 2.7417215888777493e-07, + "loss": 0.0367, + "step": 665 + }, + { + "epoch": 2.7128309572301426, + "grad_norm": 0.6619235135428994, + 
"learning_rate": 2.6646446208279054e-07, + "loss": 0.0373, + "step": 666 + }, + { + "epoch": 2.716904276985743, + "grad_norm": 0.5169219759499485, + "learning_rate": 2.5886369086672193e-07, + "loss": 0.0423, + "step": 667 + }, + { + "epoch": 2.7209775967413443, + "grad_norm": 0.5379720171061586, + "learning_rate": 2.513700169329963e-07, + "loss": 0.0365, + "step": 668 + }, + { + "epoch": 2.725050916496945, + "grad_norm": 0.6756425751790065, + "learning_rate": 2.439836095558262e-07, + "loss": 0.045, + "step": 669 + }, + { + "epoch": 2.729124236252546, + "grad_norm": 0.48551183048653324, + "learning_rate": 2.3670463558638556e-07, + "loss": 0.0369, + "step": 670 + }, + { + "epoch": 2.7331975560081467, + "grad_norm": 0.5897842837912828, + "learning_rate": 2.2953325944903848e-07, + "loss": 0.0368, + "step": 671 + }, + { + "epoch": 2.7372708757637474, + "grad_norm": 0.5831457921590835, + "learning_rate": 2.2246964313763053e-07, + "loss": 0.0406, + "step": 672 + }, + { + "epoch": 2.741344195519348, + "grad_norm": 0.6668669980963122, + "learning_rate": 2.1551394621182277e-07, + "loss": 0.0413, + "step": 673 + }, + { + "epoch": 2.745417515274949, + "grad_norm": 0.5505672609048925, + "learning_rate": 2.08666325793494e-07, + "loss": 0.0414, + "step": 674 + }, + { + "epoch": 2.74949083503055, + "grad_norm": 0.550600720968359, + "learning_rate": 2.0192693656318597e-07, + "loss": 0.0416, + "step": 675 + }, + { + "epoch": 2.753564154786151, + "grad_norm": 0.5310931391199994, + "learning_rate": 1.9529593075661267e-07, + "loss": 0.0412, + "step": 676 + }, + { + "epoch": 2.7576374745417516, + "grad_norm": 0.5363558016582138, + "learning_rate": 1.8877345816122162e-07, + "loss": 0.038, + "step": 677 + }, + { + "epoch": 2.7617107942973522, + "grad_norm": 0.5178984255141141, + "learning_rate": 1.8235966611280687e-07, + "loss": 0.039, + "step": 678 + }, + { + "epoch": 2.765784114052953, + "grad_norm": 0.5466402434707056, + "learning_rate": 1.760546994921858e-07, + "loss": 0.0389, + 
"step": 679 + }, + { + "epoch": 2.769857433808554, + "grad_norm": 0.6121457657463409, + "learning_rate": 1.6985870072192156e-07, + "loss": 0.0452, + "step": 680 + }, + { + "epoch": 2.7739307535641546, + "grad_norm": 0.5349893671952921, + "learning_rate": 1.6377180976310968e-07, + "loss": 0.0419, + "step": 681 + }, + { + "epoch": 2.7780040733197557, + "grad_norm": 0.5604090248089841, + "learning_rate": 1.5779416411221437e-07, + "loss": 0.0384, + "step": 682 + }, + { + "epoch": 2.7820773930753564, + "grad_norm": 0.564686751441713, + "learning_rate": 1.5192589879796383e-07, + "loss": 0.0356, + "step": 683 + }, + { + "epoch": 2.786150712830957, + "grad_norm": 0.5005018586429554, + "learning_rate": 1.4616714637829822e-07, + "loss": 0.0389, + "step": 684 + }, + { + "epoch": 2.790224032586558, + "grad_norm": 0.6015625122582622, + "learning_rate": 1.4051803693737876e-07, + "loss": 0.0403, + "step": 685 + }, + { + "epoch": 2.794297352342159, + "grad_norm": 1.2111120323286364, + "learning_rate": 1.3497869808264453e-07, + "loss": 0.0626, + "step": 686 + }, + { + "epoch": 2.79837067209776, + "grad_norm": 0.5396431590816398, + "learning_rate": 1.2954925494193472e-07, + "loss": 0.0395, + "step": 687 + }, + { + "epoch": 2.8024439918533606, + "grad_norm": 0.4817069481887003, + "learning_rate": 1.2422983016065816e-07, + "loss": 0.0371, + "step": 688 + }, + { + "epoch": 2.8065173116089612, + "grad_norm": 1.2000698848409261, + "learning_rate": 1.1902054389902662e-07, + "loss": 0.0606, + "step": 689 + }, + { + "epoch": 2.810590631364562, + "grad_norm": 0.5941877180307404, + "learning_rate": 1.1392151382933647e-07, + "loss": 0.0368, + "step": 690 + }, + { + "epoch": 2.814663951120163, + "grad_norm": 0.6266160796772987, + "learning_rate": 1.0893285513331353e-07, + "loss": 0.0447, + "step": 691 + }, + { + "epoch": 2.8187372708757636, + "grad_norm": 0.6046216067369392, + "learning_rate": 1.0405468049951184e-07, + "loss": 0.0392, + "step": 692 + }, + { + "epoch": 2.8228105906313647, + 
"grad_norm": 0.5575790775752935, + "learning_rate": 9.928710012076404e-08, + "loss": 0.0451, + "step": 693 + }, + { + "epoch": 2.8268839103869654, + "grad_norm": 0.5473539803468005, + "learning_rate": 9.463022169169666e-08, + "loss": 0.0399, + "step": 694 + }, + { + "epoch": 2.830957230142566, + "grad_norm": 0.6009599586325383, + "learning_rate": 9.008415040629548e-08, + "loss": 0.0353, + "step": 695 + }, + { + "epoch": 2.835030549898167, + "grad_norm": 0.6489298732282478, + "learning_rate": 8.564898895552843e-08, + "loss": 0.0464, + "step": 696 + }, + { + "epoch": 2.839103869653768, + "grad_norm": 0.5963225875320323, + "learning_rate": 8.132483752502806e-08, + "loss": 0.0309, + "step": 697 + }, + { + "epoch": 2.8431771894093685, + "grad_norm": 0.5218917037293123, + "learning_rate": 7.711179379282674e-08, + "loss": 0.0427, + "step": 698 + }, + { + "epoch": 2.8472505091649696, + "grad_norm": 0.6978736014217868, + "learning_rate": 7.300995292715107e-08, + "loss": 0.0503, + "step": 699 + }, + { + "epoch": 2.8513238289205702, + "grad_norm": 0.9455337732227677, + "learning_rate": 6.901940758427206e-08, + "loss": 0.0496, + "step": 700 + }, + { + "epoch": 2.855397148676171, + "grad_norm": 0.5306662874239818, + "learning_rate": 6.514024790641116e-08, + "loss": 0.0324, + "step": 701 + }, + { + "epoch": 2.859470468431772, + "grad_norm": 0.5226514754428617, + "learning_rate": 6.137256151970583e-08, + "loss": 0.0358, + "step": 702 + }, + { + "epoch": 2.8635437881873727, + "grad_norm": 0.5460041457771847, + "learning_rate": 5.771643353222778e-08, + "loss": 0.0399, + "step": 703 + }, + { + "epoch": 2.8676171079429738, + "grad_norm": 0.5162398669868244, + "learning_rate": 5.417194653206337e-08, + "loss": 0.0396, + "step": 704 + }, + { + "epoch": 2.8716904276985744, + "grad_norm": 0.5368972754214257, + "learning_rate": 5.073918058544458e-08, + "loss": 0.0406, + "step": 705 + }, + { + "epoch": 2.875763747454175, + "grad_norm": 0.588105419732977, + "learning_rate": 
4.741821323494489e-08, + "loss": 0.0371, + "step": 706 + }, + { + "epoch": 2.8798370672097757, + "grad_norm": 0.4500501700097903, + "learning_rate": 4.4209119497722883e-08, + "loss": 0.0347, + "step": 707 + }, + { + "epoch": 2.883910386965377, + "grad_norm": 0.5696365953710792, + "learning_rate": 4.1111971863830866e-08, + "loss": 0.0373, + "step": 708 + }, + { + "epoch": 2.8879837067209775, + "grad_norm": 0.5792607166669516, + "learning_rate": 3.812684029457614e-08, + "loss": 0.0385, + "step": 709 + }, + { + "epoch": 2.8920570264765786, + "grad_norm": 0.5055106261358345, + "learning_rate": 3.525379222094061e-08, + "loss": 0.0368, + "step": 710 + }, + { + "epoch": 2.8961303462321792, + "grad_norm": 0.541427455199586, + "learning_rate": 3.249289254205867e-08, + "loss": 0.0387, + "step": 711 + }, + { + "epoch": 2.90020366598778, + "grad_norm": 0.5454327791955479, + "learning_rate": 2.984420362375007e-08, + "loss": 0.0374, + "step": 712 + }, + { + "epoch": 2.904276985743381, + "grad_norm": 0.5647837269627647, + "learning_rate": 2.7307785297111533e-08, + "loss": 0.0361, + "step": 713 + }, + { + "epoch": 2.9083503054989817, + "grad_norm": 0.4987542173953722, + "learning_rate": 2.488369485716513e-08, + "loss": 0.0396, + "step": 714 + }, + { + "epoch": 2.9124236252545828, + "grad_norm": 0.5027481564306886, + "learning_rate": 2.2571987061564827e-08, + "loss": 0.0349, + "step": 715 + }, + { + "epoch": 2.9164969450101834, + "grad_norm": 0.5256678599998655, + "learning_rate": 2.0372714129356375e-08, + "loss": 0.0405, + "step": 716 + }, + { + "epoch": 2.920570264765784, + "grad_norm": 0.49502069413096106, + "learning_rate": 1.8285925739803812e-08, + "loss": 0.036, + "step": 717 + }, + { + "epoch": 2.9246435845213847, + "grad_norm": 0.5467110241512876, + "learning_rate": 1.631166903126147e-08, + "loss": 0.0434, + "step": 718 + }, + { + "epoch": 2.928716904276986, + "grad_norm": 0.7022157113889199, + "learning_rate": 1.4449988600111486e-08, + "loss": 0.0495, + "step": 719 + }, + 
{ + "epoch": 2.9327902240325865, + "grad_norm": 0.5499848006034249, + "learning_rate": 1.2700926499756295e-08, + "loss": 0.0405, + "step": 720 + }, + { + "epoch": 2.9368635437881876, + "grad_norm": 0.5059098241990838, + "learning_rate": 1.1064522239669916e-08, + "loss": 0.0339, + "step": 721 + }, + { + "epoch": 2.9409368635437882, + "grad_norm": 1.192165546566186, + "learning_rate": 9.54081278450314e-09, + "loss": 0.0497, + "step": 722 + }, + { + "epoch": 2.945010183299389, + "grad_norm": 0.5876747884158724, + "learning_rate": 8.129832553249173e-09, + "loss": 0.0393, + "step": 723 + }, + { + "epoch": 2.9490835030549896, + "grad_norm": 0.564386085643717, + "learning_rate": 6.831613418468163e-09, + "loss": 0.0445, + "step": 724 + }, + { + "epoch": 2.9531568228105907, + "grad_norm": 0.5352000628276116, + "learning_rate": 5.646184705563884e-09, + "loss": 0.0325, + "step": 725 + }, + { + "epoch": 2.9572301425661913, + "grad_norm": 0.5408383588017381, + "learning_rate": 4.573573192125369e-09, + "loss": 0.0356, + "step": 726 + }, + { + "epoch": 2.9613034623217924, + "grad_norm": 0.5942181511771125, + "learning_rate": 3.613803107317959e-09, + "loss": 0.0463, + "step": 727 + }, + { + "epoch": 2.965376782077393, + "grad_norm": 0.5365629409093408, + "learning_rate": 2.7668961313376263e-09, + "loss": 0.0377, + "step": 728 + }, + { + "epoch": 2.9694501018329937, + "grad_norm": 0.5977649244620887, + "learning_rate": 2.0328713949230304e-09, + "loss": 0.0466, + "step": 729 + }, + { + "epoch": 2.973523421588595, + "grad_norm": 0.5879417626438596, + "learning_rate": 1.4117454789208673e-09, + "loss": 0.0427, + "step": 730 + }, + { + "epoch": 2.9775967413441955, + "grad_norm": 0.5682231856966399, + "learning_rate": 9.03532413911723e-10, + "loss": 0.0424, + "step": 731 + }, + { + "epoch": 2.9816700610997966, + "grad_norm": 0.48337158082961007, + "learning_rate": 5.08243679894771e-10, + "loss": 0.0305, + "step": 732 + }, + { + "epoch": 2.9857433808553973, + "grad_norm": 
0.5700756040994207, + "learning_rate": 2.2588820602631457e-10, + "loss": 0.047, + "step": 733 + }, + { + "epoch": 2.989816700610998, + "grad_norm": 0.6339798377809833, + "learning_rate": 5.6472370419391464e-11, + "loss": 0.045, + "step": 734 + }, + { + "epoch": 2.9938900203665986, + "grad_norm": 0.5348219812753423, + "learning_rate": 0.0, + "loss": 0.044, + "step": 735 + }, + { + "epoch": 2.9938900203665986, + "step": 735, + "total_flos": 48064094208000.0, + "train_loss": 0.08951896556025865, + "train_runtime": 6057.9419, + "train_samples_per_second": 1.945, + "train_steps_per_second": 0.121 + } + ], + "logging_steps": 1, + "max_steps": 735, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 250, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 48064094208000.0, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/training_args.bin b/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..d80d80f91915a307660dad4b2c4bb70a1f1629ab --- /dev/null +++ b/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:266201b4dbff74ad87f1a11f3b724a4866069747c79f60058f5aae5f6e7c094d +size 7416 diff --git a/training_loss.png b/training_loss.png new file mode 100644 index 0000000000000000000000000000000000000000..bcbc972cd25f51861923aa8f083507e0ab178fc7 Binary files /dev/null and b/training_loss.png differ