Add files using upload-large-folder tool
Browse files- .gitattributes +1 -0
- README.md +60 -0
- added_tokens.json +24 -0
- all_results.json +9 -0
- chat_template.jinja +7 -0
- config.json +135 -0
- generation_config.json +12 -0
- merges.txt +0 -0
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +737 -0
- preprocessor_config.json +37 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +209 -0
- train_results.json +9 -0
- trainer_log.jsonl +488 -0
- trainer_state.json +3452 -0
- training_args.bin +3 -0
- training_loss.png +0 -0
- video_preprocessor_config.json +43 -0
- vocab.json +0 -0
.gitattributes
CHANGED
|
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
tokenizer.json filter=lfs diff=lfs merge=lfs -text
|
README.md
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
library_name: transformers
|
| 3 |
+
license: other
|
| 4 |
+
base_model: Qwen/Qwen2.5-VL-7B-Instruct
|
| 5 |
+
tags:
|
| 6 |
+
- llama-factory
|
| 7 |
+
- full
|
| 8 |
+
- generated_from_trainer
|
| 9 |
+
model-index:
|
| 10 |
+
- name: qwen2_5vl_7b_easyr1_38k_hard_qwen7b_easy_gta1_4MP_stacked_pro_apps_no_resolution_in_prompt_lr_1_0e-06_bs16_4nodes_2epochs
|
| 11 |
+
results: []
|
| 12 |
+
---
|
| 13 |
+
|
| 14 |
+
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
|
| 15 |
+
should probably proofread and complete it, then remove this comment. -->
|
| 16 |
+
|
| 17 |
+
# qwen2_5vl_7b_easyr1_38k_hard_qwen7b_easy_gta1_4MP_stacked_pro_apps_no_resolution_in_prompt_lr_1_0e-06_bs16_4nodes_2epochs
|
| 18 |
+
|
| 19 |
+
This model is a fine-tuned version of [/p/project1/synthlaion/awadalla1/models/Qwen2.5-VL-7B-Instruct](https://huggingface.co//p/project1/synthlaion/awadalla1/models/Qwen2.5-VL-7B-Instruct) on the easyr1-38k-hard-qwen7b-easy-gta1-4MP-stacked-pro-apps-no-resolution-in-prompt dataset.
|
| 20 |
+
|
| 21 |
+
## Model description
|
| 22 |
+
|
| 23 |
+
More information needed
|
| 24 |
+
|
| 25 |
+
## Intended uses & limitations
|
| 26 |
+
|
| 27 |
+
More information needed
|
| 28 |
+
|
| 29 |
+
## Training and evaluation data
|
| 30 |
+
|
| 31 |
+
More information needed
|
| 32 |
+
|
| 33 |
+
## Training procedure
|
| 34 |
+
|
| 35 |
+
### Training hyperparameters
|
| 36 |
+
|
| 37 |
+
The following hyperparameters were used during training:
|
| 38 |
+
- learning_rate: 1e-06
|
| 39 |
+
- train_batch_size: 1
|
| 40 |
+
- eval_batch_size: 8
|
| 41 |
+
- seed: 42
|
| 42 |
+
- distributed_type: multi-GPU
|
| 43 |
+
- num_devices: 16
|
| 44 |
+
- total_train_batch_size: 16
|
| 45 |
+
- total_eval_batch_size: 128
|
| 46 |
+
- optimizer: Use OptimizerNames.ADAMW_TORCH_FUSED with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
|
| 47 |
+
- lr_scheduler_type: cosine
|
| 48 |
+
- lr_scheduler_warmup_ratio: 0.1
|
| 49 |
+
- num_epochs: 2.0
|
| 50 |
+
|
| 51 |
+
### Training results
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
### Framework versions
|
| 56 |
+
|
| 57 |
+
- Transformers 4.55.0
|
| 58 |
+
- Pytorch 2.8.0+cu129
|
| 59 |
+
- Datasets 4.0.0
|
| 60 |
+
- Tokenizers 0.21.1
|
added_tokens.json
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"</tool_call>": 151658,
|
| 3 |
+
"<tool_call>": 151657,
|
| 4 |
+
"<|box_end|>": 151649,
|
| 5 |
+
"<|box_start|>": 151648,
|
| 6 |
+
"<|endoftext|>": 151643,
|
| 7 |
+
"<|file_sep|>": 151664,
|
| 8 |
+
"<|fim_middle|>": 151660,
|
| 9 |
+
"<|fim_pad|>": 151662,
|
| 10 |
+
"<|fim_prefix|>": 151659,
|
| 11 |
+
"<|fim_suffix|>": 151661,
|
| 12 |
+
"<|im_end|>": 151645,
|
| 13 |
+
"<|im_start|>": 151644,
|
| 14 |
+
"<|image_pad|>": 151655,
|
| 15 |
+
"<|object_ref_end|>": 151647,
|
| 16 |
+
"<|object_ref_start|>": 151646,
|
| 17 |
+
"<|quad_end|>": 151651,
|
| 18 |
+
"<|quad_start|>": 151650,
|
| 19 |
+
"<|repo_name|>": 151663,
|
| 20 |
+
"<|video_pad|>": 151656,
|
| 21 |
+
"<|vision_end|>": 151653,
|
| 22 |
+
"<|vision_pad|>": 151654,
|
| 23 |
+
"<|vision_start|>": 151652
|
| 24 |
+
}
|
all_results.json
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"effective_tokens_per_sec": 237.99224040941883,
|
| 3 |
+
"epoch": 2.0,
|
| 4 |
+
"total_flos": 688709313101824.0,
|
| 5 |
+
"train_loss": 0.4963684876922902,
|
| 6 |
+
"train_runtime": 35426.998,
|
| 7 |
+
"train_samples_per_second": 2.201,
|
| 8 |
+
"train_steps_per_second": 0.138
|
| 9 |
+
}
|
chat_template.jinja
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system
|
| 2 |
+
You are a helpful assistant.<|im_end|>
|
| 3 |
+
{% endif %}<|im_start|>{{ message['role'] }}
|
| 4 |
+
{% if message['content'] is string %}{{ message['content'] }}<|im_end|>
|
| 5 |
+
{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>
|
| 6 |
+
{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant
|
| 7 |
+
{% endif %}
|
config.json
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"architectures": [
|
| 3 |
+
"Qwen2_5_VLForConditionalGeneration"
|
| 4 |
+
],
|
| 5 |
+
"attention_dropout": 0.0,
|
| 6 |
+
"bos_token_id": 151643,
|
| 7 |
+
"eos_token_id": 151645,
|
| 8 |
+
"hidden_act": "silu",
|
| 9 |
+
"hidden_size": 3584,
|
| 10 |
+
"image_token_id": 151655,
|
| 11 |
+
"initializer_range": 0.02,
|
| 12 |
+
"intermediate_size": 18944,
|
| 13 |
+
"max_position_embeddings": 128000,
|
| 14 |
+
"max_window_layers": 28,
|
| 15 |
+
"model_type": "qwen2_5_vl",
|
| 16 |
+
"num_attention_heads": 28,
|
| 17 |
+
"num_hidden_layers": 28,
|
| 18 |
+
"num_key_value_heads": 4,
|
| 19 |
+
"rms_norm_eps": 1e-06,
|
| 20 |
+
"rope_scaling": {
|
| 21 |
+
"mrope_section": [
|
| 22 |
+
16,
|
| 23 |
+
24,
|
| 24 |
+
24
|
| 25 |
+
],
|
| 26 |
+
"rope_type": "default",
|
| 27 |
+
"type": "default"
|
| 28 |
+
},
|
| 29 |
+
"rope_theta": 1000000.0,
|
| 30 |
+
"sliding_window": 32768,
|
| 31 |
+
"text_config": {
|
| 32 |
+
"architectures": [
|
| 33 |
+
"Qwen2_5_VLForConditionalGeneration"
|
| 34 |
+
],
|
| 35 |
+
"attention_dropout": 0.0,
|
| 36 |
+
"bos_token_id": 151643,
|
| 37 |
+
"eos_token_id": 151645,
|
| 38 |
+
"hidden_act": "silu",
|
| 39 |
+
"hidden_size": 3584,
|
| 40 |
+
"image_token_id": null,
|
| 41 |
+
"initializer_range": 0.02,
|
| 42 |
+
"intermediate_size": 18944,
|
| 43 |
+
"layer_types": [
|
| 44 |
+
"full_attention",
|
| 45 |
+
"full_attention",
|
| 46 |
+
"full_attention",
|
| 47 |
+
"full_attention",
|
| 48 |
+
"full_attention",
|
| 49 |
+
"full_attention",
|
| 50 |
+
"full_attention",
|
| 51 |
+
"full_attention",
|
| 52 |
+
"full_attention",
|
| 53 |
+
"full_attention",
|
| 54 |
+
"full_attention",
|
| 55 |
+
"full_attention",
|
| 56 |
+
"full_attention",
|
| 57 |
+
"full_attention",
|
| 58 |
+
"full_attention",
|
| 59 |
+
"full_attention",
|
| 60 |
+
"full_attention",
|
| 61 |
+
"full_attention",
|
| 62 |
+
"full_attention",
|
| 63 |
+
"full_attention",
|
| 64 |
+
"full_attention",
|
| 65 |
+
"full_attention",
|
| 66 |
+
"full_attention",
|
| 67 |
+
"full_attention",
|
| 68 |
+
"full_attention",
|
| 69 |
+
"full_attention",
|
| 70 |
+
"full_attention",
|
| 71 |
+
"full_attention"
|
| 72 |
+
],
|
| 73 |
+
"max_position_embeddings": 128000,
|
| 74 |
+
"max_window_layers": 28,
|
| 75 |
+
"model_type": "qwen2_5_vl_text",
|
| 76 |
+
"num_attention_heads": 28,
|
| 77 |
+
"num_hidden_layers": 28,
|
| 78 |
+
"num_key_value_heads": 4,
|
| 79 |
+
"rms_norm_eps": 1e-06,
|
| 80 |
+
"rope_scaling": {
|
| 81 |
+
"mrope_section": [
|
| 82 |
+
16,
|
| 83 |
+
24,
|
| 84 |
+
24
|
| 85 |
+
],
|
| 86 |
+
"rope_type": "default",
|
| 87 |
+
"type": "default"
|
| 88 |
+
},
|
| 89 |
+
"rope_theta": 1000000.0,
|
| 90 |
+
"sliding_window": null,
|
| 91 |
+
"torch_dtype": "float32",
|
| 92 |
+
"use_cache": false,
|
| 93 |
+
"use_sliding_window": false,
|
| 94 |
+
"video_token_id": null,
|
| 95 |
+
"vision_end_token_id": 151653,
|
| 96 |
+
"vision_start_token_id": 151652,
|
| 97 |
+
"vision_token_id": 151654,
|
| 98 |
+
"vocab_size": 152064
|
| 99 |
+
},
|
| 100 |
+
"tie_word_embeddings": false,
|
| 101 |
+
"torch_dtype": "bfloat16",
|
| 102 |
+
"transformers_version": "4.55.0",
|
| 103 |
+
"use_cache": false,
|
| 104 |
+
"use_sliding_window": false,
|
| 105 |
+
"video_token_id": 151656,
|
| 106 |
+
"vision_config": {
|
| 107 |
+
"depth": 32,
|
| 108 |
+
"fullatt_block_indexes": [
|
| 109 |
+
7,
|
| 110 |
+
15,
|
| 111 |
+
23,
|
| 112 |
+
31
|
| 113 |
+
],
|
| 114 |
+
"hidden_act": "silu",
|
| 115 |
+
"hidden_size": 1280,
|
| 116 |
+
"in_channels": 3,
|
| 117 |
+
"in_chans": 3,
|
| 118 |
+
"initializer_range": 0.02,
|
| 119 |
+
"intermediate_size": 3420,
|
| 120 |
+
"model_type": "qwen2_5_vl",
|
| 121 |
+
"num_heads": 16,
|
| 122 |
+
"out_hidden_size": 3584,
|
| 123 |
+
"patch_size": 14,
|
| 124 |
+
"spatial_merge_size": 2,
|
| 125 |
+
"spatial_patch_size": 14,
|
| 126 |
+
"temporal_patch_size": 2,
|
| 127 |
+
"tokens_per_second": 2,
|
| 128 |
+
"torch_dtype": "float32",
|
| 129 |
+
"window_size": 112
|
| 130 |
+
},
|
| 131 |
+
"vision_end_token_id": 151653,
|
| 132 |
+
"vision_start_token_id": 151652,
|
| 133 |
+
"vision_token_id": 151654,
|
| 134 |
+
"vocab_size": 152064
|
| 135 |
+
}
|
generation_config.json
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"bos_token_id": 151643,
|
| 3 |
+
"do_sample": true,
|
| 4 |
+
"eos_token_id": [
|
| 5 |
+
151645,
|
| 6 |
+
151643
|
| 7 |
+
],
|
| 8 |
+
"pad_token_id": 151643,
|
| 9 |
+
"repetition_penalty": 1.05,
|
| 10 |
+
"temperature": 1e-06,
|
| 11 |
+
"transformers_version": "4.55.0"
|
| 12 |
+
}
|
merges.txt
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
model-00001-of-00004.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:057e4b41d1968fcfab77a01acbff1a877dba5eee0ecb5e7fc6f732d1dbc7bca6
|
| 3 |
+
size 4968243304
|
model-00002-of-00004.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:1986c9da65589ae790cfa22110559f72319bb2db8dbca43a1aca779fbf9e9c87
|
| 3 |
+
size 4991495816
|
model-00003-of-00004.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:6e6637d50a27aa9beae0910c22ea7d080809b7e5ba4d9c9a67aa1c4b2a1e1ff9
|
| 3 |
+
size 4932751040
|
model-00004-of-00004.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:64818afd8354323fa698a69ae74083c425380ad58dc4b7e48702732b326e43c4
|
| 3 |
+
size 1691924384
|
model.safetensors.index.json
ADDED
|
@@ -0,0 +1,737 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"metadata": {
|
| 3 |
+
"total_parameters": 848896,
|
| 4 |
+
"total_size": 16584333312
|
| 5 |
+
},
|
| 6 |
+
"weight_map": {
|
| 7 |
+
"lm_head.weight": "model-00004-of-00004.safetensors",
|
| 8 |
+
"model.embed_tokens.weight": "model-00001-of-00004.safetensors",
|
| 9 |
+
"model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 10 |
+
"model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 11 |
+
"model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 12 |
+
"model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 13 |
+
"model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 14 |
+
"model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 15 |
+
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 16 |
+
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 17 |
+
"model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 18 |
+
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 19 |
+
"model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 20 |
+
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 21 |
+
"model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 22 |
+
"model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 23 |
+
"model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 24 |
+
"model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 25 |
+
"model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 26 |
+
"model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 27 |
+
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 28 |
+
"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 29 |
+
"model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 30 |
+
"model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 31 |
+
"model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 32 |
+
"model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 33 |
+
"model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 34 |
+
"model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 35 |
+
"model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 36 |
+
"model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 37 |
+
"model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 38 |
+
"model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 39 |
+
"model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 40 |
+
"model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 41 |
+
"model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 42 |
+
"model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 43 |
+
"model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 44 |
+
"model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 45 |
+
"model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 46 |
+
"model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 47 |
+
"model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 48 |
+
"model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 49 |
+
"model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 50 |
+
"model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 51 |
+
"model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 52 |
+
"model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 53 |
+
"model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 54 |
+
"model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 55 |
+
"model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 56 |
+
"model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 57 |
+
"model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 58 |
+
"model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 59 |
+
"model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 60 |
+
"model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 61 |
+
"model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 62 |
+
"model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 63 |
+
"model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 64 |
+
"model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 65 |
+
"model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 66 |
+
"model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 67 |
+
"model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 68 |
+
"model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 69 |
+
"model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 70 |
+
"model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 71 |
+
"model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 72 |
+
"model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 73 |
+
"model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 74 |
+
"model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 75 |
+
"model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 76 |
+
"model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 77 |
+
"model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 78 |
+
"model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 79 |
+
"model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 80 |
+
"model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 81 |
+
"model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 82 |
+
"model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 83 |
+
"model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 84 |
+
"model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 85 |
+
"model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 86 |
+
"model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 87 |
+
"model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 88 |
+
"model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 89 |
+
"model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 90 |
+
"model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 91 |
+
"model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 92 |
+
"model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 93 |
+
"model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 94 |
+
"model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 95 |
+
"model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 96 |
+
"model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 97 |
+
"model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 98 |
+
"model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 99 |
+
"model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 100 |
+
"model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 101 |
+
"model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 102 |
+
"model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 103 |
+
"model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 104 |
+
"model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 105 |
+
"model.layers.16.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 106 |
+
"model.layers.16.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 107 |
+
"model.layers.16.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 108 |
+
"model.layers.16.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 109 |
+
"model.layers.16.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 110 |
+
"model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 111 |
+
"model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 112 |
+
"model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 113 |
+
"model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 114 |
+
"model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 115 |
+
"model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 116 |
+
"model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 117 |
+
"model.layers.17.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 118 |
+
"model.layers.17.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 119 |
+
"model.layers.17.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 120 |
+
"model.layers.17.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 121 |
+
"model.layers.17.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 122 |
+
"model.layers.17.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 123 |
+
"model.layers.17.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 124 |
+
"model.layers.17.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 125 |
+
"model.layers.17.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 126 |
+
"model.layers.17.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 127 |
+
"model.layers.17.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 128 |
+
"model.layers.17.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 129 |
+
"model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 130 |
+
"model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 131 |
+
"model.layers.18.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 132 |
+
"model.layers.18.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 133 |
+
"model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 134 |
+
"model.layers.18.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 135 |
+
"model.layers.18.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 136 |
+
"model.layers.18.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 137 |
+
"model.layers.18.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 138 |
+
"model.layers.18.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 139 |
+
"model.layers.18.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 140 |
+
"model.layers.18.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 141 |
+
"model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 142 |
+
"model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 143 |
+
"model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 144 |
+
"model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 145 |
+
"model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 146 |
+
"model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 147 |
+
"model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 148 |
+
"model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 149 |
+
"model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 150 |
+
"model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 151 |
+
"model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 152 |
+
"model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 153 |
+
"model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 154 |
+
"model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 155 |
+
"model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 156 |
+
"model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 157 |
+
"model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 158 |
+
"model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 159 |
+
"model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 160 |
+
"model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 161 |
+
"model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 162 |
+
"model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 163 |
+
"model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 164 |
+
"model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 165 |
+
"model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 166 |
+
"model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 167 |
+
"model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 168 |
+
"model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 169 |
+
"model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 170 |
+
"model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 171 |
+
"model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 172 |
+
"model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 173 |
+
"model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 174 |
+
"model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 175 |
+
"model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 176 |
+
"model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 177 |
+
"model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 178 |
+
"model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 179 |
+
"model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 180 |
+
"model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 181 |
+
"model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 182 |
+
"model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 183 |
+
"model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 184 |
+
"model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 185 |
+
"model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 186 |
+
"model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 187 |
+
"model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 188 |
+
"model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 189 |
+
"model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 190 |
+
"model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 191 |
+
"model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 192 |
+
"model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 193 |
+
"model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 194 |
+
"model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 195 |
+
"model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 196 |
+
"model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 197 |
+
"model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 198 |
+
"model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 199 |
+
"model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 200 |
+
"model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 201 |
+
"model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 202 |
+
"model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 203 |
+
"model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 204 |
+
"model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 205 |
+
"model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 206 |
+
"model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 207 |
+
"model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 208 |
+
"model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 209 |
+
"model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 210 |
+
"model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 211 |
+
"model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 212 |
+
"model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 213 |
+
"model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 214 |
+
"model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 215 |
+
"model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 216 |
+
"model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 217 |
+
"model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 218 |
+
"model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 219 |
+
"model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 220 |
+
"model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 221 |
+
"model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 222 |
+
"model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 223 |
+
"model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 224 |
+
"model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 225 |
+
"model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 226 |
+
"model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 227 |
+
"model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 228 |
+
"model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 229 |
+
"model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 230 |
+
"model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 231 |
+
"model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 232 |
+
"model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 233 |
+
"model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 234 |
+
"model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 235 |
+
"model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 236 |
+
"model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 237 |
+
"model.layers.26.input_layernorm.weight": "model-00004-of-00004.safetensors",
|
| 238 |
+
"model.layers.26.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
|
| 239 |
+
"model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 240 |
+
"model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 241 |
+
"model.layers.26.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
|
| 242 |
+
"model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 243 |
+
"model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 244 |
+
"model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 245 |
+
"model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 246 |
+
"model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 247 |
+
"model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 248 |
+
"model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 249 |
+
"model.layers.27.input_layernorm.weight": "model-00004-of-00004.safetensors",
|
| 250 |
+
"model.layers.27.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
|
| 251 |
+
"model.layers.27.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
|
| 252 |
+
"model.layers.27.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
|
| 253 |
+
"model.layers.27.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
|
| 254 |
+
"model.layers.27.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
|
| 255 |
+
"model.layers.27.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
|
| 256 |
+
"model.layers.27.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
|
| 257 |
+
"model.layers.27.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
|
| 258 |
+
"model.layers.27.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
|
| 259 |
+
"model.layers.27.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
|
| 260 |
+
"model.layers.27.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
|
| 261 |
+
"model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 262 |
+
"model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 263 |
+
"model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 264 |
+
"model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 265 |
+
"model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 266 |
+
"model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 267 |
+
"model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 268 |
+
"model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 269 |
+
"model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 270 |
+
"model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 271 |
+
"model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 272 |
+
"model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 273 |
+
"model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 274 |
+
"model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 275 |
+
"model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 276 |
+
"model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 277 |
+
"model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 278 |
+
"model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 279 |
+
"model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 280 |
+
"model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 281 |
+
"model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 282 |
+
"model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 283 |
+
"model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 284 |
+
"model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 285 |
+
"model.layers.5.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 286 |
+
"model.layers.5.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 287 |
+
"model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 288 |
+
"model.layers.5.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 289 |
+
"model.layers.5.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 290 |
+
"model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 291 |
+
"model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 292 |
+
"model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 293 |
+
"model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 294 |
+
"model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 295 |
+
"model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 296 |
+
"model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 297 |
+
"model.layers.6.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 298 |
+
"model.layers.6.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 299 |
+
"model.layers.6.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 300 |
+
"model.layers.6.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 301 |
+
"model.layers.6.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 302 |
+
"model.layers.6.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 303 |
+
"model.layers.6.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 304 |
+
"model.layers.6.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 305 |
+
"model.layers.6.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 306 |
+
"model.layers.6.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 307 |
+
"model.layers.6.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 308 |
+
"model.layers.6.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 309 |
+
"model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 310 |
+
"model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 311 |
+
"model.layers.7.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 312 |
+
"model.layers.7.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 313 |
+
"model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 314 |
+
"model.layers.7.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 315 |
+
"model.layers.7.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 316 |
+
"model.layers.7.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 317 |
+
"model.layers.7.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 318 |
+
"model.layers.7.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 319 |
+
"model.layers.7.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 320 |
+
"model.layers.7.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 321 |
+
"model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 322 |
+
"model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 323 |
+
"model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 324 |
+
"model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 325 |
+
"model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 326 |
+
"model.layers.8.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 327 |
+
"model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 328 |
+
"model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 329 |
+
"model.layers.8.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 330 |
+
"model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 331 |
+
"model.layers.8.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 332 |
+
"model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 333 |
+
"model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 334 |
+
"model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 335 |
+
"model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 336 |
+
"model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 337 |
+
"model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 338 |
+
"model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 339 |
+
"model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 340 |
+
"model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 341 |
+
"model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 342 |
+
"model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 343 |
+
"model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 344 |
+
"model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 345 |
+
"model.norm.weight": "model-00004-of-00004.safetensors",
|
| 346 |
+
"visual.blocks.0.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 347 |
+
"visual.blocks.0.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 348 |
+
"visual.blocks.0.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 349 |
+
"visual.blocks.0.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 350 |
+
"visual.blocks.0.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 351 |
+
"visual.blocks.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 352 |
+
"visual.blocks.0.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 353 |
+
"visual.blocks.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 354 |
+
"visual.blocks.0.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 355 |
+
"visual.blocks.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 356 |
+
"visual.blocks.0.norm1.weight": "model-00001-of-00004.safetensors",
|
| 357 |
+
"visual.blocks.0.norm2.weight": "model-00001-of-00004.safetensors",
|
| 358 |
+
"visual.blocks.1.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 359 |
+
"visual.blocks.1.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 360 |
+
"visual.blocks.1.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 361 |
+
"visual.blocks.1.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 362 |
+
"visual.blocks.1.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 363 |
+
"visual.blocks.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 364 |
+
"visual.blocks.1.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 365 |
+
"visual.blocks.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 366 |
+
"visual.blocks.1.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 367 |
+
"visual.blocks.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 368 |
+
"visual.blocks.1.norm1.weight": "model-00001-of-00004.safetensors",
|
| 369 |
+
"visual.blocks.1.norm2.weight": "model-00001-of-00004.safetensors",
|
| 370 |
+
"visual.blocks.10.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 371 |
+
"visual.blocks.10.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 372 |
+
"visual.blocks.10.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 373 |
+
"visual.blocks.10.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 374 |
+
"visual.blocks.10.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 375 |
+
"visual.blocks.10.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 376 |
+
"visual.blocks.10.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 377 |
+
"visual.blocks.10.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 378 |
+
"visual.blocks.10.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 379 |
+
"visual.blocks.10.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 380 |
+
"visual.blocks.10.norm1.weight": "model-00001-of-00004.safetensors",
|
| 381 |
+
"visual.blocks.10.norm2.weight": "model-00001-of-00004.safetensors",
|
| 382 |
+
"visual.blocks.11.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 383 |
+
"visual.blocks.11.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 384 |
+
"visual.blocks.11.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 385 |
+
"visual.blocks.11.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 386 |
+
"visual.blocks.11.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 387 |
+
"visual.blocks.11.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 388 |
+
"visual.blocks.11.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 389 |
+
"visual.blocks.11.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 390 |
+
"visual.blocks.11.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 391 |
+
"visual.blocks.11.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 392 |
+
"visual.blocks.11.norm1.weight": "model-00001-of-00004.safetensors",
|
| 393 |
+
"visual.blocks.11.norm2.weight": "model-00001-of-00004.safetensors",
|
| 394 |
+
"visual.blocks.12.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 395 |
+
"visual.blocks.12.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 396 |
+
"visual.blocks.12.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 397 |
+
"visual.blocks.12.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 398 |
+
"visual.blocks.12.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 399 |
+
"visual.blocks.12.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 400 |
+
"visual.blocks.12.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 401 |
+
"visual.blocks.12.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 402 |
+
"visual.blocks.12.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 403 |
+
"visual.blocks.12.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 404 |
+
"visual.blocks.12.norm1.weight": "model-00001-of-00004.safetensors",
|
| 405 |
+
"visual.blocks.12.norm2.weight": "model-00001-of-00004.safetensors",
|
| 406 |
+
"visual.blocks.13.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 407 |
+
"visual.blocks.13.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 408 |
+
"visual.blocks.13.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 409 |
+
"visual.blocks.13.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 410 |
+
"visual.blocks.13.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 411 |
+
"visual.blocks.13.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 412 |
+
"visual.blocks.13.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 413 |
+
"visual.blocks.13.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 414 |
+
"visual.blocks.13.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 415 |
+
"visual.blocks.13.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 416 |
+
"visual.blocks.13.norm1.weight": "model-00001-of-00004.safetensors",
|
| 417 |
+
"visual.blocks.13.norm2.weight": "model-00001-of-00004.safetensors",
|
| 418 |
+
"visual.blocks.14.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 419 |
+
"visual.blocks.14.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 420 |
+
"visual.blocks.14.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 421 |
+
"visual.blocks.14.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 422 |
+
"visual.blocks.14.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 423 |
+
"visual.blocks.14.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 424 |
+
"visual.blocks.14.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 425 |
+
"visual.blocks.14.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 426 |
+
"visual.blocks.14.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 427 |
+
"visual.blocks.14.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 428 |
+
"visual.blocks.14.norm1.weight": "model-00001-of-00004.safetensors",
|
| 429 |
+
"visual.blocks.14.norm2.weight": "model-00001-of-00004.safetensors",
|
| 430 |
+
"visual.blocks.15.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 431 |
+
"visual.blocks.15.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 432 |
+
"visual.blocks.15.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 433 |
+
"visual.blocks.15.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 434 |
+
"visual.blocks.15.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 435 |
+
"visual.blocks.15.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 436 |
+
"visual.blocks.15.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 437 |
+
"visual.blocks.15.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 438 |
+
"visual.blocks.15.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 439 |
+
"visual.blocks.15.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 440 |
+
"visual.blocks.15.norm1.weight": "model-00001-of-00004.safetensors",
|
| 441 |
+
"visual.blocks.15.norm2.weight": "model-00001-of-00004.safetensors",
|
| 442 |
+
"visual.blocks.16.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 443 |
+
"visual.blocks.16.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 444 |
+
"visual.blocks.16.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 445 |
+
"visual.blocks.16.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 446 |
+
"visual.blocks.16.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 447 |
+
"visual.blocks.16.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 448 |
+
"visual.blocks.16.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 449 |
+
"visual.blocks.16.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 450 |
+
"visual.blocks.16.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 451 |
+
"visual.blocks.16.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 452 |
+
"visual.blocks.16.norm1.weight": "model-00001-of-00004.safetensors",
|
| 453 |
+
"visual.blocks.16.norm2.weight": "model-00001-of-00004.safetensors",
|
| 454 |
+
"visual.blocks.17.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 455 |
+
"visual.blocks.17.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 456 |
+
"visual.blocks.17.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 457 |
+
"visual.blocks.17.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 458 |
+
"visual.blocks.17.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 459 |
+
"visual.blocks.17.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 460 |
+
"visual.blocks.17.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 461 |
+
"visual.blocks.17.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 462 |
+
"visual.blocks.17.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 463 |
+
"visual.blocks.17.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 464 |
+
"visual.blocks.17.norm1.weight": "model-00001-of-00004.safetensors",
|
| 465 |
+
"visual.blocks.17.norm2.weight": "model-00001-of-00004.safetensors",
|
| 466 |
+
"visual.blocks.18.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 467 |
+
"visual.blocks.18.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 468 |
+
"visual.blocks.18.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 469 |
+
"visual.blocks.18.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 470 |
+
"visual.blocks.18.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 471 |
+
"visual.blocks.18.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 472 |
+
"visual.blocks.18.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 473 |
+
"visual.blocks.18.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 474 |
+
"visual.blocks.18.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 475 |
+
"visual.blocks.18.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 476 |
+
"visual.blocks.18.norm1.weight": "model-00001-of-00004.safetensors",
|
| 477 |
+
"visual.blocks.18.norm2.weight": "model-00001-of-00004.safetensors",
|
| 478 |
+
"visual.blocks.19.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 479 |
+
"visual.blocks.19.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 480 |
+
"visual.blocks.19.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 481 |
+
"visual.blocks.19.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 482 |
+
"visual.blocks.19.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 483 |
+
"visual.blocks.19.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 484 |
+
"visual.blocks.19.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 485 |
+
"visual.blocks.19.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 486 |
+
"visual.blocks.19.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 487 |
+
"visual.blocks.19.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 488 |
+
"visual.blocks.19.norm1.weight": "model-00001-of-00004.safetensors",
|
| 489 |
+
"visual.blocks.19.norm2.weight": "model-00001-of-00004.safetensors",
|
| 490 |
+
"visual.blocks.2.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 491 |
+
"visual.blocks.2.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 492 |
+
"visual.blocks.2.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 493 |
+
"visual.blocks.2.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 494 |
+
"visual.blocks.2.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 495 |
+
"visual.blocks.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 496 |
+
"visual.blocks.2.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 497 |
+
"visual.blocks.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 498 |
+
"visual.blocks.2.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 499 |
+
"visual.blocks.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 500 |
+
"visual.blocks.2.norm1.weight": "model-00001-of-00004.safetensors",
|
| 501 |
+
"visual.blocks.2.norm2.weight": "model-00001-of-00004.safetensors",
|
| 502 |
+
"visual.blocks.20.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 503 |
+
"visual.blocks.20.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 504 |
+
"visual.blocks.20.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 505 |
+
"visual.blocks.20.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 506 |
+
"visual.blocks.20.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 507 |
+
"visual.blocks.20.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 508 |
+
"visual.blocks.20.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 509 |
+
"visual.blocks.20.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 510 |
+
"visual.blocks.20.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 511 |
+
"visual.blocks.20.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 512 |
+
"visual.blocks.20.norm1.weight": "model-00001-of-00004.safetensors",
|
| 513 |
+
"visual.blocks.20.norm2.weight": "model-00001-of-00004.safetensors",
|
| 514 |
+
"visual.blocks.21.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 515 |
+
"visual.blocks.21.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 516 |
+
"visual.blocks.21.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 517 |
+
"visual.blocks.21.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 518 |
+
"visual.blocks.21.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 519 |
+
"visual.blocks.21.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 520 |
+
"visual.blocks.21.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 521 |
+
"visual.blocks.21.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 522 |
+
"visual.blocks.21.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 523 |
+
"visual.blocks.21.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 524 |
+
"visual.blocks.21.norm1.weight": "model-00001-of-00004.safetensors",
|
| 525 |
+
"visual.blocks.21.norm2.weight": "model-00001-of-00004.safetensors",
|
| 526 |
+
"visual.blocks.22.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 527 |
+
"visual.blocks.22.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 528 |
+
"visual.blocks.22.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 529 |
+
"visual.blocks.22.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 530 |
+
"visual.blocks.22.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 531 |
+
"visual.blocks.22.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 532 |
+
"visual.blocks.22.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 533 |
+
"visual.blocks.22.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 534 |
+
"visual.blocks.22.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 535 |
+
"visual.blocks.22.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 536 |
+
"visual.blocks.22.norm1.weight": "model-00001-of-00004.safetensors",
|
| 537 |
+
"visual.blocks.22.norm2.weight": "model-00001-of-00004.safetensors",
|
| 538 |
+
"visual.blocks.23.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 539 |
+
"visual.blocks.23.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 540 |
+
"visual.blocks.23.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 541 |
+
"visual.blocks.23.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 542 |
+
"visual.blocks.23.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 543 |
+
"visual.blocks.23.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 544 |
+
"visual.blocks.23.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 545 |
+
"visual.blocks.23.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 546 |
+
"visual.blocks.23.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 547 |
+
"visual.blocks.23.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 548 |
+
"visual.blocks.23.norm1.weight": "model-00001-of-00004.safetensors",
|
| 549 |
+
"visual.blocks.23.norm2.weight": "model-00001-of-00004.safetensors",
|
| 550 |
+
"visual.blocks.24.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 551 |
+
"visual.blocks.24.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 552 |
+
"visual.blocks.24.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 553 |
+
"visual.blocks.24.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 554 |
+
"visual.blocks.24.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 555 |
+
"visual.blocks.24.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 556 |
+
"visual.blocks.24.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 557 |
+
"visual.blocks.24.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 558 |
+
"visual.blocks.24.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 559 |
+
"visual.blocks.24.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 560 |
+
"visual.blocks.24.norm1.weight": "model-00001-of-00004.safetensors",
|
| 561 |
+
"visual.blocks.24.norm2.weight": "model-00001-of-00004.safetensors",
|
| 562 |
+
"visual.blocks.25.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 563 |
+
"visual.blocks.25.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 564 |
+
"visual.blocks.25.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 565 |
+
"visual.blocks.25.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 566 |
+
"visual.blocks.25.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 567 |
+
"visual.blocks.25.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 568 |
+
"visual.blocks.25.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 569 |
+
"visual.blocks.25.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 570 |
+
"visual.blocks.25.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 571 |
+
"visual.blocks.25.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 572 |
+
"visual.blocks.25.norm1.weight": "model-00001-of-00004.safetensors",
|
| 573 |
+
"visual.blocks.25.norm2.weight": "model-00001-of-00004.safetensors",
|
| 574 |
+
"visual.blocks.26.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 575 |
+
"visual.blocks.26.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 576 |
+
"visual.blocks.26.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 577 |
+
"visual.blocks.26.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 578 |
+
"visual.blocks.26.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 579 |
+
"visual.blocks.26.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 580 |
+
"visual.blocks.26.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 581 |
+
"visual.blocks.26.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 582 |
+
"visual.blocks.26.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 583 |
+
"visual.blocks.26.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 584 |
+
"visual.blocks.26.norm1.weight": "model-00001-of-00004.safetensors",
|
| 585 |
+
"visual.blocks.26.norm2.weight": "model-00001-of-00004.safetensors",
|
| 586 |
+
"visual.blocks.27.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 587 |
+
"visual.blocks.27.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 588 |
+
"visual.blocks.27.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 589 |
+
"visual.blocks.27.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 590 |
+
"visual.blocks.27.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 591 |
+
"visual.blocks.27.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 592 |
+
"visual.blocks.27.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 593 |
+
"visual.blocks.27.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 594 |
+
"visual.blocks.27.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 595 |
+
"visual.blocks.27.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 596 |
+
"visual.blocks.27.norm1.weight": "model-00001-of-00004.safetensors",
|
| 597 |
+
"visual.blocks.27.norm2.weight": "model-00001-of-00004.safetensors",
|
| 598 |
+
"visual.blocks.28.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 599 |
+
"visual.blocks.28.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 600 |
+
"visual.blocks.28.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 601 |
+
"visual.blocks.28.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 602 |
+
"visual.blocks.28.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 603 |
+
"visual.blocks.28.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 604 |
+
"visual.blocks.28.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 605 |
+
"visual.blocks.28.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 606 |
+
"visual.blocks.28.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 607 |
+
"visual.blocks.28.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 608 |
+
"visual.blocks.28.norm1.weight": "model-00001-of-00004.safetensors",
|
| 609 |
+
"visual.blocks.28.norm2.weight": "model-00001-of-00004.safetensors",
|
| 610 |
+
"visual.blocks.29.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 611 |
+
"visual.blocks.29.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 612 |
+
"visual.blocks.29.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 613 |
+
"visual.blocks.29.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 614 |
+
"visual.blocks.29.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 615 |
+
"visual.blocks.29.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 616 |
+
"visual.blocks.29.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 617 |
+
"visual.blocks.29.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 618 |
+
"visual.blocks.29.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 619 |
+
"visual.blocks.29.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 620 |
+
"visual.blocks.29.norm1.weight": "model-00001-of-00004.safetensors",
|
| 621 |
+
"visual.blocks.29.norm2.weight": "model-00001-of-00004.safetensors",
|
| 622 |
+
"visual.blocks.3.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 623 |
+
"visual.blocks.3.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 624 |
+
"visual.blocks.3.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 625 |
+
"visual.blocks.3.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 626 |
+
"visual.blocks.3.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 627 |
+
"visual.blocks.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 628 |
+
"visual.blocks.3.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 629 |
+
"visual.blocks.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 630 |
+
"visual.blocks.3.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 631 |
+
"visual.blocks.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 632 |
+
"visual.blocks.3.norm1.weight": "model-00001-of-00004.safetensors",
|
| 633 |
+
"visual.blocks.3.norm2.weight": "model-00001-of-00004.safetensors",
|
| 634 |
+
"visual.blocks.30.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 635 |
+
"visual.blocks.30.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 636 |
+
"visual.blocks.30.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 637 |
+
"visual.blocks.30.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 638 |
+
"visual.blocks.30.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 639 |
+
"visual.blocks.30.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 640 |
+
"visual.blocks.30.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 641 |
+
"visual.blocks.30.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 642 |
+
"visual.blocks.30.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 643 |
+
"visual.blocks.30.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 644 |
+
"visual.blocks.30.norm1.weight": "model-00001-of-00004.safetensors",
|
| 645 |
+
"visual.blocks.30.norm2.weight": "model-00001-of-00004.safetensors",
|
| 646 |
+
"visual.blocks.31.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 647 |
+
"visual.blocks.31.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 648 |
+
"visual.blocks.31.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 649 |
+
"visual.blocks.31.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 650 |
+
"visual.blocks.31.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 651 |
+
"visual.blocks.31.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 652 |
+
"visual.blocks.31.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 653 |
+
"visual.blocks.31.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 654 |
+
"visual.blocks.31.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 655 |
+
"visual.blocks.31.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 656 |
+
"visual.blocks.31.norm1.weight": "model-00001-of-00004.safetensors",
|
| 657 |
+
"visual.blocks.31.norm2.weight": "model-00001-of-00004.safetensors",
|
| 658 |
+
"visual.blocks.4.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 659 |
+
"visual.blocks.4.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 660 |
+
"visual.blocks.4.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 661 |
+
"visual.blocks.4.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 662 |
+
"visual.blocks.4.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 663 |
+
"visual.blocks.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 664 |
+
"visual.blocks.4.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 665 |
+
"visual.blocks.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 666 |
+
"visual.blocks.4.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 667 |
+
"visual.blocks.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 668 |
+
"visual.blocks.4.norm1.weight": "model-00001-of-00004.safetensors",
|
| 669 |
+
"visual.blocks.4.norm2.weight": "model-00001-of-00004.safetensors",
|
| 670 |
+
"visual.blocks.5.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 671 |
+
"visual.blocks.5.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 672 |
+
"visual.blocks.5.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 673 |
+
"visual.blocks.5.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 674 |
+
"visual.blocks.5.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 675 |
+
"visual.blocks.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 676 |
+
"visual.blocks.5.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 677 |
+
"visual.blocks.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 678 |
+
"visual.blocks.5.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 679 |
+
"visual.blocks.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 680 |
+
"visual.blocks.5.norm1.weight": "model-00001-of-00004.safetensors",
|
| 681 |
+
"visual.blocks.5.norm2.weight": "model-00001-of-00004.safetensors",
|
| 682 |
+
"visual.blocks.6.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 683 |
+
"visual.blocks.6.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 684 |
+
"visual.blocks.6.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 685 |
+
"visual.blocks.6.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 686 |
+
"visual.blocks.6.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 687 |
+
"visual.blocks.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 688 |
+
"visual.blocks.6.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 689 |
+
"visual.blocks.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 690 |
+
"visual.blocks.6.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 691 |
+
"visual.blocks.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 692 |
+
"visual.blocks.6.norm1.weight": "model-00001-of-00004.safetensors",
|
| 693 |
+
"visual.blocks.6.norm2.weight": "model-00001-of-00004.safetensors",
|
| 694 |
+
"visual.blocks.7.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 695 |
+
"visual.blocks.7.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 696 |
+
"visual.blocks.7.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 697 |
+
"visual.blocks.7.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 698 |
+
"visual.blocks.7.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 699 |
+
"visual.blocks.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 700 |
+
"visual.blocks.7.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 701 |
+
"visual.blocks.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 702 |
+
"visual.blocks.7.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 703 |
+
"visual.blocks.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 704 |
+
"visual.blocks.7.norm1.weight": "model-00001-of-00004.safetensors",
|
| 705 |
+
"visual.blocks.7.norm2.weight": "model-00001-of-00004.safetensors",
|
| 706 |
+
"visual.blocks.8.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 707 |
+
"visual.blocks.8.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 708 |
+
"visual.blocks.8.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 709 |
+
"visual.blocks.8.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 710 |
+
"visual.blocks.8.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 711 |
+
"visual.blocks.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 712 |
+
"visual.blocks.8.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 713 |
+
"visual.blocks.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 714 |
+
"visual.blocks.8.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 715 |
+
"visual.blocks.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 716 |
+
"visual.blocks.8.norm1.weight": "model-00001-of-00004.safetensors",
|
| 717 |
+
"visual.blocks.8.norm2.weight": "model-00001-of-00004.safetensors",
|
| 718 |
+
"visual.blocks.9.attn.proj.bias": "model-00001-of-00004.safetensors",
|
| 719 |
+
"visual.blocks.9.attn.proj.weight": "model-00001-of-00004.safetensors",
|
| 720 |
+
"visual.blocks.9.attn.qkv.bias": "model-00001-of-00004.safetensors",
|
| 721 |
+
"visual.blocks.9.attn.qkv.weight": "model-00001-of-00004.safetensors",
|
| 722 |
+
"visual.blocks.9.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
|
| 723 |
+
"visual.blocks.9.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 724 |
+
"visual.blocks.9.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
|
| 725 |
+
"visual.blocks.9.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 726 |
+
"visual.blocks.9.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
|
| 727 |
+
"visual.blocks.9.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 728 |
+
"visual.blocks.9.norm1.weight": "model-00001-of-00004.safetensors",
|
| 729 |
+
"visual.blocks.9.norm2.weight": "model-00001-of-00004.safetensors",
|
| 730 |
+
"visual.merger.ln_q.weight": "model-00001-of-00004.safetensors",
|
| 731 |
+
"visual.merger.mlp.0.bias": "model-00001-of-00004.safetensors",
|
| 732 |
+
"visual.merger.mlp.0.weight": "model-00001-of-00004.safetensors",
|
| 733 |
+
"visual.merger.mlp.2.bias": "model-00001-of-00004.safetensors",
|
| 734 |
+
"visual.merger.mlp.2.weight": "model-00001-of-00004.safetensors",
|
| 735 |
+
"visual.patch_embed.proj.weight": "model-00001-of-00004.safetensors"
|
| 736 |
+
}
|
| 737 |
+
}
|
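The entries above are the tail of the sharded checkpoint index (model.safetensors.index.json): every parameter name is routed to one of the four *.safetensors shards, and the remaining vision-tower blocks, the patch-merger MLP, and the patch embedding all resolve to shard 1. As a minimal sketch of how that index is consumed (the local path and the chosen parameter name are illustrative), a single tensor can be located and read without materialising the whole model:

```python
# Minimal sketch, assuming the checkpoint files above sit in ./checkpoint.
import json
from safetensors import safe_open

ckpt_dir = "./checkpoint"  # hypothetical local copy of this repository
with open(f"{ckpt_dir}/model.safetensors.index.json") as f:
    index = json.load(f)

name = "visual.merger.mlp.0.weight"      # any key from the weight map above
shard = index["weight_map"][name]        # -> "model-00001-of-00004.safetensors"
with safe_open(f"{ckpt_dir}/{shard}", framework="pt") as f:
    tensor = f.get_tensor(name)
print(name, tuple(tensor.shape))
```

In practice transformers' from_pretrained() performs this shard lookup automatically; the manual version is mainly useful for inspecting or patching individual weights.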
preprocessor_config.json
ADDED
|
@@ -0,0 +1,37 @@
{
  "crop_size": null,
  "data_format": "channels_first",
  "default_to_square": true,
  "device": null,
  "disable_grouping": null,
  "do_center_crop": null,
  "do_convert_rgb": true,
  "do_normalize": true,
  "do_rescale": true,
  "do_resize": true,
  "image_mean": [
    0.48145466,
    0.4578275,
    0.40821073
  ],
  "image_processor_type": "Qwen2VLImageProcessorFast",
  "image_std": [
    0.26862954,
    0.26130258,
    0.27577711
  ],
  "input_data_format": null,
  "max_pixels": 12845056,
  "merge_size": 2,
  "min_pixels": 3136,
  "patch_size": 14,
  "processor_class": "Qwen2_5_VLProcessor",
  "resample": 3,
  "rescale_factor": 0.00392156862745098,
  "return_tensors": null,
  "size": {
    "longest_edge": 12845056,
    "shortest_edge": 3136
  },
  "temporal_patch_size": 2
}
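This image-processor configuration fixes the Qwen2.5-VL resizing behaviour: images are rescaled so that their area falls between min_pixels and max_pixels, cut into 14×14 patches, and merged 2×2 before being handed to the language model. A back-of-the-envelope sketch of the visual token budget this implies (the example resolution is arbitrary):

```python
# Rough sketch of the token count implied by preprocessor_config.json,
# assuming the image has already been resized within the pixel bounds.
patch_size = 14   # "patch_size"
merge_size = 2    # "merge_size": 2x2 patch merging before the LLM

def visual_tokens(height: int, width: int) -> int:
    patches = (height // patch_size) * (width // patch_size)
    return patches // (merge_size ** 2)

print(visual_tokens(1288, 980))   # ~1.26 MP screenshot -> 1610 merged tokens
```

With max_pixels = 12845056 the ceiling works out to 16384 merged tokens per image, and min_pixels = 3136 guarantees at least 4; both bounds can usually be overridden at load time, e.g. AutoProcessor.from_pretrained(..., min_pixels=..., max_pixels=...).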
special_tokens_map.json
ADDED
|
@@ -0,0 +1,31 @@
|
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}

tokenizer.json
ADDED
|
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
size 11421896
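tokenizer.json is stored through Git LFS, so the three lines above are only the pointer stub (oid + size); the actual ~11 MB tokenizer file has to be fetched separately. A small sketch using huggingface_hub (the repository id is a placeholder):

```python
# Sketch: download the real tokenizer.json instead of the LFS pointer stub.
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="your-org/your-finetuned-qwen2_5vl",  # placeholder repo id
    filename="tokenizer.json",
)
print(path)  # local cache path of the ~11 MB file
```

Cloning with git lfs pull, or simply loading via AutoTokenizer.from_pretrained on the Hub id, resolves the pointer as well.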
tokenizer_config.json
ADDED
|
@@ -0,0 +1,209 @@
|
| 1 |
+
{
|
| 2 |
+
"add_bos_token": false,
|
| 3 |
+
"add_prefix_space": false,
|
| 4 |
+
"added_tokens_decoder": {
|
| 5 |
+
"151643": {
|
| 6 |
+
"content": "<|endoftext|>",
|
| 7 |
+
"lstrip": false,
|
| 8 |
+
"normalized": false,
|
| 9 |
+
"rstrip": false,
|
| 10 |
+
"single_word": false,
|
| 11 |
+
"special": true
|
| 12 |
+
},
|
| 13 |
+
"151644": {
|
| 14 |
+
"content": "<|im_start|>",
|
| 15 |
+
"lstrip": false,
|
| 16 |
+
"normalized": false,
|
| 17 |
+
"rstrip": false,
|
| 18 |
+
"single_word": false,
|
| 19 |
+
"special": true
|
| 20 |
+
},
|
| 21 |
+
"151645": {
|
| 22 |
+
"content": "<|im_end|>",
|
| 23 |
+
"lstrip": false,
|
| 24 |
+
"normalized": false,
|
| 25 |
+
"rstrip": false,
|
| 26 |
+
"single_word": false,
|
| 27 |
+
"special": true
|
| 28 |
+
},
|
| 29 |
+
"151646": {
|
| 30 |
+
"content": "<|object_ref_start|>",
|
| 31 |
+
"lstrip": false,
|
| 32 |
+
"normalized": false,
|
| 33 |
+
"rstrip": false,
|
| 34 |
+
"single_word": false,
|
| 35 |
+
"special": true
|
| 36 |
+
},
|
| 37 |
+
"151647": {
|
| 38 |
+
"content": "<|object_ref_end|>",
|
| 39 |
+
"lstrip": false,
|
| 40 |
+
"normalized": false,
|
| 41 |
+
"rstrip": false,
|
| 42 |
+
"single_word": false,
|
| 43 |
+
"special": true
|
| 44 |
+
},
|
| 45 |
+
"151648": {
|
| 46 |
+
"content": "<|box_start|>",
|
| 47 |
+
"lstrip": false,
|
| 48 |
+
"normalized": false,
|
| 49 |
+
"rstrip": false,
|
| 50 |
+
"single_word": false,
|
| 51 |
+
"special": true
|
| 52 |
+
},
|
| 53 |
+
"151649": {
|
| 54 |
+
"content": "<|box_end|>",
|
| 55 |
+
"lstrip": false,
|
| 56 |
+
"normalized": false,
|
| 57 |
+
"rstrip": false,
|
| 58 |
+
"single_word": false,
|
| 59 |
+
"special": true
|
| 60 |
+
},
|
| 61 |
+
"151650": {
|
| 62 |
+
"content": "<|quad_start|>",
|
| 63 |
+
"lstrip": false,
|
| 64 |
+
"normalized": false,
|
| 65 |
+
"rstrip": false,
|
| 66 |
+
"single_word": false,
|
| 67 |
+
"special": true
|
| 68 |
+
},
|
| 69 |
+
"151651": {
|
| 70 |
+
"content": "<|quad_end|>",
|
| 71 |
+
"lstrip": false,
|
| 72 |
+
"normalized": false,
|
| 73 |
+
"rstrip": false,
|
| 74 |
+
"single_word": false,
|
| 75 |
+
"special": true
|
| 76 |
+
},
|
| 77 |
+
"151652": {
|
| 78 |
+
"content": "<|vision_start|>",
|
| 79 |
+
"lstrip": false,
|
| 80 |
+
"normalized": false,
|
| 81 |
+
"rstrip": false,
|
| 82 |
+
"single_word": false,
|
| 83 |
+
"special": true
|
| 84 |
+
},
|
| 85 |
+
"151653": {
|
| 86 |
+
"content": "<|vision_end|>",
|
| 87 |
+
"lstrip": false,
|
| 88 |
+
"normalized": false,
|
| 89 |
+
"rstrip": false,
|
| 90 |
+
"single_word": false,
|
| 91 |
+
"special": true
|
| 92 |
+
},
|
| 93 |
+
"151654": {
|
| 94 |
+
"content": "<|vision_pad|>",
|
| 95 |
+
"lstrip": false,
|
| 96 |
+
"normalized": false,
|
| 97 |
+
"rstrip": false,
|
| 98 |
+
"single_word": false,
|
| 99 |
+
"special": true
|
| 100 |
+
},
|
| 101 |
+
"151655": {
|
| 102 |
+
"content": "<|image_pad|>",
|
| 103 |
+
"lstrip": false,
|
| 104 |
+
"normalized": false,
|
| 105 |
+
"rstrip": false,
|
| 106 |
+
"single_word": false,
|
| 107 |
+
"special": true
|
| 108 |
+
},
|
| 109 |
+
"151656": {
|
| 110 |
+
"content": "<|video_pad|>",
|
| 111 |
+
"lstrip": false,
|
| 112 |
+
"normalized": false,
|
| 113 |
+
"rstrip": false,
|
| 114 |
+
"single_word": false,
|
| 115 |
+
"special": true
|
| 116 |
+
},
|
| 117 |
+
"151657": {
|
| 118 |
+
"content": "<tool_call>",
|
| 119 |
+
"lstrip": false,
|
| 120 |
+
"normalized": false,
|
| 121 |
+
"rstrip": false,
|
| 122 |
+
"single_word": false,
|
| 123 |
+
"special": false
|
| 124 |
+
},
|
| 125 |
+
"151658": {
|
| 126 |
+
"content": "</tool_call>",
|
| 127 |
+
"lstrip": false,
|
| 128 |
+
"normalized": false,
|
| 129 |
+
"rstrip": false,
|
| 130 |
+
"single_word": false,
|
| 131 |
+
"special": false
|
| 132 |
+
},
|
| 133 |
+
"151659": {
|
| 134 |
+
"content": "<|fim_prefix|>",
|
| 135 |
+
"lstrip": false,
|
| 136 |
+
"normalized": false,
|
| 137 |
+
"rstrip": false,
|
| 138 |
+
"single_word": false,
|
| 139 |
+
"special": false
|
| 140 |
+
},
|
| 141 |
+
"151660": {
|
| 142 |
+
"content": "<|fim_middle|>",
|
| 143 |
+
"lstrip": false,
|
| 144 |
+
"normalized": false,
|
| 145 |
+
"rstrip": false,
|
| 146 |
+
"single_word": false,
|
| 147 |
+
"special": false
|
| 148 |
+
},
|
| 149 |
+
"151661": {
|
| 150 |
+
"content": "<|fim_suffix|>",
|
| 151 |
+
"lstrip": false,
|
| 152 |
+
"normalized": false,
|
| 153 |
+
"rstrip": false,
|
| 154 |
+
"single_word": false,
|
| 155 |
+
"special": false
|
| 156 |
+
},
|
| 157 |
+
"151662": {
|
| 158 |
+
"content": "<|fim_pad|>",
|
| 159 |
+
"lstrip": false,
|
| 160 |
+
"normalized": false,
|
| 161 |
+
"rstrip": false,
|
| 162 |
+
"single_word": false,
|
| 163 |
+
"special": false
|
| 164 |
+
},
|
| 165 |
+
"151663": {
|
| 166 |
+
"content": "<|repo_name|>",
|
| 167 |
+
"lstrip": false,
|
| 168 |
+
"normalized": false,
|
| 169 |
+
"rstrip": false,
|
| 170 |
+
"single_word": false,
|
| 171 |
+
"special": false
|
| 172 |
+
},
|
| 173 |
+
"151664": {
|
| 174 |
+
"content": "<|file_sep|>",
|
| 175 |
+
"lstrip": false,
|
| 176 |
+
"normalized": false,
|
| 177 |
+
"rstrip": false,
|
| 178 |
+
"single_word": false,
|
| 179 |
+
"special": false
|
| 180 |
+
}
|
| 181 |
+
},
|
| 182 |
+
"additional_special_tokens": [
|
| 183 |
+
"<|im_start|>",
|
| 184 |
+
"<|im_end|>",
|
| 185 |
+
"<|object_ref_start|>",
|
| 186 |
+
"<|object_ref_end|>",
|
| 187 |
+
"<|box_start|>",
|
| 188 |
+
"<|box_end|>",
|
| 189 |
+
"<|quad_start|>",
|
| 190 |
+
"<|quad_end|>",
|
| 191 |
+
"<|vision_start|>",
|
| 192 |
+
"<|vision_end|>",
|
| 193 |
+
"<|vision_pad|>",
|
| 194 |
+
"<|image_pad|>",
|
| 195 |
+
"<|video_pad|>"
|
| 196 |
+
],
|
| 197 |
+
"bos_token": null,
|
| 198 |
+
"clean_up_tokenization_spaces": false,
|
| 199 |
+
"eos_token": "<|im_end|>",
|
| 200 |
+
"errors": "replace",
|
| 201 |
+
"extra_special_tokens": {},
|
| 202 |
+
"model_max_length": 131072,
|
| 203 |
+
"pad_token": "<|endoftext|>",
|
| 204 |
+
"padding_side": "right",
|
| 205 |
+
"processor_class": "Qwen2_5_VLProcessor",
|
| 206 |
+
"split_special_tokens": false,
|
| 207 |
+
"tokenizer_class": "Qwen2Tokenizer",
|
| 208 |
+
"unk_token": null
|
| 209 |
+
}
|
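The tokenizer configuration registers the Qwen2.5 control tokens (ids 151643 to 151664), uses <|im_end|> as the EOS token, pads with <|endoftext|>, and allows contexts up to 131072 tokens. A quick sketch (local path illustrative) that loads the tokenizer defined by these files and confirms that wiring:

```python
# Sketch, assuming the repository files are available locally in ./checkpoint.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")
print(tok.eos_token, tok.convert_tokens_to_ids("<|im_end|>"))     # <|im_end|> 151645
print(tok.pad_token, tok.convert_tokens_to_ids("<|endoftext|>"))  # <|endoftext|> 151643
print(tok.model_max_length)                                       # 131072
```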
train_results.json
ADDED
|
@@ -0,0 +1,9 @@
|
{
  "effective_tokens_per_sec": 237.99224040941883,
  "epoch": 2.0,
  "total_flos": 688709313101824.0,
  "train_loss": 0.4963684876922902,
  "train_runtime": 35426.998,
  "train_samples_per_second": 2.201,
  "train_steps_per_second": 0.138
}
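These aggregates are internally consistent with the 4,874-step schedule logged in trainer_log.jsonl below: 0.138 steps/s over the 35,427 s run reproduces the step count, and dividing the ~78,000 processed samples by those steps recovers roughly 16 samples per optimizer step across the two epochs. A sketch of that arithmetic:

```python
# Sanity-check train_results.json against the step count in trainer_log.jsonl.
train_runtime = 35426.998   # seconds
steps_per_sec = 0.138
samples_per_sec = 2.201
total_steps = 4874          # "total_steps" in the log entries below

print(round(steps_per_sec * train_runtime))           # ~4889, close to the 4874 logged steps
print(round(samples_per_sec * train_runtime))         # ~77975 samples processed
print(samples_per_sec * train_runtime / total_steps)  # ~16 samples per step
```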
trainer_log.jsonl
ADDED
|
@@ -0,0 +1,488 @@
| 1 |
+
{"current_steps": 10, "total_steps": 4874, "loss": 1.3123, "lr": 1.844262295081967e-08, "epoch": 0.004103405826836274, "percentage": 0.21, "elapsed_time": "0:01:19", "remaining_time": "10:43:21"}
|
| 2 |
+
{"current_steps": 20, "total_steps": 4874, "loss": 1.2103, "lr": 3.8934426229508196e-08, "epoch": 0.008206811653672548, "percentage": 0.41, "elapsed_time": "0:02:31", "remaining_time": "10:14:32"}
|
| 3 |
+
{"current_steps": 30, "total_steps": 4874, "loss": 1.2461, "lr": 5.9426229508196716e-08, "epoch": 0.012310217480508822, "percentage": 0.62, "elapsed_time": "0:03:43", "remaining_time": "10:00:20"}
|
| 4 |
+
{"current_steps": 40, "total_steps": 4874, "loss": 1.2492, "lr": 7.991803278688524e-08, "epoch": 0.016413623307345096, "percentage": 0.82, "elapsed_time": "0:04:54", "remaining_time": "9:53:48"}
|
| 5 |
+
{"current_steps": 50, "total_steps": 4874, "loss": 1.1791, "lr": 1.0040983606557377e-07, "epoch": 0.02051702913418137, "percentage": 1.03, "elapsed_time": "0:06:09", "remaining_time": "9:54:01"}
|
| 6 |
+
{"current_steps": 60, "total_steps": 4874, "loss": 1.0959, "lr": 1.209016393442623e-07, "epoch": 0.024620434961017644, "percentage": 1.23, "elapsed_time": "0:07:19", "remaining_time": "9:47:58"}
|
| 7 |
+
{"current_steps": 70, "total_steps": 4874, "loss": 1.0009, "lr": 1.413934426229508e-07, "epoch": 0.028723840787853918, "percentage": 1.44, "elapsed_time": "0:08:30", "remaining_time": "9:44:12"}
|
| 8 |
+
{"current_steps": 80, "total_steps": 4874, "loss": 0.8824, "lr": 1.6188524590163935e-07, "epoch": 0.03282724661469019, "percentage": 1.64, "elapsed_time": "0:09:43", "remaining_time": "9:42:56"}
|
| 9 |
+
{"current_steps": 90, "total_steps": 4874, "loss": 0.8539, "lr": 1.8237704918032787e-07, "epoch": 0.03693065244152647, "percentage": 1.85, "elapsed_time": "0:10:55", "remaining_time": "9:40:25"}
|
| 10 |
+
{"current_steps": 100, "total_steps": 4874, "loss": 0.8715, "lr": 2.028688524590164e-07, "epoch": 0.04103405826836274, "percentage": 2.05, "elapsed_time": "0:12:05", "remaining_time": "9:37:31"}
|
| 11 |
+
{"current_steps": 110, "total_steps": 4874, "loss": 0.7815, "lr": 2.233606557377049e-07, "epoch": 0.04513746409519902, "percentage": 2.26, "elapsed_time": "0:13:17", "remaining_time": "9:35:27"}
|
| 12 |
+
{"current_steps": 120, "total_steps": 4874, "loss": 0.7836, "lr": 2.438524590163934e-07, "epoch": 0.04924086992203529, "percentage": 2.46, "elapsed_time": "0:14:29", "remaining_time": "9:34:05"}
|
| 13 |
+
{"current_steps": 130, "total_steps": 4874, "loss": 0.7828, "lr": 2.643442622950819e-07, "epoch": 0.053344275748871565, "percentage": 2.67, "elapsed_time": "0:15:41", "remaining_time": "9:32:44"}
|
| 14 |
+
{"current_steps": 140, "total_steps": 4874, "loss": 0.7648, "lr": 2.848360655737705e-07, "epoch": 0.057447681575707836, "percentage": 2.87, "elapsed_time": "0:16:52", "remaining_time": "9:30:42"}
|
| 15 |
+
{"current_steps": 150, "total_steps": 4874, "loss": 0.7652, "lr": 3.05327868852459e-07, "epoch": 0.061551087402544113, "percentage": 3.08, "elapsed_time": "0:18:04", "remaining_time": "9:29:22"}
|
| 16 |
+
{"current_steps": 160, "total_steps": 4874, "loss": 0.7392, "lr": 3.258196721311475e-07, "epoch": 0.06565449322938038, "percentage": 3.28, "elapsed_time": "0:19:17", "remaining_time": "9:28:20"}
|
| 17 |
+
{"current_steps": 170, "total_steps": 4874, "loss": 0.7627, "lr": 3.463114754098361e-07, "epoch": 0.06975789905621665, "percentage": 3.49, "elapsed_time": "0:20:28", "remaining_time": "9:26:44"}
|
| 18 |
+
{"current_steps": 180, "total_steps": 4874, "loss": 0.7051, "lr": 3.6680327868852456e-07, "epoch": 0.07386130488305294, "percentage": 3.69, "elapsed_time": "0:21:40", "remaining_time": "9:25:02"}
|
| 19 |
+
{"current_steps": 190, "total_steps": 4874, "loss": 0.722, "lr": 3.8729508196721314e-07, "epoch": 0.07796471070988921, "percentage": 3.9, "elapsed_time": "0:22:51", "remaining_time": "9:23:30"}
|
| 20 |
+
{"current_steps": 200, "total_steps": 4874, "loss": 0.711, "lr": 4.077868852459016e-07, "epoch": 0.08206811653672548, "percentage": 4.1, "elapsed_time": "0:24:03", "remaining_time": "9:22:22"}
|
| 21 |
+
{"current_steps": 210, "total_steps": 4874, "loss": 0.7414, "lr": 4.2827868852459014e-07, "epoch": 0.08617152236356175, "percentage": 4.31, "elapsed_time": "0:25:14", "remaining_time": "9:20:44"}
|
| 22 |
+
{"current_steps": 220, "total_steps": 4874, "loss": 0.7169, "lr": 4.487704918032787e-07, "epoch": 0.09027492819039803, "percentage": 4.51, "elapsed_time": "0:26:28", "remaining_time": "9:20:13"}
|
| 23 |
+
{"current_steps": 230, "total_steps": 4874, "loss": 0.697, "lr": 4.692622950819672e-07, "epoch": 0.0943783340172343, "percentage": 4.72, "elapsed_time": "0:27:40", "remaining_time": "9:18:52"}
|
| 24 |
+
{"current_steps": 240, "total_steps": 4874, "loss": 0.714, "lr": 4.897540983606557e-07, "epoch": 0.09848173984407058, "percentage": 4.92, "elapsed_time": "0:28:52", "remaining_time": "9:17:26"}
|
| 25 |
+
{"current_steps": 250, "total_steps": 4874, "loss": 0.6683, "lr": 5.102459016393442e-07, "epoch": 0.10258514567090685, "percentage": 5.13, "elapsed_time": "0:30:03", "remaining_time": "9:15:53"}
|
| 26 |
+
{"current_steps": 260, "total_steps": 4874, "loss": 0.6644, "lr": 5.307377049180327e-07, "epoch": 0.10668855149774313, "percentage": 5.33, "elapsed_time": "0:31:15", "remaining_time": "9:14:36"}
|
| 27 |
+
{"current_steps": 270, "total_steps": 4874, "loss": 0.6607, "lr": 5.512295081967213e-07, "epoch": 0.1107919573245794, "percentage": 5.54, "elapsed_time": "0:32:26", "remaining_time": "9:13:19"}
|
| 28 |
+
{"current_steps": 280, "total_steps": 4874, "loss": 0.6528, "lr": 5.717213114754098e-07, "epoch": 0.11489536315141567, "percentage": 5.74, "elapsed_time": "0:33:39", "remaining_time": "9:12:07"}
|
| 29 |
+
{"current_steps": 290, "total_steps": 4874, "loss": 0.6636, "lr": 5.922131147540983e-07, "epoch": 0.11899876897825194, "percentage": 5.95, "elapsed_time": "0:34:51", "remaining_time": "9:10:59"}
|
| 30 |
+
{"current_steps": 300, "total_steps": 4874, "loss": 0.6363, "lr": 6.127049180327869e-07, "epoch": 0.12310217480508823, "percentage": 6.16, "elapsed_time": "0:36:04", "remaining_time": "9:10:08"}
|
| 31 |
+
{"current_steps": 310, "total_steps": 4874, "loss": 0.6598, "lr": 6.331967213114754e-07, "epoch": 0.1272055806319245, "percentage": 6.36, "elapsed_time": "0:37:16", "remaining_time": "9:08:54"}
|
| 32 |
+
{"current_steps": 320, "total_steps": 4874, "loss": 0.6326, "lr": 6.536885245901639e-07, "epoch": 0.13130898645876077, "percentage": 6.57, "elapsed_time": "0:38:30", "remaining_time": "9:07:56"}
|
| 33 |
+
{"current_steps": 330, "total_steps": 4874, "loss": 0.6157, "lr": 6.741803278688525e-07, "epoch": 0.13541239228559704, "percentage": 6.77, "elapsed_time": "0:39:44", "remaining_time": "9:07:17"}
|
| 34 |
+
{"current_steps": 340, "total_steps": 4874, "loss": 0.6603, "lr": 6.94672131147541e-07, "epoch": 0.1395157981124333, "percentage": 6.98, "elapsed_time": "0:40:55", "remaining_time": "9:05:44"}
|
| 35 |
+
{"current_steps": 350, "total_steps": 4874, "loss": 0.6766, "lr": 7.151639344262295e-07, "epoch": 0.1436192039392696, "percentage": 7.18, "elapsed_time": "0:42:08", "remaining_time": "9:04:41"}
|
| 36 |
+
{"current_steps": 360, "total_steps": 4874, "loss": 0.6128, "lr": 7.356557377049179e-07, "epoch": 0.14772260976610588, "percentage": 7.39, "elapsed_time": "0:43:19", "remaining_time": "9:03:18"}
|
| 37 |
+
{"current_steps": 370, "total_steps": 4874, "loss": 0.629, "lr": 7.561475409836066e-07, "epoch": 0.15182601559294215, "percentage": 7.59, "elapsed_time": "0:44:31", "remaining_time": "9:02:05"}
|
| 38 |
+
{"current_steps": 380, "total_steps": 4874, "loss": 0.5847, "lr": 7.766393442622951e-07, "epoch": 0.15592942141977842, "percentage": 7.8, "elapsed_time": "0:45:44", "remaining_time": "9:01:01"}
|
| 39 |
+
{"current_steps": 390, "total_steps": 4874, "loss": 0.6029, "lr": 7.971311475409835e-07, "epoch": 0.1600328272466147, "percentage": 8.0, "elapsed_time": "0:46:59", "remaining_time": "9:00:14"}
|
| 40 |
+
{"current_steps": 400, "total_steps": 4874, "loss": 0.6016, "lr": 8.176229508196721e-07, "epoch": 0.16413623307345096, "percentage": 8.21, "elapsed_time": "0:48:08", "remaining_time": "8:58:27"}
|
| 41 |
+
{"current_steps": 410, "total_steps": 4874, "loss": 0.5641, "lr": 8.381147540983607e-07, "epoch": 0.16823963890028723, "percentage": 8.41, "elapsed_time": "0:49:22", "remaining_time": "8:57:31"}
|
| 42 |
+
{"current_steps": 420, "total_steps": 4874, "loss": 0.5899, "lr": 8.586065573770491e-07, "epoch": 0.1723430447271235, "percentage": 8.62, "elapsed_time": "0:50:34", "remaining_time": "8:56:18"}
|
| 43 |
+
{"current_steps": 430, "total_steps": 4874, "loss": 0.6022, "lr": 8.790983606557376e-07, "epoch": 0.1764464505539598, "percentage": 8.82, "elapsed_time": "0:51:46", "remaining_time": "8:55:00"}
|
| 44 |
+
{"current_steps": 440, "total_steps": 4874, "loss": 0.5648, "lr": 8.995901639344262e-07, "epoch": 0.18054985638079607, "percentage": 9.03, "elapsed_time": "0:52:59", "remaining_time": "8:54:02"}
|
| 45 |
+
{"current_steps": 450, "total_steps": 4874, "loss": 0.6225, "lr": 9.200819672131147e-07, "epoch": 0.18465326220763234, "percentage": 9.23, "elapsed_time": "0:54:13", "remaining_time": "8:53:04"}
|
| 46 |
+
{"current_steps": 460, "total_steps": 4874, "loss": 0.5951, "lr": 9.405737704918032e-07, "epoch": 0.1887566680344686, "percentage": 9.44, "elapsed_time": "0:55:26", "remaining_time": "8:51:59"}
|
| 47 |
+
{"current_steps": 470, "total_steps": 4874, "loss": 0.634, "lr": 9.610655737704918e-07, "epoch": 0.19286007386130488, "percentage": 9.64, "elapsed_time": "0:56:39", "remaining_time": "8:50:55"}
|
| 48 |
+
{"current_steps": 480, "total_steps": 4874, "loss": 0.6268, "lr": 9.815573770491803e-07, "epoch": 0.19696347968814115, "percentage": 9.85, "elapsed_time": "0:57:50", "remaining_time": "8:49:25"}
|
| 49 |
+
{"current_steps": 490, "total_steps": 4874, "loss": 0.6108, "lr": 9.99999871736679e-07, "epoch": 0.20106688551497742, "percentage": 10.05, "elapsed_time": "0:59:02", "remaining_time": "8:48:15"}
|
| 50 |
+
{"current_steps": 500, "total_steps": 4874, "loss": 0.6219, "lr": 9.99984480217773e-07, "epoch": 0.2051702913418137, "percentage": 10.26, "elapsed_time": "1:00:16", "remaining_time": "8:47:15"}
|
| 51 |
+
{"current_steps": 510, "total_steps": 4874, "loss": 0.6191, "lr": 9.999434369394724e-07, "epoch": 0.20927369716865, "percentage": 10.46, "elapsed_time": "1:01:23", "remaining_time": "8:45:21"}
|
| 52 |
+
{"current_steps": 520, "total_steps": 4874, "loss": 0.596, "lr": 9.998767440075073e-07, "epoch": 0.21337710299548626, "percentage": 10.67, "elapsed_time": "1:02:35", "remaining_time": "8:44:01"}
|
| 53 |
+
{"current_steps": 530, "total_steps": 4874, "loss": 0.5608, "lr": 9.997844048435662e-07, "epoch": 0.21748050882232253, "percentage": 10.87, "elapsed_time": "1:03:46", "remaining_time": "8:42:42"}
|
| 54 |
+
{"current_steps": 540, "total_steps": 4874, "loss": 0.5549, "lr": 9.996664241851196e-07, "epoch": 0.2215839146491588, "percentage": 11.08, "elapsed_time": "1:04:58", "remaining_time": "8:41:29"}
|
| 55 |
+
{"current_steps": 550, "total_steps": 4874, "loss": 0.5673, "lr": 9.995228080851788e-07, "epoch": 0.22568732047599507, "percentage": 11.28, "elapsed_time": "1:06:08", "remaining_time": "8:40:00"}
|
| 56 |
+
{"current_steps": 560, "total_steps": 4874, "loss": 0.5602, "lr": 9.993535639119836e-07, "epoch": 0.22979072630283134, "percentage": 11.49, "elapsed_time": "1:07:21", "remaining_time": "8:38:54"}
|
| 57 |
+
{"current_steps": 570, "total_steps": 4874, "loss": 0.6036, "lr": 9.99158700348625e-07, "epoch": 0.23389413212966761, "percentage": 11.69, "elapsed_time": "1:08:31", "remaining_time": "8:37:28"}
|
| 58 |
+
{"current_steps": 580, "total_steps": 4874, "loss": 0.6003, "lr": 9.989382273926001e-07, "epoch": 0.23799753795650389, "percentage": 11.9, "elapsed_time": "1:09:41", "remaining_time": "8:35:59"}
|
| 59 |
+
{"current_steps": 590, "total_steps": 4874, "loss": 0.5939, "lr": 9.986921563552983e-07, "epoch": 0.24210094378334018, "percentage": 12.11, "elapsed_time": "1:10:56", "remaining_time": "8:35:06"}
|
| 60 |
+
{"current_steps": 600, "total_steps": 4874, "loss": 0.5812, "lr": 9.984204998614217e-07, "epoch": 0.24620434961017645, "percentage": 12.31, "elapsed_time": "1:12:07", "remaining_time": "8:33:43"}
|
| 61 |
+
{"current_steps": 610, "total_steps": 4874, "loss": 0.5496, "lr": 9.981232718483366e-07, "epoch": 0.2503077554370127, "percentage": 12.52, "elapsed_time": "1:13:12", "remaining_time": "8:31:45"}
|
| 62 |
+
{"current_steps": 620, "total_steps": 4874, "loss": 0.577, "lr": 9.978004875653595e-07, "epoch": 0.254411161263849, "percentage": 12.72, "elapsed_time": "1:14:22", "remaining_time": "8:30:19"}
|
| 63 |
+
{"current_steps": 630, "total_steps": 4874, "loss": 0.5981, "lr": 9.97452163572974e-07, "epoch": 0.25851456709068527, "percentage": 12.93, "elapsed_time": "1:15:35", "remaining_time": "8:29:14"}
|
| 64 |
+
{"current_steps": 640, "total_steps": 4874, "loss": 0.5367, "lr": 9.970783177419811e-07, "epoch": 0.26261797291752154, "percentage": 13.13, "elapsed_time": "1:16:44", "remaining_time": "8:27:44"}
|
| 65 |
+
{"current_steps": 650, "total_steps": 4874, "loss": 0.5641, "lr": 9.96678969252583e-07, "epoch": 0.2667213787443578, "percentage": 13.34, "elapsed_time": "1:17:58", "remaining_time": "8:26:41"}
|
| 66 |
+
{"current_steps": 660, "total_steps": 4874, "loss": 0.5385, "lr": 9.962541385933984e-07, "epoch": 0.2708247845711941, "percentage": 13.54, "elapsed_time": "1:19:05", "remaining_time": "8:25:01"}
|
| 67 |
+
{"current_steps": 670, "total_steps": 4874, "loss": 0.5379, "lr": 9.958038475604113e-07, "epoch": 0.27492819039803035, "percentage": 13.75, "elapsed_time": "1:20:17", "remaining_time": "8:23:45"}
|
| 68 |
+
{"current_steps": 680, "total_steps": 4874, "loss": 0.5794, "lr": 9.953281192558534e-07, "epoch": 0.2790315962248666, "percentage": 13.95, "elapsed_time": "1:21:27", "remaining_time": "8:22:22"}
|
| 69 |
+
{"current_steps": 690, "total_steps": 4874, "loss": 0.5496, "lr": 9.948269780870183e-07, "epoch": 0.2831350020517029, "percentage": 14.16, "elapsed_time": "1:22:41", "remaining_time": "8:21:24"}
|
| 70 |
+
{"current_steps": 700, "total_steps": 4874, "loss": 0.5535, "lr": 9.943004497650092e-07, "epoch": 0.2872384078785392, "percentage": 14.36, "elapsed_time": "1:23:53", "remaining_time": "8:20:13"}
|
| 71 |
+
{"current_steps": 710, "total_steps": 4874, "loss": 0.5409, "lr": 9.937485613034207e-07, "epoch": 0.2913418137053755, "percentage": 14.57, "elapsed_time": "1:25:06", "remaining_time": "8:19:10"}
|
| 72 |
+
{"current_steps": 720, "total_steps": 4874, "loss": 0.5136, "lr": 9.931713410169512e-07, "epoch": 0.29544521953221176, "percentage": 14.77, "elapsed_time": "1:26:18", "remaining_time": "8:17:54"}
|
| 73 |
+
{"current_steps": 730, "total_steps": 4874, "loss": 0.5681, "lr": 9.925688185199524e-07, "epoch": 0.299548625359048, "percentage": 14.98, "elapsed_time": "1:27:30", "remaining_time": "8:16:43"}
|
| 74 |
+
{"current_steps": 740, "total_steps": 4874, "loss": 0.5203, "lr": 9.919410247249077e-07, "epoch": 0.3036520311858843, "percentage": 15.18, "elapsed_time": "1:28:41", "remaining_time": "8:15:30"}
|
| 75 |
+
{"current_steps": 750, "total_steps": 4874, "loss": 0.5854, "lr": 9.912879918408474e-07, "epoch": 0.30775543701272057, "percentage": 15.39, "elapsed_time": "1:29:54", "remaining_time": "8:14:23"}
|
| 76 |
+
{"current_steps": 760, "total_steps": 4874, "loss": 0.545, "lr": 9.906097533716965e-07, "epoch": 0.31185884283955684, "percentage": 15.59, "elapsed_time": "1:31:06", "remaining_time": "8:13:11"}
|
| 77 |
+
{"current_steps": 770, "total_steps": 4874, "loss": 0.5887, "lr": 9.89906344114555e-07, "epoch": 0.3159622486663931, "percentage": 15.8, "elapsed_time": "1:32:18", "remaining_time": "8:12:02"}
|
| 78 |
+
{"current_steps": 780, "total_steps": 4874, "loss": 0.5125, "lr": 9.891778001579133e-07, "epoch": 0.3200656544932294, "percentage": 16.0, "elapsed_time": "1:33:31", "remaining_time": "8:10:55"}
|
| 79 |
+
{"current_steps": 790, "total_steps": 4874, "loss": 0.5558, "lr": 9.884241588798003e-07, "epoch": 0.32416906032006565, "percentage": 16.21, "elapsed_time": "1:34:42", "remaining_time": "8:09:34"}
|
| 80 |
+
{"current_steps": 800, "total_steps": 4874, "loss": 0.5326, "lr": 9.876454589458654e-07, "epoch": 0.3282724661469019, "percentage": 16.41, "elapsed_time": "1:35:52", "remaining_time": "8:08:16"}
|
| 81 |
+
{"current_steps": 810, "total_steps": 4874, "loss": 0.5025, "lr": 9.868417403073953e-07, "epoch": 0.3323758719737382, "percentage": 16.62, "elapsed_time": "1:37:04", "remaining_time": "8:07:03"}
|
| 82 |
+
{"current_steps": 820, "total_steps": 4874, "loss": 0.5729, "lr": 9.860130441992641e-07, "epoch": 0.33647927780057446, "percentage": 16.82, "elapsed_time": "1:38:16", "remaining_time": "8:05:53"}
|
| 83 |
+
{"current_steps": 830, "total_steps": 4874, "loss": 0.5491, "lr": 9.851594131378181e-07, "epoch": 0.34058268362741073, "percentage": 17.03, "elapsed_time": "1:39:29", "remaining_time": "8:04:44"}
|
| 84 |
+
{"current_steps": 840, "total_steps": 4874, "loss": 0.525, "lr": 9.84280890918694e-07, "epoch": 0.344686089454247, "percentage": 17.23, "elapsed_time": "1:40:38", "remaining_time": "8:03:20"}
|
| 85 |
+
{"current_steps": 850, "total_steps": 4874, "loss": 0.5567, "lr": 9.833775226145717e-07, "epoch": 0.3487894952810833, "percentage": 17.44, "elapsed_time": "1:41:49", "remaining_time": "8:02:01"}
|
| 86 |
+
{"current_steps": 860, "total_steps": 4874, "loss": 0.4753, "lr": 9.824493545728626e-07, "epoch": 0.3528929011079196, "percentage": 17.64, "elapsed_time": "1:43:01", "remaining_time": "8:00:50"}
|
| 87 |
+
{"current_steps": 870, "total_steps": 4874, "loss": 0.5563, "lr": 9.814964344133316e-07, "epoch": 0.35699630693475587, "percentage": 17.85, "elapsed_time": "1:44:11", "remaining_time": "7:59:30"}
|
| 88 |
+
{"current_steps": 880, "total_steps": 4874, "loss": 0.5269, "lr": 9.805188110256532e-07, "epoch": 0.36109971276159214, "percentage": 18.05, "elapsed_time": "1:45:24", "remaining_time": "7:58:25"}
|
| 89 |
+
{"current_steps": 890, "total_steps": 4874, "loss": 0.514, "lr": 9.795165345669045e-07, "epoch": 0.3652031185884284, "percentage": 18.26, "elapsed_time": "1:46:38", "remaining_time": "7:57:21"}
|
| 90 |
+
{"current_steps": 900, "total_steps": 4874, "loss": 0.5545, "lr": 9.784896564589904e-07, "epoch": 0.3693065244152647, "percentage": 18.47, "elapsed_time": "1:47:49", "remaining_time": "7:56:07"}
|
| 91 |
+
{"current_steps": 910, "total_steps": 4874, "loss": 0.5145, "lr": 9.774382293860067e-07, "epoch": 0.37340993024210095, "percentage": 18.67, "elapsed_time": "1:48:59", "remaining_time": "7:54:44"}
|
| 92 |
+
{"current_steps": 920, "total_steps": 4874, "loss": 0.5595, "lr": 9.76362307291536e-07, "epoch": 0.3775133360689372, "percentage": 18.88, "elapsed_time": "1:50:11", "remaining_time": "7:53:33"}
|
| 93 |
+
{"current_steps": 930, "total_steps": 4874, "loss": 0.5577, "lr": 9.752619453758816e-07, "epoch": 0.3816167418957735, "percentage": 19.08, "elapsed_time": "1:51:21", "remaining_time": "7:52:17"}
|
| 94 |
+
{"current_steps": 940, "total_steps": 4874, "loss": 0.5584, "lr": 9.74137200093234e-07, "epoch": 0.38572014772260976, "percentage": 19.29, "elapsed_time": "1:52:28", "remaining_time": "7:50:45"}
{"current_steps": 950, "total_steps": 4874, "loss": 0.5634, "lr": 9.729881291487746e-07, "epoch": 0.38982355354944603, "percentage": 19.49, "elapsed_time": "1:53:43", "remaining_time": "7:49:43"}
{"current_steps": 960, "total_steps": 4874, "loss": 0.5112, "lr": 9.718147914957164e-07, "epoch": 0.3939269593762823, "percentage": 19.7, "elapsed_time": "1:54:54", "remaining_time": "7:48:27"}
{"current_steps": 970, "total_steps": 4874, "loss": 0.5404, "lr": 9.706172473322782e-07, "epoch": 0.3980303652031186, "percentage": 19.9, "elapsed_time": "1:56:03", "remaining_time": "7:47:04"}
{"current_steps": 980, "total_steps": 4874, "loss": 0.4917, "lr": 9.693955580985967e-07, "epoch": 0.40213377102995485, "percentage": 20.11, "elapsed_time": "1:57:15", "remaining_time": "7:45:54"}
{"current_steps": 990, "total_steps": 4874, "loss": 0.5381, "lr": 9.681497864735739e-07, "epoch": 0.4062371768567911, "percentage": 20.31, "elapsed_time": "1:58:29", "remaining_time": "7:44:51"}
{"current_steps": 1000, "total_steps": 4874, "loss": 0.5399, "lr": 9.668799963716614e-07, "epoch": 0.4103405826836274, "percentage": 20.52, "elapsed_time": "1:59:40", "remaining_time": "7:43:35"}
{"current_steps": 1010, "total_steps": 4874, "loss": 0.5049, "lr": 9.655862529395821e-07, "epoch": 0.4144439885104637, "percentage": 20.72, "elapsed_time": "2:01:17", "remaining_time": "7:44:01"}
{"current_steps": 1020, "total_steps": 4874, "loss": 0.541, "lr": 9.642686225529864e-07, "epoch": 0.4185473943373, "percentage": 20.93, "elapsed_time": "2:02:29", "remaining_time": "7:42:49"}
{"current_steps": 1030, "total_steps": 4874, "loss": 0.539, "lr": 9.62927172813048e-07, "epoch": 0.42265080016413625, "percentage": 21.13, "elapsed_time": "2:03:40", "remaining_time": "7:41:32"}
{"current_steps": 1040, "total_steps": 4874, "loss": 0.5191, "lr": 9.615619725429953e-07, "epoch": 0.4267542059909725, "percentage": 21.34, "elapsed_time": "2:04:52", "remaining_time": "7:40:20"}
{"current_steps": 1050, "total_steps": 4874, "loss": 0.5496, "lr": 9.601730917845796e-07, "epoch": 0.4308576118178088, "percentage": 21.54, "elapsed_time": "2:06:03", "remaining_time": "7:39:05"}
{"current_steps": 1060, "total_steps": 4874, "loss": 0.5204, "lr": 9.587606017944834e-07, "epoch": 0.43496101764464506, "percentage": 21.75, "elapsed_time": "2:07:14", "remaining_time": "7:37:51"}
{"current_steps": 1070, "total_steps": 4874, "loss": 0.4949, "lr": 9.573245750406623e-07, "epoch": 0.43906442347148134, "percentage": 21.95, "elapsed_time": "2:08:24", "remaining_time": "7:36:30"}
{"current_steps": 1080, "total_steps": 4874, "loss": 0.5539, "lr": 9.558650851986287e-07, "epoch": 0.4431678292983176, "percentage": 22.16, "elapsed_time": "2:09:35", "remaining_time": "7:35:14"}
{"current_steps": 1090, "total_steps": 4874, "loss": 0.5323, "lr": 9.543822071476718e-07, "epoch": 0.4472712351251539, "percentage": 22.36, "elapsed_time": "2:10:47", "remaining_time": "7:34:01"}
{"current_steps": 1100, "total_steps": 4874, "loss": 0.544, "lr": 9.528760169670147e-07, "epoch": 0.45137464095199015, "percentage": 22.57, "elapsed_time": "2:11:59", "remaining_time": "7:32:51"}
{"current_steps": 1110, "total_steps": 4874, "loss": 0.5094, "lr": 9.513465919319121e-07, "epoch": 0.4554780467788264, "percentage": 22.77, "elapsed_time": "2:13:12", "remaining_time": "7:31:41"}
{"current_steps": 1120, "total_steps": 4874, "loss": 0.5353, "lr": 9.49794010509686e-07, "epoch": 0.4595814526056627, "percentage": 22.98, "elapsed_time": "2:14:25", "remaining_time": "7:30:35"}
{"current_steps": 1130, "total_steps": 4874, "loss": 0.5934, "lr": 9.482183523556986e-07, "epoch": 0.46368485843249896, "percentage": 23.18, "elapsed_time": "2:15:38", "remaining_time": "7:29:23"}
{"current_steps": 1140, "total_steps": 4874, "loss": 0.517, "lr": 9.466196983092671e-07, "epoch": 0.46778826425933523, "percentage": 23.39, "elapsed_time": "2:16:48", "remaining_time": "7:28:07"}
{"current_steps": 1150, "total_steps": 4874, "loss": 0.5354, "lr": 9.449981303895154e-07, "epoch": 0.4718916700861715, "percentage": 23.59, "elapsed_time": "2:18:02", "remaining_time": "7:27:02"}
{"current_steps": 1160, "total_steps": 4874, "loss": 0.5152, "lr": 9.433537317911656e-07, "epoch": 0.47599507591300777, "percentage": 23.8, "elapsed_time": "2:19:13", "remaining_time": "7:25:46"}
{"current_steps": 1170, "total_steps": 4874, "loss": 0.5082, "lr": 9.416865868802711e-07, "epoch": 0.4800984817398441, "percentage": 24.0, "elapsed_time": "2:20:27", "remaining_time": "7:24:39"}
{"current_steps": 1180, "total_steps": 4874, "loss": 0.5287, "lr": 9.39996781189887e-07, "epoch": 0.48420188756668037, "percentage": 24.21, "elapsed_time": "2:21:39", "remaining_time": "7:23:26"}
{"current_steps": 1190, "total_steps": 4874, "loss": 0.4979, "lr": 9.382844014156823e-07, "epoch": 0.48830529339351664, "percentage": 24.42, "elapsed_time": "2:22:53", "remaining_time": "7:22:20"}
{"current_steps": 1200, "total_steps": 4874, "loss": 0.5504, "lr": 9.365495354114918e-07, "epoch": 0.4924086992203529, "percentage": 24.62, "elapsed_time": "2:24:05", "remaining_time": "7:21:08"}
{"current_steps": 1210, "total_steps": 4874, "loss": 0.4844, "lr": 9.347922721848092e-07, "epoch": 0.4965121050471892, "percentage": 24.83, "elapsed_time": "2:25:17", "remaining_time": "7:19:57"}
{"current_steps": 1220, "total_steps": 4874, "loss": 0.4787, "lr": 9.330127018922193e-07, "epoch": 0.5006155108740254, "percentage": 25.03, "elapsed_time": "2:26:26", "remaining_time": "7:18:37"}
{"current_steps": 1230, "total_steps": 4874, "loss": 0.5524, "lr": 9.312109158347744e-07, "epoch": 0.5047189167008617, "percentage": 25.24, "elapsed_time": "2:27:37", "remaining_time": "7:17:21"}
{"current_steps": 1240, "total_steps": 4874, "loss": 0.5054, "lr": 9.293870064533087e-07, "epoch": 0.508822322527698, "percentage": 25.44, "elapsed_time": "2:28:51", "remaining_time": "7:16:13"}
{"current_steps": 1250, "total_steps": 4874, "loss": 0.5355, "lr": 9.275410673236957e-07, "epoch": 0.5129257283545343, "percentage": 25.65, "elapsed_time": "2:30:01", "remaining_time": "7:14:56"}
{"current_steps": 1260, "total_steps": 4874, "loss": 0.5312, "lr": 9.256731931520481e-07, "epoch": 0.5170291341813705, "percentage": 25.85, "elapsed_time": "2:31:14", "remaining_time": "7:13:48"}
{"current_steps": 1270, "total_steps": 4874, "loss": 0.5262, "lr": 9.23783479769858e-07, "epoch": 0.5211325400082069, "percentage": 26.06, "elapsed_time": "2:32:27", "remaining_time": "7:12:37"}
{"current_steps": 1280, "total_steps": 4874, "loss": 0.5218, "lr": 9.218720241290809e-07, "epoch": 0.5252359458350431, "percentage": 26.26, "elapsed_time": "2:33:39", "remaining_time": "7:11:27"}
{"current_steps": 1290, "total_steps": 4874, "loss": 0.5137, "lr": 9.199389242971611e-07, "epoch": 0.5293393516618794, "percentage": 26.47, "elapsed_time": "2:34:50", "remaining_time": "7:10:12"}
{"current_steps": 1300, "total_steps": 4874, "loss": 0.5573, "lr": 9.179842794520005e-07, "epoch": 0.5334427574887156, "percentage": 26.67, "elapsed_time": "2:36:04", "remaining_time": "7:09:04"}
{"current_steps": 1310, "total_steps": 4874, "loss": 0.5283, "lr": 9.160081898768704e-07, "epoch": 0.5375461633155519, "percentage": 26.88, "elapsed_time": "2:37:18", "remaining_time": "7:07:57"}
{"current_steps": 1320, "total_steps": 4874, "loss": 0.4916, "lr": 9.140107569552664e-07, "epoch": 0.5416495691423882, "percentage": 27.08, "elapsed_time": "2:38:30", "remaining_time": "7:06:46"}
{"current_steps": 1330, "total_steps": 4874, "loss": 0.5511, "lr": 9.119920831657066e-07, "epoch": 0.5457529749692245, "percentage": 27.29, "elapsed_time": "2:39:42", "remaining_time": "7:05:34"}
{"current_steps": 1340, "total_steps": 4874, "loss": 0.5117, "lr": 9.09952272076475e-07, "epoch": 0.5498563807960607, "percentage": 27.49, "elapsed_time": "2:40:55", "remaining_time": "7:04:25"}
{"current_steps": 1350, "total_steps": 4874, "loss": 0.5027, "lr": 9.078914283403058e-07, "epoch": 0.553959786622897, "percentage": 27.7, "elapsed_time": "2:42:09", "remaining_time": "7:03:17"}
{"current_steps": 1360, "total_steps": 4874, "loss": 0.506, "lr": 9.058096576890166e-07, "epoch": 0.5580631924497332, "percentage": 27.9, "elapsed_time": "2:43:18", "remaining_time": "7:01:58"}
{"current_steps": 1370, "total_steps": 4874, "loss": 0.498, "lr": 9.037070669280822e-07, "epoch": 0.5621665982765696, "percentage": 28.11, "elapsed_time": "2:44:30", "remaining_time": "7:00:45"}
{"current_steps": 1380, "total_steps": 4874, "loss": 0.5017, "lr": 9.015837639311556e-07, "epoch": 0.5662700041034058, "percentage": 28.31, "elapsed_time": "2:45:41", "remaining_time": "6:59:30"}
{"current_steps": 1390, "total_steps": 4874, "loss": 0.5065, "lr": 8.994398576345334e-07, "epoch": 0.5703734099302421, "percentage": 28.52, "elapsed_time": "2:46:54", "remaining_time": "6:58:19"}
{"current_steps": 1400, "total_steps": 4874, "loss": 0.4837, "lr": 8.972754580315668e-07, "epoch": 0.5744768157570784, "percentage": 28.72, "elapsed_time": "2:48:06", "remaining_time": "6:57:08"}
{"current_steps": 1410, "total_steps": 4874, "loss": 0.5266, "lr": 8.950906761670179e-07, "epoch": 0.5785802215839146, "percentage": 28.93, "elapsed_time": "2:49:18", "remaining_time": "6:55:56"}
{"current_steps": 1420, "total_steps": 4874, "loss": 0.5176, "lr": 8.928856241313638e-07, "epoch": 0.582683627410751, "percentage": 29.13, "elapsed_time": "2:50:29", "remaining_time": "6:54:42"}
{"current_steps": 1430, "total_steps": 4874, "loss": 0.5018, "lr": 8.906604150550443e-07, "epoch": 0.5867870332375872, "percentage": 29.34, "elapsed_time": "2:51:38", "remaining_time": "6:53:23"}
{"current_steps": 1440, "total_steps": 4874, "loss": 0.4862, "lr": 8.884151631026586e-07, "epoch": 0.5908904390644235, "percentage": 29.54, "elapsed_time": "2:52:51", "remaining_time": "6:52:13"}
{"current_steps": 1450, "total_steps": 4874, "loss": 0.5133, "lr": 8.861499834671082e-07, "epoch": 0.5949938448912597, "percentage": 29.75, "elapsed_time": "2:54:05", "remaining_time": "6:51:04"}
{"current_steps": 1460, "total_steps": 4874, "loss": 0.494, "lr": 8.838649923636864e-07, "epoch": 0.599097250718096, "percentage": 29.95, "elapsed_time": "2:55:18", "remaining_time": "6:49:56"}
{"current_steps": 1470, "total_steps": 4874, "loss": 0.4998, "lr": 8.815603070241157e-07, "epoch": 0.6032006565449323, "percentage": 30.16, "elapsed_time": "2:56:30", "remaining_time": "6:48:42"}
{"current_steps": 1480, "total_steps": 4874, "loss": 0.5247, "lr": 8.792360456905344e-07, "epoch": 0.6073040623717686, "percentage": 30.37, "elapsed_time": "2:57:41", "remaining_time": "6:47:29"}
{"current_steps": 1490, "total_steps": 4874, "loss": 0.5172, "lr": 8.768923276094282e-07, "epoch": 0.6114074681986048, "percentage": 30.57, "elapsed_time": "2:58:48", "remaining_time": "6:46:05"}
{"current_steps": 1500, "total_steps": 4874, "loss": 0.5136, "lr": 8.745292730255147e-07, "epoch": 0.6155108740254411, "percentage": 30.78, "elapsed_time": "2:59:59", "remaining_time": "6:44:51"}
{"current_steps": 1510, "total_steps": 4874, "loss": 0.5048, "lr": 8.721470031755717e-07, "epoch": 0.6196142798522773, "percentage": 30.98, "elapsed_time": "3:01:12", "remaining_time": "6:43:41"}
{"current_steps": 1520, "total_steps": 4874, "loss": 0.5184, "lr": 8.697456402822196e-07, "epoch": 0.6237176856791137, "percentage": 31.19, "elapsed_time": "3:02:22", "remaining_time": "6:42:24"}
{"current_steps": 1530, "total_steps": 4874, "loss": 0.5099, "lr": 8.673253075476484e-07, "epoch": 0.6278210915059499, "percentage": 31.39, "elapsed_time": "3:03:30", "remaining_time": "6:41:04"}
{"current_steps": 1540, "total_steps": 4874, "loss": 0.5517, "lr": 8.648861291472984e-07, "epoch": 0.6319244973327862, "percentage": 31.6, "elapsed_time": "3:04:41", "remaining_time": "6:39:50"}
{"current_steps": 1550, "total_steps": 4874, "loss": 0.5185, "lr": 8.62428230223489e-07, "epoch": 0.6360279031596224, "percentage": 31.8, "elapsed_time": "3:05:52", "remaining_time": "6:38:36"}
{"current_steps": 1560, "total_steps": 4874, "loss": 0.5208, "lr": 8.599517368789979e-07, "epoch": 0.6401313089864588, "percentage": 32.01, "elapsed_time": "3:07:05", "remaining_time": "6:37:27"}
{"current_steps": 1570, "total_steps": 4874, "loss": 0.4841, "lr": 8.574567761705918e-07, "epoch": 0.6442347148132951, "percentage": 32.21, "elapsed_time": "3:08:17", "remaining_time": "6:36:14"}
{"current_steps": 1580, "total_steps": 4874, "loss": 0.4908, "lr": 8.549434761025074e-07, "epoch": 0.6483381206401313, "percentage": 32.42, "elapsed_time": "3:09:26", "remaining_time": "6:34:57"}
{"current_steps": 1590, "total_steps": 4874, "loss": 0.5285, "lr": 8.524119656198844e-07, "epoch": 0.6524415264669676, "percentage": 32.62, "elapsed_time": "3:10:40", "remaining_time": "6:33:48"}
{"current_steps": 1600, "total_steps": 4874, "loss": 0.5628, "lr": 8.498623746021497e-07, "epoch": 0.6565449322938038, "percentage": 32.83, "elapsed_time": "3:11:47", "remaining_time": "6:32:26"}
{"current_steps": 1610, "total_steps": 4874, "loss": 0.5346, "lr": 8.472948338563544e-07, "epoch": 0.6606483381206402, "percentage": 33.03, "elapsed_time": "3:12:59", "remaining_time": "6:31:15"}
{"current_steps": 1620, "total_steps": 4874, "loss": 0.519, "lr": 8.44709475110462e-07, "epoch": 0.6647517439474764, "percentage": 33.24, "elapsed_time": "3:14:10", "remaining_time": "6:30:02"}
{"current_steps": 1630, "total_steps": 4874, "loss": 0.5201, "lr": 8.421064310065907e-07, "epoch": 0.6688551497743127, "percentage": 33.44, "elapsed_time": "3:15:21", "remaining_time": "6:28:48"}
{"current_steps": 1640, "total_steps": 4874, "loss": 0.4742, "lr": 8.39485835094208e-07, "epoch": 0.6729585556011489, "percentage": 33.65, "elapsed_time": "3:16:33", "remaining_time": "6:27:35"}
{"current_steps": 1650, "total_steps": 4874, "loss": 0.5373, "lr": 8.368478218232787e-07, "epoch": 0.6770619614279852, "percentage": 33.85, "elapsed_time": "3:17:42", "remaining_time": "6:26:18"}
{"current_steps": 1660, "total_steps": 4874, "loss": 0.5367, "lr": 8.34192526537367e-07, "epoch": 0.6811653672548215, "percentage": 34.06, "elapsed_time": "3:18:50", "remaining_time": "6:25:00"}
{"current_steps": 1670, "total_steps": 4874, "loss": 0.5031, "lr": 8.315200854666935e-07, "epoch": 0.6852687730816578, "percentage": 34.26, "elapsed_time": "3:20:03", "remaining_time": "6:23:49"}
{"current_steps": 1680, "total_steps": 4874, "loss": 0.5293, "lr": 8.288306357211443e-07, "epoch": 0.689372178908494, "percentage": 34.47, "elapsed_time": "3:21:16", "remaining_time": "6:22:39"}
{"current_steps": 1690, "total_steps": 4874, "loss": 0.4854, "lr": 8.261243152832381e-07, "epoch": 0.6934755847353303, "percentage": 34.67, "elapsed_time": "3:22:30", "remaining_time": "6:21:31"}
{"current_steps": 1700, "total_steps": 4874, "loss": 0.5718, "lr": 8.23401263001046e-07, "epoch": 0.6975789905621665, "percentage": 34.88, "elapsed_time": "3:23:39", "remaining_time": "6:20:14"}
{"current_steps": 1710, "total_steps": 4874, "loss": 0.5034, "lr": 8.206616185810686e-07, "epoch": 0.7016823963890029, "percentage": 35.08, "elapsed_time": "3:24:49", "remaining_time": "6:18:59"}
{"current_steps": 1720, "total_steps": 4874, "loss": 0.5081, "lr": 8.179055225810673e-07, "epoch": 0.7057858022158392, "percentage": 35.29, "elapsed_time": "3:25:59", "remaining_time": "6:17:43"}
{"current_steps": 1730, "total_steps": 4874, "loss": 0.4858, "lr": 8.151331164028543e-07, "epoch": 0.7098892080426754, "percentage": 35.49, "elapsed_time": "3:27:08", "remaining_time": "6:16:26"}
{"current_steps": 1740, "total_steps": 4874, "loss": 0.4902, "lr": 8.123445422850371e-07, "epoch": 0.7139926138695117, "percentage": 35.7, "elapsed_time": "3:28:21", "remaining_time": "6:15:16"}
{"current_steps": 1750, "total_steps": 4874, "loss": 0.4929, "lr": 8.095399432957212e-07, "epoch": 0.718096019696348, "percentage": 35.9, "elapsed_time": "3:29:29", "remaining_time": "6:13:58"}
{"current_steps": 1760, "total_steps": 4874, "loss": 0.5355, "lr": 8.067194633251697e-07, "epoch": 0.7221994255231843, "percentage": 36.11, "elapsed_time": "3:30:42", "remaining_time": "6:12:47"}
{"current_steps": 1770, "total_steps": 4874, "loss": 0.4859, "lr": 8.038832470784211e-07, "epoch": 0.7263028313500205, "percentage": 36.32, "elapsed_time": "3:31:53", "remaining_time": "6:11:35"}
{"current_steps": 1780, "total_steps": 4874, "loss": 0.5418, "lr": 8.010314400678661e-07, "epoch": 0.7304062371768568, "percentage": 36.52, "elapsed_time": "3:33:02", "remaining_time": "6:10:18"}
{"current_steps": 1790, "total_steps": 4874, "loss": 0.4936, "lr": 7.981641886057805e-07, "epoch": 0.734509643003693, "percentage": 36.73, "elapsed_time": "3:34:13", "remaining_time": "6:09:05"}
{"current_steps": 1800, "total_steps": 4874, "loss": 0.4927, "lr": 7.952816397968194e-07, "epoch": 0.7386130488305294, "percentage": 36.93, "elapsed_time": "3:35:24", "remaining_time": "6:07:51"}
{"current_steps": 1810, "total_steps": 4874, "loss": 0.4983, "lr": 7.923839415304705e-07, "epoch": 0.7427164546573656, "percentage": 37.14, "elapsed_time": "3:36:35", "remaining_time": "6:06:39"}
{"current_steps": 1820, "total_steps": 4874, "loss": 0.5284, "lr": 7.894712424734656e-07, "epoch": 0.7468198604842019, "percentage": 37.34, "elapsed_time": "3:37:46", "remaining_time": "6:05:26"}
{"current_steps": 1830, "total_steps": 4874, "loss": 0.513, "lr": 7.86543692062154e-07, "epoch": 0.7509232663110381, "percentage": 37.55, "elapsed_time": "3:38:58", "remaining_time": "6:04:14"}
{"current_steps": 1840, "total_steps": 4874, "loss": 0.4973, "lr": 7.836014404948352e-07, "epoch": 0.7550266721378744, "percentage": 37.75, "elapsed_time": "3:40:09", "remaining_time": "6:03:02"}
{"current_steps": 1850, "total_steps": 4874, "loss": 0.5059, "lr": 7.806446387240535e-07, "epoch": 0.7591300779647107, "percentage": 37.96, "elapsed_time": "3:41:19", "remaining_time": "6:01:46"}
{"current_steps": 1860, "total_steps": 4874, "loss": 0.4838, "lr": 7.776734384488528e-07, "epoch": 0.763233483791547, "percentage": 38.16, "elapsed_time": "3:42:32", "remaining_time": "6:00:37"}
{"current_steps": 1870, "total_steps": 4874, "loss": 0.5423, "lr": 7.746879921069936e-07, "epoch": 0.7673368896183833, "percentage": 38.37, "elapsed_time": "3:43:43", "remaining_time": "5:59:24"}
{"current_steps": 1880, "total_steps": 4874, "loss": 0.499, "lr": 7.716884528671328e-07, "epoch": 0.7714402954452195, "percentage": 38.57, "elapsed_time": "3:44:57", "remaining_time": "5:58:14"}
{"current_steps": 1890, "total_steps": 4874, "loss": 0.5326, "lr": 7.686749746209648e-07, "epoch": 0.7755437012720559, "percentage": 38.78, "elapsed_time": "3:46:08", "remaining_time": "5:57:03"}
{"current_steps": 1900, "total_steps": 4874, "loss": 0.5079, "lr": 7.656477119753267e-07, "epoch": 0.7796471070988921, "percentage": 38.98, "elapsed_time": "3:47:22", "remaining_time": "5:55:53"}
{"current_steps": 1910, "total_steps": 4874, "loss": 0.5099, "lr": 7.626068202442648e-07, "epoch": 0.7837505129257284, "percentage": 39.19, "elapsed_time": "3:48:30", "remaining_time": "5:54:36"}
{"current_steps": 1920, "total_steps": 4874, "loss": 0.5085, "lr": 7.595524554410684e-07, "epoch": 0.7878539187525646, "percentage": 39.39, "elapsed_time": "3:49:43", "remaining_time": "5:53:26"}
{"current_steps": 1930, "total_steps": 4874, "loss": 0.4627, "lr": 7.564847742702631e-07, "epoch": 0.7919573245794009, "percentage": 39.6, "elapsed_time": "3:50:57", "remaining_time": "5:52:18"}
{"current_steps": 1940, "total_steps": 4874, "loss": 0.5067, "lr": 7.534039341195729e-07, "epoch": 0.7960607304062371, "percentage": 39.8, "elapsed_time": "3:52:09", "remaining_time": "5:51:06"}
{"current_steps": 1950, "total_steps": 4874, "loss": 0.4911, "lr": 7.503100930518447e-07, "epoch": 0.8001641362330735, "percentage": 40.01, "elapsed_time": "3:53:22", "remaining_time": "5:49:56"}
{"current_steps": 1960, "total_steps": 4874, "loss": 0.518, "lr": 7.472034097969386e-07, "epoch": 0.8042675420599097, "percentage": 40.21, "elapsed_time": "3:54:34", "remaining_time": "5:48:45"}
{"current_steps": 1970, "total_steps": 4874, "loss": 0.5078, "lr": 7.440840437435845e-07, "epoch": 0.808370947886746, "percentage": 40.42, "elapsed_time": "3:55:47", "remaining_time": "5:47:34"}
{"current_steps": 1980, "total_steps": 4874, "loss": 0.4784, "lr": 7.409521549312052e-07, "epoch": 0.8124743537135822, "percentage": 40.62, "elapsed_time": "3:57:01", "remaining_time": "5:46:26"}
{"current_steps": 1990, "total_steps": 4874, "loss": 0.4796, "lr": 7.378079040417049e-07, "epoch": 0.8165777595404186, "percentage": 40.83, "elapsed_time": "3:58:13", "remaining_time": "5:45:15"}
{"current_steps": 2000, "total_steps": 4874, "loss": 0.4734, "lr": 7.346514523912249e-07, "epoch": 0.8206811653672548, "percentage": 41.03, "elapsed_time": "3:59:19", "remaining_time": "5:43:54"}
{"current_steps": 2010, "total_steps": 4874, "loss": 0.4783, "lr": 7.314829619218688e-07, "epoch": 0.8247845711940911, "percentage": 41.24, "elapsed_time": "4:01:11", "remaining_time": "5:43:39"}
{"current_steps": 2020, "total_steps": 4874, "loss": 0.4581, "lr": 7.283025951933921e-07, "epoch": 0.8288879770209274, "percentage": 41.44, "elapsed_time": "4:02:23", "remaining_time": "5:42:28"}
{"current_steps": 2030, "total_steps": 4874, "loss": 0.5047, "lr": 7.251105153748645e-07, "epoch": 0.8329913828477636, "percentage": 41.65, "elapsed_time": "4:03:37", "remaining_time": "5:41:19"}
{"current_steps": 2040, "total_steps": 4874, "loss": 0.4845, "lr": 7.219068862362956e-07, "epoch": 0.8370947886746, "percentage": 41.85, "elapsed_time": "4:04:50", "remaining_time": "5:40:08"}
{"current_steps": 2050, "total_steps": 4874, "loss": 0.5003, "lr": 7.186918721402353e-07, "epoch": 0.8411981945014362, "percentage": 42.06, "elapsed_time": "4:06:01", "remaining_time": "5:38:54"}
{"current_steps": 2060, "total_steps": 4874, "loss": 0.4843, "lr": 7.154656380333393e-07, "epoch": 0.8453016003282725, "percentage": 42.27, "elapsed_time": "4:07:11", "remaining_time": "5:37:39"}
{"current_steps": 2070, "total_steps": 4874, "loss": 0.499, "lr": 7.122283494379075e-07, "epoch": 0.8494050061551087, "percentage": 42.47, "elapsed_time": "4:08:24", "remaining_time": "5:36:29"}
{"current_steps": 2080, "total_steps": 4874, "loss": 0.4805, "lr": 7.089801724433917e-07, "epoch": 0.853508411981945, "percentage": 42.68, "elapsed_time": "4:09:37", "remaining_time": "5:35:18"}
{"current_steps": 2090, "total_steps": 4874, "loss": 0.4984, "lr": 7.057212736978738e-07, "epoch": 0.8576118178087813, "percentage": 42.88, "elapsed_time": "4:10:47", "remaining_time": "5:34:04"}
{"current_steps": 2100, "total_steps": 4874, "loss": 0.4783, "lr": 7.024518203995169e-07, "epoch": 0.8617152236356176, "percentage": 43.09, "elapsed_time": "4:11:58", "remaining_time": "5:32:51"}
{"current_steps": 2110, "total_steps": 4874, "loss": 0.4926, "lr": 6.99171980287986e-07, "epoch": 0.8658186294624538, "percentage": 43.29, "elapsed_time": "4:13:08", "remaining_time": "5:31:36"}
{"current_steps": 2120, "total_steps": 4874, "loss": 0.4978, "lr": 6.958819216358431e-07, "epoch": 0.8699220352892901, "percentage": 43.5, "elapsed_time": "4:14:21", "remaining_time": "5:30:25"}
{"current_steps": 2130, "total_steps": 4874, "loss": 0.4995, "lr": 6.925818132399135e-07, "epoch": 0.8740254411161263, "percentage": 43.7, "elapsed_time": "4:15:34", "remaining_time": "5:29:15"}
{"current_steps": 2140, "total_steps": 4874, "loss": 0.4804, "lr": 6.892718244126255e-07, "epoch": 0.8781288469429627, "percentage": 43.91, "elapsed_time": "4:16:47", "remaining_time": "5:28:04"}
{"current_steps": 2150, "total_steps": 4874, "loss": 0.4886, "lr": 6.859521249733247e-07, "epoch": 0.8822322527697989, "percentage": 44.11, "elapsed_time": "4:17:56", "remaining_time": "5:26:48"}
{"current_steps": 2160, "total_steps": 4874, "loss": 0.4664, "lr": 6.826228852395595e-07, "epoch": 0.8863356585966352, "percentage": 44.32, "elapsed_time": "4:19:07", "remaining_time": "5:25:35"}
{"current_steps": 2170, "total_steps": 4874, "loss": 0.5123, "lr": 6.792842760183454e-07, "epoch": 0.8904390644234714, "percentage": 44.52, "elapsed_time": "4:20:19", "remaining_time": "5:24:22"}
{"current_steps": 2180, "total_steps": 4874, "loss": 0.5317, "lr": 6.759364685973998e-07, "epoch": 0.8945424702503078, "percentage": 44.73, "elapsed_time": "4:21:31", "remaining_time": "5:23:11"}
{"current_steps": 2190, "total_steps": 4874, "loss": 0.4825, "lr": 6.725796347363553e-07, "epoch": 0.8986458760771441, "percentage": 44.93, "elapsed_time": "4:22:41", "remaining_time": "5:21:56"}
{"current_steps": 2200, "total_steps": 4874, "loss": 0.4977, "lr": 6.692139466579463e-07, "epoch": 0.9027492819039803, "percentage": 45.14, "elapsed_time": "4:23:53", "remaining_time": "5:20:45"}
{"current_steps": 2210, "total_steps": 4874, "loss": 0.4765, "lr": 6.658395770391744e-07, "epoch": 0.9068526877308166, "percentage": 45.34, "elapsed_time": "4:25:04", "remaining_time": "5:19:31"}
{"current_steps": 2220, "total_steps": 4874, "loss": 0.4598, "lr": 6.624566990024483e-07, "epoch": 0.9109560935576528, "percentage": 45.55, "elapsed_time": "4:26:15", "remaining_time": "5:18:19"}
{"current_steps": 2230, "total_steps": 4874, "loss": 0.4683, "lr": 6.590654861067022e-07, "epoch": 0.9150594993844892, "percentage": 45.75, "elapsed_time": "4:27:30", "remaining_time": "5:17:09"}
{"current_steps": 2240, "total_steps": 4874, "loss": 0.5042, "lr": 6.556661123384908e-07, "epoch": 0.9191629052113254, "percentage": 45.96, "elapsed_time": "4:28:41", "remaining_time": "5:15:57"}
{"current_steps": 2250, "total_steps": 4874, "loss": 0.5194, "lr": 6.522587521030639e-07, "epoch": 0.9232663110381617, "percentage": 46.16, "elapsed_time": "4:29:52", "remaining_time": "5:14:44"}
{"current_steps": 2260, "total_steps": 4874, "loss": 0.5078, "lr": 6.488435802154174e-07, "epoch": 0.9273697168649979, "percentage": 46.37, "elapsed_time": "4:31:06", "remaining_time": "5:13:34"}
{"current_steps": 2270, "total_steps": 4874, "loss": 0.4952, "lr": 6.45420771891325e-07, "epoch": 0.9314731226918342, "percentage": 46.57, "elapsed_time": "4:32:14", "remaining_time": "5:12:18"}
{"current_steps": 2280, "total_steps": 4874, "loss": 0.4962, "lr": 6.419905027383488e-07, "epoch": 0.9355765285186705, "percentage": 46.78, "elapsed_time": "4:33:28", "remaining_time": "5:11:08"}
{"current_steps": 2290, "total_steps": 4874, "loss": 0.4669, "lr": 6.385529487468291e-07, "epoch": 0.9396799343455068, "percentage": 46.98, "elapsed_time": "4:34:40", "remaining_time": "5:09:56"}
{"current_steps": 2300, "total_steps": 4874, "loss": 0.5406, "lr": 6.351082862808562e-07, "epoch": 0.943783340172343, "percentage": 47.19, "elapsed_time": "4:35:50", "remaining_time": "5:08:42"}
{"current_steps": 2310, "total_steps": 4874, "loss": 0.4783, "lr": 6.316566920692213e-07, "epoch": 0.9478867459991793, "percentage": 47.39, "elapsed_time": "4:37:02", "remaining_time": "5:07:30"}
{"current_steps": 2320, "total_steps": 4874, "loss": 0.4796, "lr": 6.281983431963492e-07, "epoch": 0.9519901518260155, "percentage": 47.6, "elapsed_time": "4:38:16", "remaining_time": "5:06:20"}
{"current_steps": 2330, "total_steps": 4874, "loss": 0.4716, "lr": 6.247334170932139e-07, "epoch": 0.9560935576528519, "percentage": 47.8, "elapsed_time": "4:39:29", "remaining_time": "5:05:09"}
{"current_steps": 2340, "total_steps": 4874, "loss": 0.5183, "lr": 6.212620915282347e-07, "epoch": 0.9601969634796882, "percentage": 48.01, "elapsed_time": "4:40:42", "remaining_time": "5:03:58"}
{"current_steps": 2350, "total_steps": 4874, "loss": 0.4569, "lr": 6.177845445981559e-07, "epoch": 0.9643003693065244, "percentage": 48.22, "elapsed_time": "4:41:54", "remaining_time": "5:02:46"}
{"current_steps": 2360, "total_steps": 4874, "loss": 0.4632, "lr": 6.143009547189096e-07, "epoch": 0.9684037751333607, "percentage": 48.42, "elapsed_time": "4:43:04", "remaining_time": "5:01:32"}
{"current_steps": 2370, "total_steps": 4874, "loss": 0.4633, "lr": 6.108115006164625e-07, "epoch": 0.972507180960197, "percentage": 48.63, "elapsed_time": "4:44:16", "remaining_time": "5:00:20"}
{"current_steps": 2380, "total_steps": 4874, "loss": 0.524, "lr": 6.073163613176455e-07, "epoch": 0.9766105867870333, "percentage": 48.83, "elapsed_time": "4:45:29", "remaining_time": "4:59:09"}
{"current_steps": 2390, "total_steps": 4874, "loss": 0.4844, "lr": 6.03815716140969e-07, "epoch": 0.9807139926138695, "percentage": 49.04, "elapsed_time": "4:46:40", "remaining_time": "4:57:56"}
{"current_steps": 2400, "total_steps": 4874, "loss": 0.5168, "lr": 6.003097446874236e-07, "epoch": 0.9848173984407058, "percentage": 49.24, "elapsed_time": "4:47:49", "remaining_time": "4:56:41"}
{"current_steps": 2410, "total_steps": 4874, "loss": 0.4667, "lr": 5.96798626831265e-07, "epoch": 0.988920804267542, "percentage": 49.45, "elapsed_time": "4:49:01", "remaining_time": "4:55:30"}
{"current_steps": 2420, "total_steps": 4874, "loss": 0.4716, "lr": 5.932825427107852e-07, "epoch": 0.9930242100943784, "percentage": 49.65, "elapsed_time": "4:50:13", "remaining_time": "4:54:18"}
{"current_steps": 2430, "total_steps": 4874, "loss": 0.4699, "lr": 5.897616727190714e-07, "epoch": 0.9971276159212146, "percentage": 49.86, "elapsed_time": "4:51:26", "remaining_time": "4:53:07"}
{"current_steps": 2440, "total_steps": 4874, "loss": 0.473, "lr": 5.862361974947503e-07, "epoch": 1.001231021748051, "percentage": 50.06, "elapsed_time": "4:52:42", "remaining_time": "4:51:59"}
{"current_steps": 2450, "total_steps": 4874, "loss": 0.453, "lr": 5.827062979127206e-07, "epoch": 1.0053344275748872, "percentage": 50.27, "elapsed_time": "4:53:53", "remaining_time": "4:50:45"}
{"current_steps": 2460, "total_steps": 4874, "loss": 0.4411, "lr": 5.791721550748732e-07, "epoch": 1.0094378334017233, "percentage": 50.47, "elapsed_time": "4:55:04", "remaining_time": "4:49:33"}
{"current_steps": 2470, "total_steps": 4874, "loss": 0.4622, "lr": 5.756339503007997e-07, "epoch": 1.0135412392285597, "percentage": 50.68, "elapsed_time": "4:56:18", "remaining_time": "4:48:23"}
{"current_steps": 2480, "total_steps": 4874, "loss": 0.4579, "lr": 5.720918651184893e-07, "epoch": 1.017644645055396, "percentage": 50.88, "elapsed_time": "4:57:26", "remaining_time": "4:47:08"}
{"current_steps": 2490, "total_steps": 4874, "loss": 0.4625, "lr": 5.685460812550172e-07, "epoch": 1.0217480508822323, "percentage": 51.09, "elapsed_time": "4:58:39", "remaining_time": "4:45:56"}
{"current_steps": 2500, "total_steps": 4874, "loss": 0.4762, "lr": 5.649967806272184e-07, "epoch": 1.0258514567090686, "percentage": 51.29, "elapsed_time": "4:59:52", "remaining_time": "4:44:45"}
{"current_steps": 2510, "total_steps": 4874, "loss": 0.4552, "lr": 5.614441453323571e-07, "epoch": 1.0299548625359047, "percentage": 51.5, "elapsed_time": "5:01:04", "remaining_time": "4:43:33"}
{"current_steps": 2520, "total_steps": 4874, "loss": 0.4426, "lr": 5.578883576387821e-07, "epoch": 1.034058268362741, "percentage": 51.7, "elapsed_time": "5:02:18", "remaining_time": "4:42:23"}
{"current_steps": 2530, "total_steps": 4874, "loss": 0.4075, "lr": 5.543295999765767e-07, "epoch": 1.0381616741895774, "percentage": 51.91, "elapsed_time": "5:03:31", "remaining_time": "4:41:12"}
{"current_steps": 2540, "total_steps": 4874, "loss": 0.419, "lr": 5.507680549281988e-07, "epoch": 1.0422650800164137, "percentage": 52.11, "elapsed_time": "5:04:44", "remaining_time": "4:40:01"}
{"current_steps": 2550, "total_steps": 4874, "loss": 0.4284, "lr": 5.472039052191138e-07, "epoch": 1.0463684858432498, "percentage": 52.32, "elapsed_time": "5:05:57", "remaining_time": "4:38:50"}
{"current_steps": 2560, "total_steps": 4874, "loss": 0.4419, "lr": 5.436373337084184e-07, "epoch": 1.0504718916700861, "percentage": 52.52, "elapsed_time": "5:07:09", "remaining_time": "4:37:38"}
{"current_steps": 2570, "total_steps": 4874, "loss": 0.4572, "lr": 5.400685233794614e-07, "epoch": 1.0545752974969225, "percentage": 52.73, "elapsed_time": "5:08:21", "remaining_time": "4:36:26"}
{"current_steps": 2580, "total_steps": 4874, "loss": 0.437, "lr": 5.364976573304537e-07, "epoch": 1.0586787033237588, "percentage": 52.93, "elapsed_time": "5:09:30", "remaining_time": "4:35:12"}
{"current_steps": 2590, "total_steps": 4874, "loss": 0.4506, "lr": 5.329249187650755e-07, "epoch": 1.062782109150595, "percentage": 53.14, "elapsed_time": "5:10:44", "remaining_time": "4:34:01"}
{"current_steps": 2600, "total_steps": 4874, "loss": 0.45, "lr": 5.29350490983076e-07, "epoch": 1.0668855149774312, "percentage": 53.34, "elapsed_time": "5:11:57", "remaining_time": "4:32:50"}
{"current_steps": 2610, "total_steps": 4874, "loss": 0.421, "lr": 5.257745573708707e-07, "epoch": 1.0709889208042676, "percentage": 53.55, "elapsed_time": "5:13:03", "remaining_time": "4:31:33"}
{"current_steps": 2620, "total_steps": 4874, "loss": 0.4741, "lr": 5.221973013921312e-07, "epoch": 1.0750923266311039, "percentage": 53.75, "elapsed_time": "5:14:17", "remaining_time": "4:30:23"}
{"current_steps": 2630, "total_steps": 4874, "loss": 0.3984, "lr": 5.18618906578374e-07, "epoch": 1.07919573245794, "percentage": 53.96, "elapsed_time": "5:15:32", "remaining_time": "4:29:13"}
{"current_steps": 2640, "total_steps": 4874, "loss": 0.4932, "lr": 5.150395565195425e-07, "epoch": 1.0832991382847763, "percentage": 54.16, "elapsed_time": "5:16:42", "remaining_time": "4:28:00"}
{"current_steps": 2650, "total_steps": 4874, "loss": 0.4284, "lr": 5.114594348545905e-07, "epoch": 1.0874025441116126, "percentage": 54.37, "elapsed_time": "5:17:56", "remaining_time": "4:26:49"}
{"current_steps": 2660, "total_steps": 4874, "loss": 0.4272, "lr": 5.078787252620581e-07, "epoch": 1.091505949938449, "percentage": 54.58, "elapsed_time": "5:19:07", "remaining_time": "4:25:37"}
{"current_steps": 2670, "total_steps": 4874, "loss": 0.4019, "lr": 5.042976114506495e-07, "epoch": 1.0956093557652853, "percentage": 54.78, "elapsed_time": "5:20:19", "remaining_time": "4:24:25"}
{"current_steps": 2680, "total_steps": 4874, "loss": 0.401, "lr": 5.00716277149807e-07, "epoch": 1.0997127615921214, "percentage": 54.99, "elapsed_time": "5:21:29", "remaining_time": "4:23:11"}
{"current_steps": 2690, "total_steps": 4874, "loss": 0.4364, "lr": 4.971349061002856e-07, "epoch": 1.1038161674189577, "percentage": 55.19, "elapsed_time": "5:22:42", "remaining_time": "4:21:59"}
{"current_steps": 2700, "total_steps": 4874, "loss": 0.4352, "lr": 4.935536820447257e-07, "epoch": 1.107919573245794, "percentage": 55.4, "elapsed_time": "5:23:52", "remaining_time": "4:20:46"}
{"current_steps": 2710, "total_steps": 4874, "loss": 0.4505, "lr": 4.89972788718226e-07, "epoch": 1.1120229790726304, "percentage": 55.6, "elapsed_time": "5:25:05", "remaining_time": "4:19:35"}
{"current_steps": 2720, "total_steps": 4874, "loss": 0.4531, "lr": 4.863924098389166e-07, "epoch": 1.1161263848994665, "percentage": 55.81, "elapsed_time": "5:26:17", "remaining_time": "4:18:23"}
{"current_steps": 2730, "total_steps": 4874, "loss": 0.4581, "lr": 4.828127290985347e-07, "epoch": 1.1202297907263028, "percentage": 56.01, "elapsed_time": "5:27:28", "remaining_time": "4:17:10"}
{"current_steps": 2740, "total_steps": 4874, "loss": 0.4435, "lr": 4.79233930152999e-07, "epoch": 1.1243331965531391, "percentage": 56.22, "elapsed_time": "5:28:40", "remaining_time": "4:15:59"}
{"current_steps": 2750, "total_steps": 4874, "loss": 0.4177, "lr": 4.756561966129875e-07, "epoch": 1.1284366023799755, "percentage": 56.42, "elapsed_time": "5:29:54", "remaining_time": "4:14:48"}
{"current_steps": 2760, "total_steps": 4874, "loss": 0.4462, "lr": 4.7207971203451775e-07, "epoch": 1.1325400082068118, "percentage": 56.63, "elapsed_time": "5:31:08", "remaining_time": "4:13:37"}
{"current_steps": 2770, "total_steps": 4874, "loss": 0.4215, "lr": 4.6850465990952884e-07, "epoch": 1.1366434140336479, "percentage": 56.83, "elapsed_time": "5:32:18", "remaining_time": "4:12:24"}
{"current_steps": 2780, "total_steps": 4874, "loss": 0.4161, "lr": 4.6493122365646823e-07, "epoch": 1.1407468198604842, "percentage": 57.04, "elapsed_time": "5:33:27", "remaining_time": "4:11:10"}
{"current_steps": 2790, "total_steps": 4874, "loss": 0.4236, "lr": 4.6135958661088014e-07, "epoch": 1.1448502256873205, "percentage": 57.24, "elapsed_time": "5:34:37", "remaining_time": "4:09:56"}
{"current_steps": 2800, "total_steps": 4874, "loss": 0.4417, "lr": 4.5778993201600076e-07, "epoch": 1.1489536315141566, "percentage": 57.45, "elapsed_time": "5:35:51", "remaining_time": "4:08:46"}
{"current_steps": 2810, "total_steps": 4874, "loss": 0.4628, "lr": 4.5422244301335646e-07, "epoch": 1.153057037340993, "percentage": 57.65, "elapsed_time": "5:37:04", "remaining_time": "4:07:34"}
{"current_steps": 2820, "total_steps": 4874, "loss": 0.4328, "lr": 4.5065730263336724e-07, "epoch": 1.1571604431678293, "percentage": 57.86, "elapsed_time": "5:38:17", "remaining_time": "4:06:23"}
{"current_steps": 2830, "total_steps": 4874, "loss": 0.4305, "lr": 4.470946937859571e-07, "epoch": 1.1612638489946656, "percentage": 58.06, "elapsed_time": "5:39:27", "remaining_time": "4:05:10"}
{"current_steps": 2840, "total_steps": 4874, "loss": 0.4403, "lr": 4.4353479925116967e-07, "epoch": 1.165367254821502, "percentage": 58.27, "elapsed_time": "5:40:40", "remaining_time": "4:03:59"}
{"current_steps": 2850, "total_steps": 4874, "loss": 0.4036, "lr": 4.399778016697896e-07, "epoch": 1.169470660648338, "percentage": 58.47, "elapsed_time": "5:41:51", "remaining_time": "4:02:46"}
{"current_steps": 2860, "total_steps": 4874, "loss": 0.4524, "lr": 4.364238835339743e-07, "epoch": 1.1735740664751744, "percentage": 58.68, "elapsed_time": "5:43:03", "remaining_time": "4:01:34"}
{"current_steps": 2870, "total_steps": 4874, "loss": 0.4047, "lr": 4.3287322717788877e-07, "epoch": 1.1776774723020107, "percentage": 58.88, "elapsed_time": "5:44:17", "remaining_time": "4:00:24"}
{"current_steps": 2880, "total_steps": 4874, "loss": 0.426, "lr": 4.2932601476835247e-07, "epoch": 1.181780878128847, "percentage": 59.09, "elapsed_time": "5:45:30", "remaining_time": "3:59:13"}
{"current_steps": 2890, "total_steps": 4874, "loss": 0.4413, "lr": 4.2578242829549307e-07, "epoch": 1.1858842839556831, "percentage": 59.29, "elapsed_time": "5:46:40", "remaining_time": "3:57:59"}
{"current_steps": 2900, "total_steps": 4874, "loss": 0.4538, "lr": 4.222426495634086e-07, "epoch": 1.1899876897825195, "percentage": 59.5, "elapsed_time": "5:47:54", "remaining_time": "3:56:48"}
{"current_steps": 2910, "total_steps": 4874, "loss": 0.4255, "lr": 4.187068601808408e-07, "epoch": 1.1940910956093558, "percentage": 59.7, "elapsed_time": "5:49:06", "remaining_time": "3:55:36"}
{"current_steps": 2920, "total_steps": 4874, "loss": 0.386, "lr": 4.1517524155185767e-07, "epoch": 1.198194501436192, "percentage": 59.91, "elapsed_time": "5:50:17", "remaining_time": "3:54:24"}
{"current_steps": 2930, "total_steps": 4874, "loss": 0.4446, "lr": 4.116479748665457e-07, "epoch": 1.2022979072630284, "percentage": 60.11, "elapsed_time": "5:51:28", "remaining_time": "3:53:12"}
{"current_steps": 2940, "total_steps": 4874, "loss": 0.4226, "lr": 4.0812524109171475e-07, "epoch": 1.2064013130898645, "percentage": 60.32, "elapsed_time": "5:52:41", "remaining_time": "3:52:00"}
{"current_steps": 2950, "total_steps": 4874, "loss": 0.4596, "lr": 4.0460722096161337e-07, "epoch": 1.2105047189167009, "percentage": 60.53, "elapsed_time": "5:53:50", "remaining_time": "3:50:46"}
{"current_steps": 2960, "total_steps": 4874, "loss": 0.4225, "lr": 4.0109409496865537e-07, "epoch": 1.2146081247435372, "percentage": 60.73, "elapsed_time": "5:55:05", "remaining_time": "3:49:36"}
{"current_steps": 2970, "total_steps": 4874, "loss": 0.4413, "lr": 3.9758604335416125e-07, "epoch": 1.2187115305703733, "percentage": 60.94, "elapsed_time": "5:56:18", "remaining_time": "3:48:25"}
{"current_steps": 2980, "total_steps": 4874, "loss": 0.4438, "lr": 3.94083246099109e-07, "epoch": 1.2228149363972096, "percentage": 61.14, "elapsed_time": "5:57:31", "remaining_time": "3:47:13"}
{"current_steps": 2990, "total_steps": 4874, "loss": 0.4335, "lr": 3.905858829149017e-07, "epoch": 1.226918342224046, "percentage": 61.35, "elapsed_time": "5:58:45", "remaining_time": "3:46:03"}
{"current_steps": 3000, "total_steps": 4874, "loss": 0.443, "lr": 3.8709413323414703e-07, "epoch": 1.2310217480508823, "percentage": 61.55, "elapsed_time": "6:00:00", "remaining_time": "3:44:52"}
{"current_steps": 3010, "total_steps": 4874, "loss": 0.4594, "lr": 3.8360817620145035e-07, "epoch": 1.2351251538777186, "percentage": 61.76, "elapsed_time": "6:01:49", "remaining_time": "3:44:03"}
{"current_steps": 3020, "total_steps": 4874, "loss": 0.4291, "lr": 3.801281906642256e-07, "epoch": 1.2392285597045547, "percentage": 61.96, "elapsed_time": "6:03:01", "remaining_time": "3:42:51"}
{"current_steps": 3030, "total_steps": 4874, "loss": 0.4399, "lr": 3.7665435516351796e-07, "epoch": 1.243331965531391, "percentage": 62.17, "elapsed_time": "6:04:14", "remaining_time": "3:41:40"}
{"current_steps": 3040, "total_steps": 4874, "loss": 0.4308, "lr": 3.7318684792484395e-07, "epoch": 1.2474353713582274, "percentage": 62.37, "elapsed_time": "6:05:28", "remaining_time": "3:40:29"}
{"current_steps": 3050, "total_steps": 4874, "loss": 0.4643, "lr": 3.697258468490487e-07, "epoch": 1.2515387771850637, "percentage": 62.58, "elapsed_time": "6:06:35", "remaining_time": "3:39:13"}
{"current_steps": 3060, "total_steps": 4874, "loss": 0.4506, "lr": 3.662715295031776e-07, "epoch": 1.2556421830118998, "percentage": 62.78, "elapsed_time": "6:07:47", "remaining_time": "3:38:01"}
{"current_steps": 3070, "total_steps": 4874, "loss": 0.3933, "lr": 3.62824073111366e-07, "epoch": 1.259745588838736, "percentage": 62.99, "elapsed_time": "6:09:01", "remaining_time": "3:36:50"}
{"current_steps": 3080, "total_steps": 4874, "loss": 0.4548, "lr": 3.5938365454574825e-07, "epoch": 1.2638489946655724, "percentage": 63.19, "elapsed_time": "6:10:11", "remaining_time": "3:35:37"}
{"current_steps": 3090, "total_steps": 4874, "loss": 0.4258, "lr": 3.559504503173812e-07, "epoch": 1.2679524004924088, "percentage": 63.4, "elapsed_time": "6:11:25", "remaining_time": "3:34:26"}
{"current_steps": 3100, "total_steps": 4874, "loss": 0.4282, "lr": 3.525246365671902e-07, "epoch": 1.272055806319245, "percentage": 63.6, "elapsed_time": "6:12:39", "remaining_time": "3:33:15"}
{"current_steps": 3110, "total_steps": 4874, "loss": 0.4513, "lr": 3.491063890569309e-07, "epoch": 1.2761592121460812, "percentage": 63.81, "elapsed_time": "6:13:49", "remaining_time": "3:32:02"}
{"current_steps": 3120, "total_steps": 4874, "loss": 0.4406, "lr": 3.4569588316017184e-07, "epoch": 1.2802626179729175, "percentage": 64.01, "elapsed_time": "6:15:02", "remaining_time": "3:30:50"}
{"current_steps": 3130, "total_steps": 4874, "loss": 0.4166, "lr": 3.422932938532979e-07, "epoch": 1.2843660237997538, "percentage": 64.22, "elapsed_time": "6:16:14", "remaining_time": "3:29:37"}
{"current_steps": 3140, "total_steps": 4874, "loss": 0.4297, "lr": 3.388987957065321e-07, "epoch": 1.28846942962659, "percentage": 64.42, "elapsed_time": "6:17:27", "remaining_time": "3:28:26"}
{"current_steps": 3150, "total_steps": 4874, "loss": 0.445, "lr": 3.355125628749793e-07, "epoch": 1.2925728354534263, "percentage": 64.63, "elapsed_time": "6:18:37", "remaining_time": "3:27:13"}
{"current_steps": 3160, "total_steps": 4874, "loss": 0.432, "lr": 3.321347690896921e-07, "epoch": 1.2966762412802626, "percentage": 64.83, "elapsed_time": "6:19:46", "remaining_time": "3:25:59"}
{"current_steps": 3170, "total_steps": 4874, "loss": 0.411, "lr": 3.287655876487561e-07, "epoch": 1.300779647107099, "percentage": 65.04, "elapsed_time": "6:20:57", "remaining_time": "3:24:46"}
{"current_steps": 3180, "total_steps": 4874, "loss": 0.4937, "lr": 3.254051914084006e-07, "epoch": 1.3048830529339353, "percentage": 65.24, "elapsed_time": "6:22:11", "remaining_time": "3:23:35"}
{"current_steps": 3190, "total_steps": 4874, "loss": 0.4253, "lr": 3.220537527741283e-07, "epoch": 1.3089864587607716, "percentage": 65.45, "elapsed_time": "6:23:22", "remaining_time": "3:22:23"}
{"current_steps": 3200, "total_steps": 4874, "loss": 0.4464, "lr": 3.187114436918711e-07, "epoch": 1.3130898645876077, "percentage": 65.65, "elapsed_time": "6:24:34", "remaining_time": "3:21:10"}
{"current_steps": 3210, "total_steps": 4874, "loss": 0.4455, "lr": 3.1537843563916873e-07, "epoch": 1.317193270414444, "percentage": 65.86, "elapsed_time": "6:25:48", "remaining_time": "3:19:59"}
{"current_steps": 3220, "total_steps": 4874, "loss": 0.4462, "lr": 3.120548996163702e-07, "epoch": 1.3212966762412803, "percentage": 66.06, "elapsed_time": "6:26:58", "remaining_time": "3:18:46"}
{"current_steps": 3230, "total_steps": 4874, "loss": 0.44, "lr": 3.0874100613786064e-07, "epoch": 1.3254000820681164, "percentage": 66.27, "elapsed_time": "6:28:09", "remaining_time": "3:17:33"}
{"current_steps": 3240, "total_steps": 4874, "loss": 0.4088, "lr": 3.054369252233141e-07, "epoch": 1.3295034878949528, "percentage": 66.48, "elapsed_time": "6:29:22", "remaining_time": "3:16:22"}
{"current_steps": 3250, "total_steps": 4874, "loss": 0.4339, "lr": 3.0214282638896924e-07, "epoch": 1.333606893721789, "percentage": 66.68, "elapsed_time": "6:30:31", "remaining_time": "3:15:08"}
{"current_steps": 3260, "total_steps": 4874, "loss": 0.4418, "lr": 2.9885887863893386e-07, "epoch": 1.3377102995486254, "percentage": 66.89, "elapsed_time": "6:31:43", "remaining_time": "3:13:56"}
{"current_steps": 3270, "total_steps": 4874, "loss": 0.4132, "lr": 2.955852504565122e-07, "epoch": 1.3418137053754617, "percentage": 67.09, "elapsed_time": "6:32:57", "remaining_time": "3:12:45"}
{"current_steps": 3280, "total_steps": 4874, "loss": 0.469, "lr": 2.923221097955625e-07, "epoch": 1.3459171112022978, "percentage": 67.3, "elapsed_time": "6:34:10", "remaining_time": "3:11:33"}
{"current_steps": 3290, "total_steps": 4874, "loss": 0.4514, "lr": 2.890696240718798e-07, "epoch": 1.3500205170291342, "percentage": 67.5, "elapsed_time": "6:35:24", "remaining_time": "3:10:22"}
{"current_steps": 3300, "total_steps": 4874, "loss": 0.453, "lr": 2.858279601546059e-07, "epoch": 1.3541239228559705, "percentage": 67.71, "elapsed_time": "6:36:33", "remaining_time": "3:09:08"}
{"current_steps": 3310, "total_steps": 4874, "loss": 0.4114, "lr": 2.825972843576685e-07, "epoch": 1.3582273286828066, "percentage": 67.91, "elapsed_time": "6:37:41", "remaining_time": "3:07:54"}
{"current_steps": 3320, "total_steps": 4874, "loss": 0.4359, "lr": 2.7937776243124934e-07, "epoch": 1.362330734509643, "percentage": 68.12, "elapsed_time": "6:38:53", "remaining_time": "3:06:42"}
{"current_steps": 3330, "total_steps": 4874, "loss": 0.4175, "lr": 2.761695595532787e-07, "epoch": 1.3664341403364793, "percentage": 68.32, "elapsed_time": "6:40:07", "remaining_time": "3:05:31"}
{"current_steps": 3340, "total_steps": 4874, "loss": 0.4107, "lr": 2.729728403209624e-07, "epoch": 1.3705375461633156, "percentage": 68.53, "elapsed_time": "6:41:20", "remaining_time": "3:04:19"}
{"current_steps": 3350, "total_steps": 4874, "loss": 0.416, "lr": 2.6978776874233664e-07, "epoch": 1.374640951990152, "percentage": 68.73, "elapsed_time": "6:42:31", "remaining_time": "3:03:07"}
{"current_steps": 3360, "total_steps": 4874, "loss": 0.4054, "lr": 2.666145082278528e-07, "epoch": 1.3787443578169882, "percentage": 68.94, "elapsed_time": "6:43:45", "remaining_time": "3:01:55"}
{"current_steps": 3370, "total_steps": 4874, "loss": 0.4398, "lr": 2.63453221581995e-07, "epoch": 1.3828477636438243, "percentage": 69.14, "elapsed_time": "6:44:59", "remaining_time": "3:00:44"}
{"current_steps": 3380, "total_steps": 4874, "loss": 0.4337, "lr": 2.6030407099492624e-07, "epoch": 1.3869511694706607, "percentage": 69.35, "elapsed_time": "6:46:13", "remaining_time": "2:59:33"}
{"current_steps": 3390, "total_steps": 4874, "loss": 0.4221, "lr": 2.5716721803416765e-07, "epoch": 1.391054575297497, "percentage": 69.55, "elapsed_time": "6:47:25", "remaining_time": "2:58:21"}
{"current_steps": 3400, "total_steps": 4874, "loss": 0.4417, "lr": 2.5404282363630956e-07, "epoch": 1.395157981124333, "percentage": 69.76, "elapsed_time": "6:48:38", "remaining_time": "2:57:09"}
{"current_steps": 3410, "total_steps": 4874, "loss": 0.4079, "lr": 2.5093104809875433e-07, "epoch": 1.3992613869511694, "percentage": 69.96, "elapsed_time": "6:49:52", "remaining_time": "2:55:58"}
{"current_steps": 3420, "total_steps": 4874, "loss": 0.3967, "lr": 2.478320510714922e-07, "epoch": 1.4033647927780057, "percentage": 70.17, "elapsed_time": "6:51:05", "remaining_time": "2:54:46"}
{"current_steps": 3430, "total_steps": 4874, "loss": 0.4239, "lr": 2.447459915489106e-07, "epoch": 1.407468198604842, "percentage": 70.37, "elapsed_time": "6:52:14", "remaining_time": "2:53:33"}
{"current_steps": 3440, "total_steps": 4874, "loss": 0.4043, "lr": 2.416730278616363e-07, "epoch": 1.4115716044316784, "percentage": 70.58, "elapsed_time": "6:53:28", "remaining_time": "2:52:21"}
{"current_steps": 3450, "total_steps": 4874, "loss": 0.4376, "lr": 2.3861331766841366e-07, "epoch": 1.4156750102585145, "percentage": 70.78, "elapsed_time": "6:54:39", "remaining_time": "2:51:09"}
{"current_steps": 3460, "total_steps": 4874, "loss": 0.4329, "lr": 2.3556701794801448e-07, "epoch": 1.4197784160853508, "percentage": 70.99, "elapsed_time": "6:55:50", "remaining_time": "2:49:56"}
{"current_steps": 3470, "total_steps": 4874, "loss": 0.4249, "lr": 2.3253428499118465e-07, "epoch": 1.4238818219121872, "percentage": 71.19, "elapsed_time": "6:56:59", "remaining_time": "2:48:43"}
{"current_steps": 3480, "total_steps": 4874, "loss": 0.4224, "lr": 2.2951527439262626e-07, "epoch": 1.4279852277390233, "percentage": 71.4, "elapsed_time": "6:58:12", "remaining_time": "2:47:31"}
{"current_steps": 3490, "total_steps": 4874, "loss": 0.431, "lr": 2.2651014104301396e-07, "epoch": 1.4320886335658596, "percentage": 71.6, "elapsed_time": "6:59:21", "remaining_time": "2:46:18"}
{"current_steps": 3500, "total_steps": 4874, "loss": 0.4269, "lr": 2.235190391210489e-07, "epoch": 1.436192039392696, "percentage": 71.81, "elapsed_time": "7:00:33", "remaining_time": "2:45:06"}
{"current_steps": 3510, "total_steps": 4874, "loss": 0.4334, "lr": 2.205421220855478e-07, "epoch": 1.4402954452195322, "percentage": 72.01, "elapsed_time": "7:01:42", "remaining_time": "2:43:52"}
{"current_steps": 3520, "total_steps": 4874, "loss": 0.4587, "lr": 2.1757954266757017e-07, "epoch": 1.4443988510463686, "percentage": 72.22, "elapsed_time": "7:02:56", "remaining_time": "2:42:41"}
{"current_steps": 3530, "total_steps": 4874, "loss": 0.4267, "lr": 2.146314528625832e-07, "epoch": 1.4485022568732049, "percentage": 72.43, "elapsed_time": "7:04:07", "remaining_time": "2:41:28"}
{"current_steps": 3540, "total_steps": 4874, "loss": 0.4829, "lr": 2.1169800392266206e-07, "epoch": 1.452605662700041, "percentage": 72.63, "elapsed_time": "7:05:19", "remaining_time": "2:40:16"}
{"current_steps": 3550, "total_steps": 4874, "loss": 0.4236, "lr": 2.0877934634873107e-07, "epoch": 1.4567090685268773, "percentage": 72.84, "elapsed_time": "7:06:30", "remaining_time": "2:39:04"}
{"current_steps": 3560, "total_steps": 4874, "loss": 0.4075, "lr": 2.0587562988284213e-07, "epoch": 1.4608124743537136, "percentage": 73.04, "elapsed_time": "7:07:41", "remaining_time": "2:37:51"}
{"current_steps": 3570, "total_steps": 4874, "loss": 0.4346, "lr": 2.0298700350049126e-07, "epoch": 1.4649158801805497, "percentage": 73.25, "elapsed_time": "7:08:54", "remaining_time": "2:36:39"}
{"current_steps": 3580, "total_steps": 4874, "loss": 0.4258, "lr": 2.0011361540297677e-07, "epoch": 1.469019286007386, "percentage": 73.45, "elapsed_time": "7:10:04", "remaining_time": "2:35:27"}
{"current_steps": 3590, "total_steps": 4874, "loss": 0.3865, "lr": 1.972556130097946e-07, "epoch": 1.4731226918342224, "percentage": 73.66, "elapsed_time": "7:11:18", "remaining_time": "2:34:15"}
{"current_steps": 3600, "total_steps": 4874, "loss": 0.4289, "lr": 1.9441314295107535e-07, "epoch": 1.4772260976610587, "percentage": 73.86, "elapsed_time": "7:12:31", "remaining_time": "2:33:03"}
{"current_steps": 3610, "total_steps": 4874, "loss": 0.4469, "lr": 1.915863510600621e-07, "epoch": 1.481329503487895, "percentage": 74.07, "elapsed_time": "7:13:41", "remaining_time": "2:31:51"}
{"current_steps": 3620, "total_steps": 4874, "loss": 0.4422, "lr": 1.8877538236562696e-07, "epoch": 1.4854329093147312, "percentage": 74.27, "elapsed_time": "7:14:55", "remaining_time": "2:30:39"}
{"current_steps": 3630, "total_steps": 4874, "loss": 0.4198, "lr": 1.8598038108483184e-07, "epoch": 1.4895363151415675, "percentage": 74.48, "elapsed_time": "7:16:07", "remaining_time": "2:29:27"}
{"current_steps": 3640, "total_steps": 4874, "loss": 0.4351, "lr": 1.8320149061552858e-07, "epoch": 1.4936397209684038, "percentage": 74.68, "elapsed_time": "7:17:22", "remaining_time": "2:28:16"}
{"current_steps": 3650, "total_steps": 4874, "loss": 0.3949, "lr": 1.8043885352900163e-07, "epoch": 1.4977431267952401, "percentage": 74.89, "elapsed_time": "7:18:35", "remaining_time": "2:27:04"}
{"current_steps": 3660, "total_steps": 4874, "loss": 0.4095, "lr": 1.7769261156265447e-07, "epoch": 1.5018465326220762, "percentage": 75.09, "elapsed_time": "7:19:46", "remaining_time": "2:25:52"}
{"current_steps": 3670, "total_steps": 4874, "loss": 0.449, "lr": 1.7496290561273657e-07, "epoch": 1.5059499384489126, "percentage": 75.3, "elapsed_time": "7:20:56", "remaining_time": "2:24:39"}
{"current_steps": 3680, "total_steps": 4874, "loss": 0.4377, "lr": 1.722498757271153e-07, "epoch": 1.5100533442757489, "percentage": 75.5, "elapsed_time": "7:22:07", "remaining_time": "2:23:26"}
|
| 369 |
+
{"current_steps": 3690, "total_steps": 4874, "loss": 0.3979, "lr": 1.695536610980912e-07, "epoch": 1.5141567501025852, "percentage": 75.71, "elapsed_time": "7:23:14", "remaining_time": "2:22:13"}
|
| 370 |
+
{"current_steps": 3700, "total_steps": 4874, "loss": 0.4164, "lr": 1.668744000552555e-07, "epoch": 1.5182601559294215, "percentage": 75.91, "elapsed_time": "7:24:21", "remaining_time": "2:20:59"}
|
| 371 |
+
{"current_steps": 3710, "total_steps": 4874, "loss": 0.4391, "lr": 1.6421223005839424e-07, "epoch": 1.5223635617562576, "percentage": 76.12, "elapsed_time": "7:25:32", "remaining_time": "2:19:47"}
|
| 372 |
+
{"current_steps": 3720, "total_steps": 4874, "loss": 0.4098, "lr": 1.6156728769043566e-07, "epoch": 1.526466967583094, "percentage": 76.32, "elapsed_time": "7:26:43", "remaining_time": "2:18:34"}
|
| 373 |
+
{"current_steps": 3730, "total_steps": 4874, "loss": 0.425, "lr": 1.5893970865044175e-07, "epoch": 1.5305703734099303, "percentage": 76.53, "elapsed_time": "7:27:55", "remaining_time": "2:17:22"}
|
| 374 |
+
{"current_steps": 3740, "total_steps": 4874, "loss": 0.3678, "lr": 1.5632962774664805e-07, "epoch": 1.5346737792367664, "percentage": 76.73, "elapsed_time": "7:29:08", "remaining_time": "2:16:11"}
|
| 375 |
+
{"current_steps": 3750, "total_steps": 4874, "loss": 0.4172, "lr": 1.537371788895455e-07, "epoch": 1.5387771850636027, "percentage": 76.94, "elapsed_time": "7:30:20", "remaining_time": "2:14:58"}
|
| 376 |
+
{"current_steps": 3760, "total_steps": 4874, "loss": 0.3844, "lr": 1.5116249508501112e-07, "epoch": 1.542880590890439, "percentage": 77.14, "elapsed_time": "7:31:33", "remaining_time": "2:13:47"}
|
| 377 |
+
{"current_steps": 3770, "total_steps": 4874, "loss": 0.4355, "lr": 1.486057084274841e-07, "epoch": 1.5469839967172754, "percentage": 77.35, "elapsed_time": "7:32:45", "remaining_time": "2:12:35"}
|
| 378 |
+
{"current_steps": 3780, "total_steps": 4874, "loss": 0.4262, "lr": 1.4606695009318854e-07, "epoch": 1.5510874025441117, "percentage": 77.55, "elapsed_time": "7:33:54", "remaining_time": "2:11:22"}
|
| 379 |
+
{"current_steps": 3790, "total_steps": 4874, "loss": 0.3866, "lr": 1.4354635033340305e-07, "epoch": 1.555190808370948, "percentage": 77.76, "elapsed_time": "7:35:07", "remaining_time": "2:10:10"}
|
| 380 |
+
{"current_steps": 3800, "total_steps": 4874, "loss": 0.3842, "lr": 1.4104403846777906e-07, "epoch": 1.5592942141977841, "percentage": 77.96, "elapsed_time": "7:36:20", "remaining_time": "2:08:58"}
|
| 381 |
+
{"current_steps": 3810, "total_steps": 4874, "loss": 0.3904, "lr": 1.3856014287770502e-07, "epoch": 1.5633976200246205, "percentage": 78.17, "elapsed_time": "7:37:32", "remaining_time": "2:07:46"}
|
| 382 |
+
{"current_steps": 3820, "total_steps": 4874, "loss": 0.4254, "lr": 1.360947909997209e-07, "epoch": 1.5675010258514566, "percentage": 78.38, "elapsed_time": "7:38:45", "remaining_time": "2:06:34"}
|
| 383 |
+
{"current_steps": 3830, "total_steps": 4874, "loss": 0.4283, "lr": 1.3364810931897885e-07, "epoch": 1.571604431678293, "percentage": 78.58, "elapsed_time": "7:39:57", "remaining_time": "2:05:22"}
|
| 384 |
+
{"current_steps": 3840, "total_steps": 4874, "loss": 0.4295, "lr": 1.3122022336275475e-07, "epoch": 1.5757078375051292, "percentage": 78.79, "elapsed_time": "7:41:08", "remaining_time": "2:04:10"}
|
| 385 |
+
{"current_steps": 3850, "total_steps": 4874, "loss": 0.4312, "lr": 1.2881125769400785e-07, "epoch": 1.5798112433319655, "percentage": 78.99, "elapsed_time": "7:42:19", "remaining_time": "2:02:58"}
|
| 386 |
+
{"current_steps": 3860, "total_steps": 4874, "loss": 0.3986, "lr": 1.2642133590499005e-07, "epoch": 1.5839146491588019, "percentage": 79.2, "elapsed_time": "7:43:34", "remaining_time": "2:01:46"}
|
| 387 |
+
{"current_steps": 3870, "total_steps": 4874, "loss": 0.3693, "lr": 1.240505806109043e-07, "epoch": 1.5880180549856382, "percentage": 79.4, "elapsed_time": "7:44:48", "remaining_time": "2:00:35"}
|
| 388 |
+
{"current_steps": 3880, "total_steps": 4874, "loss": 0.4272, "lr": 1.216991134436151e-07, "epoch": 1.5921214608124743, "percentage": 79.61, "elapsed_time": "7:45:55", "remaining_time": "1:59:21"}
|
| 389 |
+
{"current_steps": 3890, "total_steps": 4874, "loss": 0.4347, "lr": 1.1936705504540684e-07, "epoch": 1.5962248666393106, "percentage": 79.81, "elapsed_time": "7:47:08", "remaining_time": "1:58:09"}
|
| 390 |
+
{"current_steps": 3900, "total_steps": 4874, "loss": 0.4363, "lr": 1.1705452506279545e-07, "epoch": 1.600328272466147, "percentage": 80.02, "elapsed_time": "7:48:19", "remaining_time": "1:56:57"}
|
| 391 |
+
{"current_steps": 3910, "total_steps": 4874, "loss": 0.4067, "lr": 1.1476164214038891e-07, "epoch": 1.604431678292983, "percentage": 80.22, "elapsed_time": "7:49:30", "remaining_time": "1:55:45"}
|
| 392 |
+
{"current_steps": 3920, "total_steps": 4874, "loss": 0.4286, "lr": 1.1248852391480047e-07, "epoch": 1.6085350841198194, "percentage": 80.43, "elapsed_time": "7:50:40", "remaining_time": "1:54:32"}
|
| 393 |
+
{"current_steps": 3930, "total_steps": 4874, "loss": 0.4155, "lr": 1.1023528700861384e-07, "epoch": 1.6126384899466557, "percentage": 80.63, "elapsed_time": "7:51:54", "remaining_time": "1:53:21"}
|
| 394 |
+
{"current_steps": 3940, "total_steps": 4874, "loss": 0.485, "lr": 1.0800204702439935e-07, "epoch": 1.616741895773492, "percentage": 80.84, "elapsed_time": "7:53:08", "remaining_time": "1:52:09"}
|
| 395 |
+
{"current_steps": 3950, "total_steps": 4874, "loss": 0.4327, "lr": 1.0578891853878264e-07, "epoch": 1.6208453016003284, "percentage": 81.04, "elapsed_time": "7:54:19", "remaining_time": "1:50:57"}
|
| 396 |
+
{"current_steps": 3960, "total_steps": 4874, "loss": 0.4414, "lr": 1.0359601509656723e-07, "epoch": 1.6249487074271647, "percentage": 81.25, "elapsed_time": "7:55:33", "remaining_time": "1:49:45"}
|
| 397 |
+
{"current_steps": 3970, "total_steps": 4874, "loss": 0.4014, "lr": 1.0142344920490787e-07, "epoch": 1.6290521132540008, "percentage": 81.45, "elapsed_time": "7:56:45", "remaining_time": "1:48:33"}
|
| 398 |
+
{"current_steps": 3980, "total_steps": 4874, "loss": 0.4124, "lr": 9.927133232753976e-08, "epoch": 1.6331555190808371, "percentage": 81.66, "elapsed_time": "7:57:56", "remaining_time": "1:47:21"}
|
| 399 |
+
{"current_steps": 3990, "total_steps": 4874, "loss": 0.4061, "lr": 9.71397748790585e-08, "epoch": 1.6372589249076732, "percentage": 81.86, "elapsed_time": "7:59:09", "remaining_time": "1:46:09"}
|
| 400 |
+
{"current_steps": 4000, "total_steps": 4874, "loss": 0.4102, "lr": 9.502888621925626e-08, "epoch": 1.6413623307345095, "percentage": 82.07, "elapsed_time": "8:00:22", "remaining_time": "1:44:57"}
|
| 401 |
+
{"current_steps": 4010, "total_steps": 4874, "loss": 0.3875, "lr": 9.293877464751076e-08, "epoch": 1.6454657365613459, "percentage": 82.27, "elapsed_time": "8:05:44", "remaining_time": "1:44:39"}
|
| 402 |
+
{"current_steps": 4020, "total_steps": 4874, "loss": 0.4093, "lr": 9.086954739722869e-08, "epoch": 1.6495691423881822, "percentage": 82.48, "elapsed_time": "8:06:56", "remaining_time": "1:43:26"}
|
| 403 |
+
{"current_steps": 4030, "total_steps": 4874, "loss": 0.4095, "lr": 8.882131063034426e-08, "epoch": 1.6536725482150185, "percentage": 82.68, "elapsed_time": "8:08:05", "remaining_time": "1:42:13"}
|
| 404 |
+
{"current_steps": 4040, "total_steps": 4874, "loss": 0.3839, "lr": 8.67941694318729e-08, "epoch": 1.6577759540418548, "percentage": 82.89, "elapsed_time": "8:09:15", "remaining_time": "1:40:59"}
|
| 405 |
+
{"current_steps": 4050, "total_steps": 4874, "loss": 0.42, "lr": 8.478822780451917e-08, "epoch": 1.6618793598686912, "percentage": 83.09, "elapsed_time": "8:10:28", "remaining_time": "1:39:47"}
|
| 406 |
+
{"current_steps": 4060, "total_steps": 4874, "loss": 0.4056, "lr": 8.28035886633417e-08, "epoch": 1.6659827656955273, "percentage": 83.3, "elapsed_time": "8:11:37", "remaining_time": "1:38:34"}
|
| 407 |
+
{"current_steps": 4070, "total_steps": 4874, "loss": 0.4272, "lr": 8.084035383047222e-08, "epoch": 1.6700861715223636, "percentage": 83.5, "elapsed_time": "8:12:49", "remaining_time": "1:37:21"}
|
| 408 |
+
{"current_steps": 4080, "total_steps": 4874, "loss": 0.4611, "lr": 7.88986240298925e-08, "epoch": 1.6741895773491997, "percentage": 83.71, "elapsed_time": "8:14:00", "remaining_time": "1:36:08"}
|
| 409 |
+
{"current_steps": 4090, "total_steps": 4874, "loss": 0.4232, "lr": 7.697849888226605e-08, "epoch": 1.678292983176036, "percentage": 83.91, "elapsed_time": "8:15:12", "remaining_time": "1:34:55"}
|
| 410 |
+
{"current_steps": 4100, "total_steps": 4874, "loss": 0.4206, "lr": 7.508007689982715e-08, "epoch": 1.6823963890028724, "percentage": 84.12, "elapsed_time": "8:16:21", "remaining_time": "1:33:42"}
|
| 411 |
+
{"current_steps": 4110, "total_steps": 4874, "loss": 0.4029, "lr": 7.320345548132678e-08, "epoch": 1.6864997948297087, "percentage": 84.32, "elapsed_time": "8:17:34", "remaining_time": "1:32:29"}
|
| 412 |
+
{"current_steps": 4120, "total_steps": 4874, "loss": 0.3938, "lr": 7.134873090703586e-08, "epoch": 1.690603200656545, "percentage": 84.53, "elapsed_time": "8:18:47", "remaining_time": "1:31:17"}
|
| 413 |
+
{"current_steps": 4130, "total_steps": 4874, "loss": 0.4319, "lr": 6.951599833380478e-08, "epoch": 1.6947066064833813, "percentage": 84.74, "elapsed_time": "8:19:59", "remaining_time": "1:30:04"}
|
| 414 |
+
{"current_steps": 4140, "total_steps": 4874, "loss": 0.4568, "lr": 6.770535179018228e-08, "epoch": 1.6988100123102174, "percentage": 84.94, "elapsed_time": "8:21:13", "remaining_time": "1:28:51"}
|
| 415 |
+
{"current_steps": 4150, "total_steps": 4874, "loss": 0.418, "lr": 6.591688417159091e-08, "epoch": 1.7029134181370538, "percentage": 85.15, "elapsed_time": "8:22:26", "remaining_time": "1:27:39"}
|
| 416 |
+
{"current_steps": 4160, "total_steps": 4874, "loss": 0.4234, "lr": 6.415068723556066e-08, "epoch": 1.7070168239638899, "percentage": 85.35, "elapsed_time": "8:23:38", "remaining_time": "1:26:26"}
|
| 417 |
+
{"current_steps": 4170, "total_steps": 4874, "loss": 0.4304, "lr": 6.240685159702203e-08, "epoch": 1.7111202297907262, "percentage": 85.56, "elapsed_time": "8:24:50", "remaining_time": "1:25:13"}
|
| 418 |
+
{"current_steps": 4180, "total_steps": 4874, "loss": 0.4771, "lr": 6.068546672365643e-08, "epoch": 1.7152236356175625, "percentage": 85.76, "elapsed_time": "8:26:01", "remaining_time": "1:24:00"}
|
| 419 |
+
{"current_steps": 4190, "total_steps": 4874, "loss": 0.4194, "lr": 5.898662093130618e-08, "epoch": 1.7193270414443989, "percentage": 85.97, "elapsed_time": "8:27:16", "remaining_time": "1:22:48"}
|
| 420 |
+
{"current_steps": 4200, "total_steps": 4874, "loss": 0.4224, "lr": 5.7310401379443987e-08, "epoch": 1.7234304472712352, "percentage": 86.17, "elapsed_time": "8:28:30", "remaining_time": "1:21:36"}
|
| 421 |
+
{"current_steps": 4210, "total_steps": 4874, "loss": 0.4054, "lr": 5.5656894066700264e-08, "epoch": 1.7275338530980715, "percentage": 86.38, "elapsed_time": "8:29:42", "remaining_time": "1:20:23"}
|
| 422 |
+
{"current_steps": 4220, "total_steps": 4874, "loss": 0.4129, "lr": 5.4026183826451746e-08, "epoch": 1.7316372589249078, "percentage": 86.58, "elapsed_time": "8:30:53", "remaining_time": "1:19:10"}
|
| 423 |
+
{"current_steps": 4230, "total_steps": 4874, "loss": 0.4162, "lr": 5.2418354322468884e-08, "epoch": 1.735740664751744, "percentage": 86.79, "elapsed_time": "8:32:07", "remaining_time": "1:17:58"}
|
| 424 |
+
{"current_steps": 4240, "total_steps": 4874, "loss": 0.4289, "lr": 5.083348804462312e-08, "epoch": 1.7398440705785803, "percentage": 86.99, "elapsed_time": "8:33:17", "remaining_time": "1:16:45"}
|
| 425 |
+
{"current_steps": 4250, "total_steps": 4874, "loss": 0.3888, "lr": 4.927166630465534e-08, "epoch": 1.7439474764054164, "percentage": 87.2, "elapsed_time": "8:34:25", "remaining_time": "1:15:31"}
|
| 426 |
+
{"current_steps": 4260, "total_steps": 4874, "loss": 0.3999, "lr": 4.773296923200371e-08, "epoch": 1.7480508822322527, "percentage": 87.4, "elapsed_time": "8:35:38", "remaining_time": "1:14:19"}
|
| 427 |
+
{"current_steps": 4270, "total_steps": 4874, "loss": 0.4038, "lr": 4.621747576969259e-08, "epoch": 1.752154288059089, "percentage": 87.61, "elapsed_time": "8:36:49", "remaining_time": "1:13:06"}
|
| 428 |
+
{"current_steps": 4280, "total_steps": 4874, "loss": 0.3873, "lr": 4.47252636702829e-08, "epoch": 1.7562576938859253, "percentage": 87.81, "elapsed_time": "8:38:02", "remaining_time": "1:11:53"}
|
| 429 |
+
{"current_steps": 4290, "total_steps": 4874, "loss": 0.4061, "lr": 4.325640949188225e-08, "epoch": 1.7603610997127617, "percentage": 88.02, "elapsed_time": "8:39:10", "remaining_time": "1:10:40"}
|
| 430 |
+
{"current_steps": 4300, "total_steps": 4874, "loss": 0.4481, "lr": 4.181098859421789e-08, "epoch": 1.764464505539598, "percentage": 88.22, "elapsed_time": "8:40:18", "remaining_time": "1:09:27"}
|
| 431 |
+
{"current_steps": 4310, "total_steps": 4874, "loss": 0.4194, "lr": 4.0389075134769856e-08, "epoch": 1.768567911366434, "percentage": 88.43, "elapsed_time": "8:41:31", "remaining_time": "1:08:14"}
|
| 432 |
+
{"current_steps": 4320, "total_steps": 4874, "loss": 0.4082, "lr": 3.899074206496616e-08, "epoch": 1.7726713171932704, "percentage": 88.63, "elapsed_time": "8:42:44", "remaining_time": "1:07:02"}
|
| 433 |
+
{"current_steps": 4330, "total_steps": 4874, "loss": 0.4287, "lr": 3.761606112644089e-08, "epoch": 1.7767747230201065, "percentage": 88.84, "elapsed_time": "8:43:58", "remaining_time": "1:05:49"}
|
| 434 |
+
{"current_steps": 4340, "total_steps": 4874, "loss": 0.4471, "lr": 3.626510284735229e-08, "epoch": 1.7808781288469429, "percentage": 89.04, "elapsed_time": "8:45:09", "remaining_time": "1:04:36"}
|
| 435 |
+
{"current_steps": 4350, "total_steps": 4874, "loss": 0.4098, "lr": 3.4937936538765256e-08, "epoch": 1.7849815346737792, "percentage": 89.25, "elapsed_time": "8:46:20", "remaining_time": "1:03:24"}
|
| 436 |
+
{"current_steps": 4360, "total_steps": 4874, "loss": 0.4142, "lr": 3.363463029109498e-08, "epoch": 1.7890849405006155, "percentage": 89.45, "elapsed_time": "8:47:31", "remaining_time": "1:02:11"}
|
| 437 |
+
{"current_steps": 4370, "total_steps": 4874, "loss": 0.4015, "lr": 3.2355250970613533e-08, "epoch": 1.7931883463274518, "percentage": 89.66, "elapsed_time": "8:48:42", "remaining_time": "1:00:58"}
|
| 438 |
+
{"current_steps": 4380, "total_steps": 4874, "loss": 0.4237, "lr": 3.1099864216019345e-08, "epoch": 1.7972917521542882, "percentage": 89.86, "elapsed_time": "8:49:53", "remaining_time": "0:59:45"}
|
| 439 |
+
{"current_steps": 4390, "total_steps": 4874, "loss": 0.3938, "lr": 2.986853443506954e-08, "epoch": 1.8013951579811245, "percentage": 90.07, "elapsed_time": "8:51:05", "remaining_time": "0:58:33"}
|
| 440 |
+
{"current_steps": 4400, "total_steps": 4874, "loss": 0.4114, "lr": 2.8661324801275422e-08, "epoch": 1.8054985638079606, "percentage": 90.27, "elapsed_time": "8:52:15", "remaining_time": "0:57:20"}
|
| 441 |
+
{"current_steps": 4410, "total_steps": 4874, "loss": 0.4366, "lr": 2.747829725066181e-08, "epoch": 1.809601969634797, "percentage": 90.48, "elapsed_time": "8:53:29", "remaining_time": "0:56:07"}
|
| 442 |
+
{"current_steps": 4420, "total_steps": 4874, "loss": 0.4087, "lr": 2.6319512478588657e-08, "epoch": 1.813705375461633, "percentage": 90.69, "elapsed_time": "8:54:42", "remaining_time": "0:54:55"}
|
| 443 |
+
{"current_steps": 4430, "total_steps": 4874, "loss": 0.4451, "lr": 2.518502993663768e-08, "epoch": 1.8178087812884693, "percentage": 90.89, "elapsed_time": "8:55:53", "remaining_time": "0:53:42"}
|
| 444 |
+
{"current_steps": 4440, "total_steps": 4874, "loss": 0.4727, "lr": 2.4074907829561952e-08, "epoch": 1.8219121871153057, "percentage": 91.1, "elapsed_time": "8:57:07", "remaining_time": "0:52:30"}
|
| 445 |
+
{"current_steps": 4450, "total_steps": 4874, "loss": 0.4531, "lr": 2.298920311229968e-08, "epoch": 1.826015592942142, "percentage": 91.3, "elapsed_time": "8:58:19", "remaining_time": "0:51:17"}
|
| 446 |
+
{"current_steps": 4460, "total_steps": 4874, "loss": 0.4263, "lr": 2.1927971487052276e-08, "epoch": 1.8301189987689783, "percentage": 91.51, "elapsed_time": "8:59:31", "remaining_time": "0:50:04"}
|
| 447 |
+
{"current_steps": 4470, "total_steps": 4874, "loss": 0.424, "lr": 2.089126740042635e-08, "epoch": 1.8342224045958146, "percentage": 91.71, "elapsed_time": "9:00:41", "remaining_time": "0:48:52"}
|
| 448 |
+
{"current_steps": 4480, "total_steps": 4874, "loss": 0.4132, "lr": 1.9879144040640338e-08, "epoch": 1.8383258104226508, "percentage": 91.92, "elapsed_time": "9:01:54", "remaining_time": "0:47:39"}
|
| 449 |
+
{"current_steps": 4490, "total_steps": 4874, "loss": 0.4193, "lr": 1.889165333479592e-08, "epoch": 1.842429216249487, "percentage": 92.12, "elapsed_time": "9:03:07", "remaining_time": "0:46:27"}
|
| 450 |
+
{"current_steps": 4500, "total_steps": 4874, "loss": 0.4038, "lr": 1.792884594621358e-08, "epoch": 1.8465326220763232, "percentage": 92.33, "elapsed_time": "9:04:20", "remaining_time": "0:45:14"}
|
| 451 |
+
{"current_steps": 4510, "total_steps": 4874, "loss": 0.4046, "lr": 1.6990771271833572e-08, "epoch": 1.8506360279031595, "percentage": 92.53, "elapsed_time": "9:05:31", "remaining_time": "0:44:01"}
|
| 452 |
+
{"current_steps": 4520, "total_steps": 4874, "loss": 0.4446, "lr": 1.607747743968152e-08, "epoch": 1.8547394337299958, "percentage": 92.74, "elapsed_time": "9:06:39", "remaining_time": "0:42:48"}
|
| 453 |
+
{"current_steps": 4530, "total_steps": 4874, "loss": 0.4119, "lr": 1.5189011306398937e-08, "epoch": 1.8588428395568322, "percentage": 92.94, "elapsed_time": "9:07:50", "remaining_time": "0:41:36"}
|
| 454 |
+
{"current_steps": 4540, "total_steps": 4874, "loss": 0.4687, "lr": 1.4325418454839866e-08, "epoch": 1.8629462453836685, "percentage": 93.15, "elapsed_time": "9:09:01", "remaining_time": "0:40:23"}
|
| 455 |
+
{"current_steps": 4550, "total_steps": 4874, "loss": 0.4278, "lr": 1.3486743191731487e-08, "epoch": 1.8670496512105048, "percentage": 93.35, "elapsed_time": "9:10:12", "remaining_time": "0:39:10"}
|
| 456 |
+
{"current_steps": 4560, "total_steps": 4874, "loss": 0.4389, "lr": 1.2673028545401531e-08, "epoch": 1.8711530570373411, "percentage": 93.56, "elapsed_time": "9:11:23", "remaining_time": "0:37:58"}
|
| 457 |
+
{"current_steps": 4570, "total_steps": 4874, "loss": 0.4103, "lr": 1.188431626357056e-08, "epoch": 1.8752564628641772, "percentage": 93.76, "elapsed_time": "9:12:34", "remaining_time": "0:36:45"}
|
| 458 |
+
{"current_steps": 4580, "total_steps": 4874, "loss": 0.3892, "lr": 1.1120646811209888e-08, "epoch": 1.8793598686910136, "percentage": 93.97, "elapsed_time": "9:13:44", "remaining_time": "0:35:32"}
|
| 459 |
+
{"current_steps": 4590, "total_steps": 4874, "loss": 0.4355, "lr": 1.03820593684657e-08, "epoch": 1.8834632745178497, "percentage": 94.17, "elapsed_time": "9:14:55", "remaining_time": "0:34:20"}
|
| 460 |
+
{"current_steps": 4600, "total_steps": 4874, "loss": 0.4332, "lr": 9.668591828649097e-09, "epoch": 1.887566680344686, "percentage": 94.38, "elapsed_time": "9:16:07", "remaining_time": "0:33:07"}
|
| 461 |
+
{"current_steps": 4610, "total_steps": 4874, "loss": 0.3732, "lr": 8.980280796291429e-09, "epoch": 1.8916700861715223, "percentage": 94.58, "elapsed_time": "9:17:18", "remaining_time": "0:31:54"}
|
| 462 |
+
{"current_steps": 4620, "total_steps": 4874, "loss": 0.403, "lr": 8.317161585266963e-09, "epoch": 1.8957734919983587, "percentage": 94.79, "elapsed_time": "9:18:32", "remaining_time": "0:30:42"}
|
| 463 |
+
{"current_steps": 4630, "total_steps": 4874, "loss": 0.4237, "lr": 7.679268216980506e-09, "epoch": 1.899876897825195, "percentage": 94.99, "elapsed_time": "9:19:45", "remaining_time": "0:29:29"}
|
| 464 |
+
{"current_steps": 4640, "total_steps": 4874, "loss": 0.4611, "lr": 7.066633418622236e-09, "epoch": 1.9039803036520313, "percentage": 95.2, "elapsed_time": "9:20:57", "remaining_time": "0:28:17"}
|
| 465 |
+
{"current_steps": 4650, "total_steps": 4874, "loss": 0.4264, "lr": 6.479288621488832e-09, "epoch": 1.9080837094788674, "percentage": 95.4, "elapsed_time": "9:22:09", "remaining_time": "0:27:04"}
|
| 466 |
+
{"current_steps": 4660, "total_steps": 4874, "loss": 0.4177, "lr": 5.917263959370311e-09, "epoch": 1.9121871153057037, "percentage": 95.61, "elapsed_time": "9:23:18", "remaining_time": "0:25:52"}
|
| 467 |
+
{"current_steps": 4670, "total_steps": 4874, "loss": 0.4231, "lr": 5.3805882670045485e-09, "epoch": 1.91629052113254, "percentage": 95.81, "elapsed_time": "9:24:29", "remaining_time": "0:24:39"}
|
| 468 |
+
{"current_steps": 4680, "total_steps": 4874, "loss": 0.3936, "lr": 4.8692890785977935e-09, "epoch": 1.9203939269593762, "percentage": 96.02, "elapsed_time": "9:25:42", "remaining_time": "0:23:27"}
|
| 469 |
+
{"current_steps": 4690, "total_steps": 4874, "loss": 0.4007, "lr": 4.383392626411575e-09, "epoch": 1.9244973327862125, "percentage": 96.22, "elapsed_time": "9:26:54", "remaining_time": "0:22:14"}
|
| 470 |
+
{"current_steps": 4700, "total_steps": 4874, "loss": 0.4008, "lr": 3.922923839417613e-09, "epoch": 1.9286007386130488, "percentage": 96.43, "elapsed_time": "9:28:05", "remaining_time": "0:21:01"}
|
| 471 |
+
{"current_steps": 4710, "total_steps": 4874, "loss": 0.4236, "lr": 3.487906342018232e-09, "epoch": 1.9327041444398851, "percentage": 96.64, "elapsed_time": "9:29:19", "remaining_time": "0:19:49"}
|
| 472 |
+
{"current_steps": 4720, "total_steps": 4874, "loss": 0.4131, "lr": 3.0783624528344933e-09, "epoch": 1.9368075502667215, "percentage": 96.84, "elapsed_time": "9:30:30", "remaining_time": "0:18:36"}
|
| 473 |
+
{"current_steps": 4730, "total_steps": 4874, "loss": 0.4025, "lr": 2.694313183561225e-09, "epoch": 1.9409109560935578, "percentage": 97.05, "elapsed_time": "9:31:41", "remaining_time": "0:17:24"}
|
| 474 |
+
{"current_steps": 4740, "total_steps": 4874, "loss": 0.3995, "lr": 2.335778237888941e-09, "epoch": 1.945014361920394, "percentage": 97.25, "elapsed_time": "9:32:54", "remaining_time": "0:16:11"}
|
| 475 |
+
{"current_steps": 4750, "total_steps": 4874, "loss": 0.4225, "lr": 2.0027760104929237e-09, "epoch": 1.9491177677472302, "percentage": 97.46, "elapsed_time": "9:34:04", "remaining_time": "0:14:59"}
|
| 476 |
+
{"current_steps": 4760, "total_steps": 4874, "loss": 0.4118, "lr": 1.695323586089481e-09, "epoch": 1.9532211735740663, "percentage": 97.66, "elapsed_time": "9:35:15", "remaining_time": "0:13:46"}
|
| 477 |
+
{"current_steps": 4770, "total_steps": 4874, "loss": 0.4224, "lr": 1.4134367385594815e-09, "epoch": 1.9573245794009027, "percentage": 97.87, "elapsed_time": "9:36:29", "remaining_time": "0:12:34"}
|
| 478 |
+
{"current_steps": 4780, "total_steps": 4874, "loss": 0.3956, "lr": 1.157129930139056e-09, "epoch": 1.961427985227739, "percentage": 98.07, "elapsed_time": "9:37:40", "remaining_time": "0:11:21"}
|
| 479 |
+
{"current_steps": 4790, "total_steps": 4874, "loss": 0.4388, "lr": 9.264163106774137e-10, "epoch": 1.9655313910545753, "percentage": 98.28, "elapsed_time": "9:38:50", "remaining_time": "0:10:09"}
|
| 480 |
+
{"current_steps": 4800, "total_steps": 4874, "loss": 0.4074, "lr": 7.213077169625492e-10, "epoch": 1.9696347968814116, "percentage": 98.48, "elapsed_time": "9:40:03", "remaining_time": "0:08:56"}
|
| 481 |
+
{"current_steps": 4810, "total_steps": 4874, "loss": 0.4761, "lr": 5.418146721136163e-10, "epoch": 1.973738202708248, "percentage": 98.69, "elapsed_time": "9:41:17", "remaining_time": "0:07:44"}
|
| 482 |
+
{"current_steps": 4820, "total_steps": 4874, "loss": 0.4033, "lr": 3.8794638504136003e-10, "epoch": 1.9778416085350843, "percentage": 98.89, "elapsed_time": "9:42:29", "remaining_time": "0:06:31"}
|
| 483 |
+
{"current_steps": 4830, "total_steps": 4874, "loss": 0.3972, "lr": 2.5971074997532816e-10, "epoch": 1.9819450143619204, "percentage": 99.1, "elapsed_time": "9:43:43", "remaining_time": "0:05:19"}
|
| 484 |
+
{"current_steps": 4840, "total_steps": 4874, "loss": 0.476, "lr": 1.5711434605908401e-10, "epoch": 1.9860484201887567, "percentage": 99.3, "elapsed_time": "9:44:57", "remaining_time": "0:04:06"}
|
| 485 |
+
{"current_steps": 4850, "total_steps": 4874, "loss": 0.4148, "lr": 8.016243701242099e-11, "epoch": 1.9901518260155928, "percentage": 99.51, "elapsed_time": "9:46:11", "remaining_time": "0:02:54"}
|
| 486 |
+
{"current_steps": 4860, "total_steps": 4874, "loss": 0.4305, "lr": 2.8858970861744824e-11, "epoch": 1.9942552318424291, "percentage": 99.71, "elapsed_time": "9:47:21", "remaining_time": "0:01:41"}
|
| 487 |
+
{"current_steps": 4870, "total_steps": 4874, "loss": 0.3936, "lr": 3.2065797370139923e-12, "epoch": 1.9983586376692655, "percentage": 99.92, "elapsed_time": "9:48:34", "remaining_time": "0:00:29"}
|
| 488 |
+
{"current_steps": 4874, "total_steps": 4874, "epoch": 2.0, "percentage": 100.0, "elapsed_time": "9:50:26", "remaining_time": "0:00:00"}
|
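The trainer_log.jsonl entries above are self-contained JSON objects, one per logging step, so they can be inspected without any training framework. Below is a minimal sketch (an illustration, not code shipped in this repository) that parses the log with Python and plots the loss and learning-rate curves; the matplotlib dependency and the output filename are assumptions, and this is not necessarily how the repository's training_loss.png was produced. The final summary record (the last line above) omits "loss" and "lr", which is why records are filtered on the "loss" key.

```python
import json

import matplotlib.pyplot as plt  # assumed plotting dependency

steps, losses, lrs = [], [], []
with open("trainer_log.jsonl", "r", encoding="utf-8") as f:
    for line in f:
        line = line.strip()
        if not line:
            continue
        record = json.loads(line)
        # The final summary record has no "loss"/"lr", so skip it here.
        if "loss" not in record:
            continue
        steps.append(record["current_steps"])
        losses.append(record["loss"])
        lrs.append(record["lr"])

fig, ax_loss = plt.subplots(figsize=(8, 4))
ax_loss.plot(steps, losses, label="training loss")
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")

ax_lr = ax_loss.twinx()  # second y-axis for the learning-rate schedule
ax_lr.plot(steps, lrs, color="tab:orange", alpha=0.6)
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
fig.savefig("loss_curve.png")  # hypothetical output name
```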
trainer_state.json
ADDED
|
@@ -0,0 +1,3452 @@
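Before the full listing, a note on structure: trainer_state.json duplicates the per-step metrics in its "log_history" array and adds run-level fields such as "global_step", "epoch", and "best_model_checkpoint". The sketch below (again an illustration under the assumptions stated in the comments, not repository code) loads the state file and flags logging steps with unusually large gradient norms; the 1000.0 threshold is an arbitrary example value.

```python
import json

with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# Run-level fields stored at the top of the file.
print("global_step:", state["global_step"], "epoch:", state["epoch"])

# Each log_history entry mirrors one logging step: epoch, grad_norm,
# learning_rate, loss, step. Flag steps with an unusually large gradient norm.
for entry in state["log_history"]:
    if entry.get("grad_norm", 0.0) > 1000.0:  # arbitrary illustrative threshold
        print(f"step {entry['step']}: grad_norm={entry['grad_norm']:.1f}, loss={entry['loss']}")
```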
|
|
| 1 |
+
{
|
| 2 |
+
"best_global_step": null,
|
| 3 |
+
"best_metric": null,
|
| 4 |
+
"best_model_checkpoint": null,
|
| 5 |
+
"epoch": 2.0,
|
| 6 |
+
"eval_steps": 500,
|
| 7 |
+
"global_step": 4874,
|
| 8 |
+
"is_hyper_param_search": false,
|
| 9 |
+
"is_local_process_zero": true,
|
| 10 |
+
"is_world_process_zero": true,
|
| 11 |
+
"log_history": [
|
| 12 |
+
{
|
| 13 |
+
"epoch": 0.004103405826836274,
|
| 14 |
+
"grad_norm": 182.4220800951539,
|
| 15 |
+
"learning_rate": 1.844262295081967e-08,
|
| 16 |
+
"loss": 1.3123,
|
| 17 |
+
"step": 10
|
| 18 |
+
},
|
| 19 |
+
{
|
| 20 |
+
"epoch": 0.008206811653672548,
|
| 21 |
+
"grad_norm": 146.12277272125723,
|
| 22 |
+
"learning_rate": 3.8934426229508196e-08,
|
| 23 |
+
"loss": 1.2103,
|
| 24 |
+
"step": 20
|
| 25 |
+
},
|
| 26 |
+
{
|
| 27 |
+
"epoch": 0.012310217480508822,
|
| 28 |
+
"grad_norm": 194.53163264532577,
|
| 29 |
+
"learning_rate": 5.9426229508196716e-08,
|
| 30 |
+
"loss": 1.2461,
|
| 31 |
+
"step": 30
|
| 32 |
+
},
|
| 33 |
+
{
|
| 34 |
+
"epoch": 0.016413623307345096,
|
| 35 |
+
"grad_norm": 90.26230143147222,
|
| 36 |
+
"learning_rate": 7.991803278688524e-08,
|
| 37 |
+
"loss": 1.2492,
|
| 38 |
+
"step": 40
|
| 39 |
+
},
|
| 40 |
+
{
|
| 41 |
+
"epoch": 0.02051702913418137,
|
| 42 |
+
"grad_norm": 113.36189954995595,
|
| 43 |
+
"learning_rate": 1.0040983606557377e-07,
|
| 44 |
+
"loss": 1.1791,
|
| 45 |
+
"step": 50
|
| 46 |
+
},
|
| 47 |
+
{
|
| 48 |
+
"epoch": 0.024620434961017644,
|
| 49 |
+
"grad_norm": 128.04363673552712,
|
| 50 |
+
"learning_rate": 1.209016393442623e-07,
|
| 51 |
+
"loss": 1.0959,
|
| 52 |
+
"step": 60
|
| 53 |
+
},
|
| 54 |
+
{
|
| 55 |
+
"epoch": 0.028723840787853918,
|
| 56 |
+
"grad_norm": 1492.119262678329,
|
| 57 |
+
"learning_rate": 1.413934426229508e-07,
|
| 58 |
+
"loss": 1.0009,
|
| 59 |
+
"step": 70
|
| 60 |
+
},
|
| 61 |
+
{
|
| 62 |
+
"epoch": 0.03282724661469019,
|
| 63 |
+
"grad_norm": 82.77338945102021,
|
| 64 |
+
"learning_rate": 1.6188524590163935e-07,
|
| 65 |
+
"loss": 0.8824,
|
| 66 |
+
"step": 80
|
| 67 |
+
},
|
| 68 |
+
{
|
| 69 |
+
"epoch": 0.03693065244152647,
|
| 70 |
+
"grad_norm": 236.91752351940642,
|
| 71 |
+
"learning_rate": 1.8237704918032787e-07,
|
| 72 |
+
"loss": 0.8539,
|
| 73 |
+
"step": 90
|
| 74 |
+
},
|
| 75 |
+
{
|
| 76 |
+
"epoch": 0.04103405826836274,
|
| 77 |
+
"grad_norm": 189.02428507183598,
|
| 78 |
+
"learning_rate": 2.028688524590164e-07,
|
| 79 |
+
"loss": 0.8715,
|
| 80 |
+
"step": 100
|
| 81 |
+
},
|
| 82 |
+
{
|
| 83 |
+
"epoch": 0.04513746409519902,
|
| 84 |
+
"grad_norm": 107.26362960595385,
|
| 85 |
+
"learning_rate": 2.233606557377049e-07,
|
| 86 |
+
"loss": 0.7815,
|
| 87 |
+
"step": 110
|
| 88 |
+
},
|
| 89 |
+
{
|
| 90 |
+
"epoch": 0.04924086992203529,
|
| 91 |
+
"grad_norm": 75.40514943661051,
|
| 92 |
+
"learning_rate": 2.438524590163934e-07,
|
| 93 |
+
"loss": 0.7836,
|
| 94 |
+
"step": 120
|
| 95 |
+
},
|
| 96 |
+
{
|
| 97 |
+
"epoch": 0.053344275748871565,
|
| 98 |
+
"grad_norm": 47.87811381157323,
|
| 99 |
+
"learning_rate": 2.643442622950819e-07,
|
| 100 |
+
"loss": 0.7828,
|
| 101 |
+
"step": 130
|
| 102 |
+
},
|
| 103 |
+
{
|
| 104 |
+
"epoch": 0.057447681575707836,
|
| 105 |
+
"grad_norm": 31.20870665945522,
|
| 106 |
+
"learning_rate": 2.848360655737705e-07,
|
| 107 |
+
"loss": 0.7648,
|
| 108 |
+
"step": 140
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"epoch": 0.061551087402544113,
|
| 112 |
+
"grad_norm": 81.71967711663402,
|
| 113 |
+
"learning_rate": 3.05327868852459e-07,
|
| 114 |
+
"loss": 0.7652,
|
| 115 |
+
"step": 150
|
| 116 |
+
},
|
| 117 |
+
{
|
| 118 |
+
"epoch": 0.06565449322938038,
|
| 119 |
+
"grad_norm": 161.4265084563511,
|
| 120 |
+
"learning_rate": 3.258196721311475e-07,
|
| 121 |
+
"loss": 0.7392,
|
| 122 |
+
"step": 160
|
| 123 |
+
},
|
| 124 |
+
{
|
| 125 |
+
"epoch": 0.06975789905621665,
|
| 126 |
+
"grad_norm": 35.18290498208787,
|
| 127 |
+
"learning_rate": 3.463114754098361e-07,
|
| 128 |
+
"loss": 0.7627,
|
| 129 |
+
"step": 170
|
| 130 |
+
},
|
| 131 |
+
{
|
| 132 |
+
"epoch": 0.07386130488305294,
|
| 133 |
+
"grad_norm": 327.98114328321805,
|
| 134 |
+
"learning_rate": 3.6680327868852456e-07,
|
| 135 |
+
"loss": 0.7051,
|
| 136 |
+
"step": 180
|
| 137 |
+
},
|
| 138 |
+
{
|
| 139 |
+
"epoch": 0.07796471070988921,
|
| 140 |
+
"grad_norm": 90.9567629167306,
|
| 141 |
+
"learning_rate": 3.8729508196721314e-07,
|
| 142 |
+
"loss": 0.722,
|
| 143 |
+
"step": 190
|
| 144 |
+
},
|
| 145 |
+
{
|
| 146 |
+
"epoch": 0.08206811653672548,
|
| 147 |
+
"grad_norm": 104.17728838677859,
|
| 148 |
+
"learning_rate": 4.077868852459016e-07,
|
| 149 |
+
"loss": 0.711,
|
| 150 |
+
"step": 200
|
| 151 |
+
},
|
| 152 |
+
{
|
| 153 |
+
"epoch": 0.08617152236356175,
|
| 154 |
+
"grad_norm": 61.56737474314141,
|
| 155 |
+
"learning_rate": 4.2827868852459014e-07,
|
| 156 |
+
"loss": 0.7414,
|
| 157 |
+
"step": 210
|
| 158 |
+
},
|
| 159 |
+
{
|
| 160 |
+
"epoch": 0.09027492819039803,
|
| 161 |
+
"grad_norm": 689.4808630058474,
|
| 162 |
+
"learning_rate": 4.487704918032787e-07,
|
| 163 |
+
"loss": 0.7169,
|
| 164 |
+
"step": 220
|
| 165 |
+
},
|
| 166 |
+
{
|
| 167 |
+
"epoch": 0.0943783340172343,
|
| 168 |
+
"grad_norm": 28.351376743452306,
|
| 169 |
+
"learning_rate": 4.692622950819672e-07,
|
| 170 |
+
"loss": 0.697,
|
| 171 |
+
"step": 230
|
| 172 |
+
},
|
| 173 |
+
{
|
| 174 |
+
"epoch": 0.09848173984407058,
|
| 175 |
+
"grad_norm": 58.234015989672606,
|
| 176 |
+
"learning_rate": 4.897540983606557e-07,
|
| 177 |
+
"loss": 0.714,
|
| 178 |
+
"step": 240
|
| 179 |
+
},
|
| 180 |
+
{
|
| 181 |
+
"epoch": 0.10258514567090685,
|
| 182 |
+
"grad_norm": 154.99700792671075,
|
| 183 |
+
"learning_rate": 5.102459016393442e-07,
|
| 184 |
+
"loss": 0.6683,
|
| 185 |
+
"step": 250
|
| 186 |
+
},
|
| 187 |
+
{
|
| 188 |
+
"epoch": 0.10668855149774313,
|
| 189 |
+
"grad_norm": 90.90195156431803,
|
| 190 |
+
"learning_rate": 5.307377049180327e-07,
|
| 191 |
+
"loss": 0.6644,
|
| 192 |
+
"step": 260
|
| 193 |
+
},
|
| 194 |
+
{
|
| 195 |
+
"epoch": 0.1107919573245794,
|
| 196 |
+
"grad_norm": 160.6164849765411,
|
| 197 |
+
"learning_rate": 5.512295081967213e-07,
|
| 198 |
+
"loss": 0.6607,
|
| 199 |
+
"step": 270
|
| 200 |
+
},
|
| 201 |
+
{
|
| 202 |
+
"epoch": 0.11489536315141567,
|
| 203 |
+
"grad_norm": 71.87364350201041,
|
| 204 |
+
"learning_rate": 5.717213114754098e-07,
|
| 205 |
+
"loss": 0.6528,
|
| 206 |
+
"step": 280
|
| 207 |
+
},
|
| 208 |
+
{
|
| 209 |
+
"epoch": 0.11899876897825194,
|
| 210 |
+
"grad_norm": 31.527708863782216,
|
| 211 |
+
"learning_rate": 5.922131147540983e-07,
|
| 212 |
+
"loss": 0.6636,
|
| 213 |
+
"step": 290
|
| 214 |
+
},
|
| 215 |
+
{
|
| 216 |
+
"epoch": 0.12310217480508823,
|
| 217 |
+
"grad_norm": 31.416881048763628,
|
| 218 |
+
"learning_rate": 6.127049180327869e-07,
|
| 219 |
+
"loss": 0.6363,
|
| 220 |
+
"step": 300
|
| 221 |
+
},
|
| 222 |
+
{
|
| 223 |
+
"epoch": 0.1272055806319245,
|
| 224 |
+
"grad_norm": 79.50223710514128,
|
| 225 |
+
"learning_rate": 6.331967213114754e-07,
|
| 226 |
+
"loss": 0.6598,
|
| 227 |
+
"step": 310
|
| 228 |
+
},
|
| 229 |
+
{
|
| 230 |
+
"epoch": 0.13130898645876077,
|
| 231 |
+
"grad_norm": 1848.248070941333,
|
| 232 |
+
"learning_rate": 6.536885245901639e-07,
|
| 233 |
+
"loss": 0.6326,
|
| 234 |
+
"step": 320
|
| 235 |
+
},
|
| 236 |
+
{
|
| 237 |
+
"epoch": 0.13541239228559704,
|
| 238 |
+
"grad_norm": 209.94936457714124,
|
| 239 |
+
"learning_rate": 6.741803278688525e-07,
|
| 240 |
+
"loss": 0.6157,
|
| 241 |
+
"step": 330
|
| 242 |
+
},
|
| 243 |
+
{
|
| 244 |
+
"epoch": 0.1395157981124333,
|
| 245 |
+
"grad_norm": 148.2894750579905,
|
| 246 |
+
"learning_rate": 6.94672131147541e-07,
|
| 247 |
[Training log history, steps 340–3200, logged every 10 steps; each entry records "epoch", "grad_norm", "learning_rate", "loss", and "step". Over this span the epoch advances from about 0.14 to 1.31, the learning rate ramps to its 1e-06 peak near step 490 and then decays to about 3.2e-07, the training loss falls from 0.6603 at step 340 to 0.4464 at step 3200, and the gradient norm stays mostly in the tens to low hundreds with occasional spikes above 1000 (e.g. 8154.8 at step 2000).]
|
| 2252 |
+
{
|
| 2253 |
+
"epoch": 1.317193270414444,
|
| 2254 |
+
"grad_norm": 28.116255844722104,
|
| 2255 |
+
"learning_rate": 3.1537843563916873e-07,
|
| 2256 |
+
"loss": 0.4455,
|
| 2257 |
+
"step": 3210
|
| 2258 |
+
},
|
| 2259 |
+
{
|
| 2260 |
+
"epoch": 1.3212966762412803,
|
| 2261 |
+
"grad_norm": 74.72169265223825,
|
| 2262 |
+
"learning_rate": 3.120548996163702e-07,
|
| 2263 |
+
"loss": 0.4462,
|
| 2264 |
+
"step": 3220
|
| 2265 |
+
},
|
| 2266 |
+
{
|
| 2267 |
+
"epoch": 1.3254000820681164,
|
| 2268 |
+
"grad_norm": 205.4712244975287,
|
| 2269 |
+
"learning_rate": 3.0874100613786064e-07,
|
| 2270 |
+
"loss": 0.44,
|
| 2271 |
+
"step": 3230
|
| 2272 |
+
},
|
| 2273 |
+
{
|
| 2274 |
+
"epoch": 1.3295034878949528,
|
| 2275 |
+
"grad_norm": 52.54943592136712,
|
| 2276 |
+
"learning_rate": 3.054369252233141e-07,
|
| 2277 |
+
"loss": 0.4088,
|
| 2278 |
+
"step": 3240
|
| 2279 |
+
},
|
| 2280 |
+
{
|
| 2281 |
+
"epoch": 1.333606893721789,
|
| 2282 |
+
"grad_norm": 154.7844667083448,
|
| 2283 |
+
"learning_rate": 3.0214282638896924e-07,
|
| 2284 |
+
"loss": 0.4339,
|
| 2285 |
+
"step": 3250
|
| 2286 |
+
},
|
| 2287 |
+
{
|
| 2288 |
+
"epoch": 1.3377102995486254,
|
| 2289 |
+
"grad_norm": 18.9723387624258,
|
| 2290 |
+
"learning_rate": 2.9885887863893386e-07,
|
| 2291 |
+
"loss": 0.4418,
|
| 2292 |
+
"step": 3260
|
| 2293 |
+
},
|
| 2294 |
+
{
|
| 2295 |
+
"epoch": 1.3418137053754617,
|
| 2296 |
+
"grad_norm": 321.0023153467011,
|
| 2297 |
+
"learning_rate": 2.955852504565122e-07,
|
| 2298 |
+
"loss": 0.4132,
|
| 2299 |
+
"step": 3270
|
| 2300 |
+
},
|
| 2301 |
+
{
|
| 2302 |
+
"epoch": 1.3459171112022978,
|
| 2303 |
+
"grad_norm": 97.71368859137935,
|
| 2304 |
+
"learning_rate": 2.923221097955625e-07,
|
| 2305 |
+
"loss": 0.469,
|
| 2306 |
+
"step": 3280
|
| 2307 |
+
},
|
| 2308 |
+
{
|
| 2309 |
+
"epoch": 1.3500205170291342,
|
| 2310 |
+
"grad_norm": 144.0089889661342,
|
| 2311 |
+
"learning_rate": 2.890696240718798e-07,
|
| 2312 |
+
"loss": 0.4514,
|
| 2313 |
+
"step": 3290
|
| 2314 |
+
},
|
| 2315 |
+
{
|
| 2316 |
+
"epoch": 1.3541239228559705,
|
| 2317 |
+
"grad_norm": 29.068490887008544,
|
| 2318 |
+
"learning_rate": 2.858279601546059e-07,
|
| 2319 |
+
"loss": 0.453,
|
| 2320 |
+
"step": 3300
|
| 2321 |
+
},
|
| 2322 |
+
{
|
| 2323 |
+
"epoch": 1.3582273286828066,
|
| 2324 |
+
"grad_norm": 83.43025317867443,
|
| 2325 |
+
"learning_rate": 2.825972843576685e-07,
|
| 2326 |
+
"loss": 0.4114,
|
| 2327 |
+
"step": 3310
|
| 2328 |
+
},
|
| 2329 |
+
{
|
| 2330 |
+
"epoch": 1.362330734509643,
|
| 2331 |
+
"grad_norm": 76.68603800033713,
|
| 2332 |
+
"learning_rate": 2.7937776243124934e-07,
|
| 2333 |
+
"loss": 0.4359,
|
| 2334 |
+
"step": 3320
|
| 2335 |
+
},
|
| 2336 |
+
{
|
| 2337 |
+
"epoch": 1.3664341403364793,
|
| 2338 |
+
"grad_norm": 79.58148941128266,
|
| 2339 |
+
"learning_rate": 2.761695595532787e-07,
|
| 2340 |
+
"loss": 0.4175,
|
| 2341 |
+
"step": 3330
|
| 2342 |
+
},
|
| 2343 |
+
{
|
| 2344 |
+
"epoch": 1.3705375461633156,
|
| 2345 |
+
"grad_norm": 40.75555964238306,
|
| 2346 |
+
"learning_rate": 2.729728403209624e-07,
|
| 2347 |
+
"loss": 0.4107,
|
| 2348 |
+
"step": 3340
|
| 2349 |
+
},
|
| 2350 |
+
{
|
| 2351 |
+
"epoch": 1.374640951990152,
|
| 2352 |
+
"grad_norm": 25.38771347910188,
|
| 2353 |
+
"learning_rate": 2.6978776874233664e-07,
|
| 2354 |
+
"loss": 0.416,
|
| 2355 |
+
"step": 3350
|
| 2356 |
+
},
|
| 2357 |
+
{
|
| 2358 |
+
"epoch": 1.3787443578169882,
|
| 2359 |
+
"grad_norm": 265.7662820976626,
|
| 2360 |
+
"learning_rate": 2.666145082278528e-07,
|
| 2361 |
+
"loss": 0.4054,
|
| 2362 |
+
"step": 3360
|
| 2363 |
+
},
|
| 2364 |
+
{
|
| 2365 |
+
"epoch": 1.3828477636438243,
|
| 2366 |
+
"grad_norm": 20.348111665356793,
|
| 2367 |
+
"learning_rate": 2.63453221581995e-07,
|
| 2368 |
+
"loss": 0.4398,
|
| 2369 |
+
"step": 3370
|
| 2370 |
+
},
|
| 2371 |
+
{
|
| 2372 |
+
"epoch": 1.3869511694706607,
|
| 2373 |
+
"grad_norm": 36.55148493189372,
|
| 2374 |
+
"learning_rate": 2.6030407099492624e-07,
|
| 2375 |
+
"loss": 0.4337,
|
| 2376 |
+
"step": 3380
|
| 2377 |
+
},
|
| 2378 |
+
{
|
| 2379 |
+
"epoch": 1.391054575297497,
|
| 2380 |
+
"grad_norm": 77.43643107593765,
|
| 2381 |
+
"learning_rate": 2.5716721803416765e-07,
|
| 2382 |
+
"loss": 0.4221,
|
| 2383 |
+
"step": 3390
|
| 2384 |
+
},
|
| 2385 |
+
{
|
| 2386 |
+
"epoch": 1.395157981124333,
|
| 2387 |
+
"grad_norm": 40.70221637165112,
|
| 2388 |
+
"learning_rate": 2.5404282363630956e-07,
|
| 2389 |
+
"loss": 0.4417,
|
| 2390 |
+
"step": 3400
|
| 2391 |
+
},
|
| 2392 |
+
{
|
| 2393 |
+
"epoch": 1.3992613869511694,
|
| 2394 |
+
"grad_norm": 76.28634674041872,
|
| 2395 |
+
"learning_rate": 2.5093104809875433e-07,
|
| 2396 |
+
"loss": 0.4079,
|
| 2397 |
+
"step": 3410
|
| 2398 |
+
},
|
| 2399 |
+
{
|
| 2400 |
+
"epoch": 1.4033647927780057,
|
| 2401 |
+
"grad_norm": 85.13815445124703,
|
| 2402 |
+
"learning_rate": 2.478320510714922e-07,
|
| 2403 |
+
"loss": 0.3967,
|
| 2404 |
+
"step": 3420
|
| 2405 |
+
},
|
| 2406 |
+
{
|
| 2407 |
+
"epoch": 1.407468198604842,
|
| 2408 |
+
"grad_norm": 27.91627104299842,
|
| 2409 |
+
"learning_rate": 2.447459915489106e-07,
|
| 2410 |
+
"loss": 0.4239,
|
| 2411 |
+
"step": 3430
|
| 2412 |
+
},
|
| 2413 |
+
{
|
| 2414 |
+
"epoch": 1.4115716044316784,
|
| 2415 |
+
"grad_norm": 28.122084037414176,
|
| 2416 |
+
"learning_rate": 2.416730278616363e-07,
|
| 2417 |
+
"loss": 0.4043,
|
| 2418 |
+
"step": 3440
|
| 2419 |
+
},
|
| 2420 |
+
{
|
| 2421 |
+
"epoch": 1.4156750102585145,
|
| 2422 |
+
"grad_norm": 56.41072384892292,
|
| 2423 |
+
"learning_rate": 2.3861331766841366e-07,
|
| 2424 |
+
"loss": 0.4376,
|
| 2425 |
+
"step": 3450
|
| 2426 |
+
},
|
| 2427 |
+
{
|
| 2428 |
+
"epoch": 1.4197784160853508,
|
| 2429 |
+
"grad_norm": 28.705663828792556,
|
| 2430 |
+
"learning_rate": 2.3556701794801448e-07,
|
| 2431 |
+
"loss": 0.4329,
|
| 2432 |
+
"step": 3460
|
| 2433 |
+
},
|
| 2434 |
+
{
|
| 2435 |
+
"epoch": 1.4238818219121872,
|
| 2436 |
+
"grad_norm": 72.85647377507192,
|
| 2437 |
+
"learning_rate": 2.3253428499118465e-07,
|
| 2438 |
+
"loss": 0.4249,
|
| 2439 |
+
"step": 3470
|
| 2440 |
+
},
|
| 2441 |
+
{
|
| 2442 |
+
"epoch": 1.4279852277390233,
|
| 2443 |
+
"grad_norm": 19.74007408423277,
|
| 2444 |
+
"learning_rate": 2.2951527439262626e-07,
|
| 2445 |
+
"loss": 0.4224,
|
| 2446 |
+
"step": 3480
|
| 2447 |
+
},
|
| 2448 |
+
{
|
| 2449 |
+
"epoch": 1.4320886335658596,
|
| 2450 |
+
"grad_norm": 88.01443864013412,
|
| 2451 |
+
"learning_rate": 2.2651014104301396e-07,
|
| 2452 |
+
"loss": 0.431,
|
| 2453 |
+
"step": 3490
|
| 2454 |
+
},
|
| 2455 |
+
{
|
| 2456 |
+
"epoch": 1.436192039392696,
|
| 2457 |
+
"grad_norm": 97.30696687141679,
|
| 2458 |
+
"learning_rate": 2.235190391210489e-07,
|
| 2459 |
+
"loss": 0.4269,
|
| 2460 |
+
"step": 3500
|
| 2461 |
+
},
|
| 2462 |
+
{
|
| 2463 |
+
"epoch": 1.4402954452195322,
|
| 2464 |
+
"grad_norm": 56.673363862081146,
|
| 2465 |
+
"learning_rate": 2.205421220855478e-07,
|
| 2466 |
+
"loss": 0.4334,
|
| 2467 |
+
"step": 3510
|
| 2468 |
+
},
|
| 2469 |
+
{
|
| 2470 |
+
"epoch": 1.4443988510463686,
|
| 2471 |
+
"grad_norm": 126.18875615852781,
|
| 2472 |
+
"learning_rate": 2.1757954266757017e-07,
|
| 2473 |
+
"loss": 0.4587,
|
| 2474 |
+
"step": 3520
|
| 2475 |
+
},
|
| 2476 |
+
{
|
| 2477 |
+
"epoch": 1.4485022568732049,
|
| 2478 |
+
"grad_norm": 28.15470805643783,
|
| 2479 |
+
"learning_rate": 2.146314528625832e-07,
|
| 2480 |
+
"loss": 0.4267,
|
| 2481 |
+
"step": 3530
|
| 2482 |
+
},
|
| 2483 |
+
{
|
| 2484 |
+
"epoch": 1.452605662700041,
|
| 2485 |
+
"grad_norm": 55.18698152543268,
|
| 2486 |
+
"learning_rate": 2.1169800392266206e-07,
|
| 2487 |
+
"loss": 0.4829,
|
| 2488 |
+
"step": 3540
|
| 2489 |
+
},
|
| 2490 |
+
{
|
| 2491 |
+
"epoch": 1.4567090685268773,
|
| 2492 |
+
"grad_norm": 51.381578012157995,
|
| 2493 |
+
"learning_rate": 2.0877934634873107e-07,
|
| 2494 |
+
"loss": 0.4236,
|
| 2495 |
+
"step": 3550
|
| 2496 |
+
},
|
| 2497 |
+
{
|
| 2498 |
+
"epoch": 1.4608124743537136,
|
| 2499 |
+
"grad_norm": 55.663749514008956,
|
| 2500 |
+
"learning_rate": 2.0587562988284213e-07,
|
| 2501 |
+
"loss": 0.4075,
|
| 2502 |
+
"step": 3560
|
| 2503 |
+
},
|
| 2504 |
+
{
|
| 2505 |
+
"epoch": 1.4649158801805497,
|
| 2506 |
+
"grad_norm": 23.81706142413667,
|
| 2507 |
+
"learning_rate": 2.0298700350049126e-07,
|
| 2508 |
+
"loss": 0.4346,
|
| 2509 |
+
"step": 3570
|
| 2510 |
+
},
|
| 2511 |
+
{
|
| 2512 |
+
"epoch": 1.469019286007386,
|
| 2513 |
+
"grad_norm": 18.751390148129666,
|
| 2514 |
+
"learning_rate": 2.0011361540297677e-07,
|
| 2515 |
+
"loss": 0.4258,
|
| 2516 |
+
"step": 3580
|
| 2517 |
+
},
|
| 2518 |
+
{
|
| 2519 |
+
"epoch": 1.4731226918342224,
|
| 2520 |
+
"grad_norm": 42.29524409036339,
|
| 2521 |
+
"learning_rate": 1.972556130097946e-07,
|
| 2522 |
+
"loss": 0.3865,
|
| 2523 |
+
"step": 3590
|
| 2524 |
+
},
|
| 2525 |
+
{
|
| 2526 |
+
"epoch": 1.4772260976610587,
|
| 2527 |
+
"grad_norm": 88.17315128049029,
|
| 2528 |
+
"learning_rate": 1.9441314295107535e-07,
|
| 2529 |
+
"loss": 0.4289,
|
| 2530 |
+
"step": 3600
|
| 2531 |
+
},
|
| 2532 |
+
{
|
| 2533 |
+
"epoch": 1.481329503487895,
|
| 2534 |
+
"grad_norm": 734.8095866137935,
|
| 2535 |
+
"learning_rate": 1.915863510600621e-07,
|
| 2536 |
+
"loss": 0.4469,
|
| 2537 |
+
"step": 3610
|
| 2538 |
+
},
|
| 2539 |
+
{
|
| 2540 |
+
"epoch": 1.4854329093147312,
|
| 2541 |
+
"grad_norm": 122.77311301809671,
|
| 2542 |
+
"learning_rate": 1.8877538236562696e-07,
|
| 2543 |
+
"loss": 0.4422,
|
| 2544 |
+
"step": 3620
|
| 2545 |
+
},
|
| 2546 |
+
{
|
| 2547 |
+
"epoch": 1.4895363151415675,
|
| 2548 |
+
"grad_norm": 139.24616249691908,
|
| 2549 |
+
"learning_rate": 1.8598038108483184e-07,
|
| 2550 |
+
"loss": 0.4198,
|
| 2551 |
+
"step": 3630
|
| 2552 |
+
},
|
| 2553 |
+
{
|
| 2554 |
+
"epoch": 1.4936397209684038,
|
| 2555 |
+
"grad_norm": 123.30667073508305,
|
| 2556 |
+
"learning_rate": 1.8320149061552858e-07,
|
| 2557 |
+
"loss": 0.4351,
|
| 2558 |
+
"step": 3640
|
| 2559 |
+
},
|
| 2560 |
+
{
|
| 2561 |
+
"epoch": 1.4977431267952401,
|
| 2562 |
+
"grad_norm": 154.13025571643425,
|
| 2563 |
+
"learning_rate": 1.8043885352900163e-07,
|
| 2564 |
+
"loss": 0.3949,
|
| 2565 |
+
"step": 3650
|
| 2566 |
+
},
|
| 2567 |
+
{
|
| 2568 |
+
"epoch": 1.5018465326220762,
|
| 2569 |
+
"grad_norm": 137.73093396161374,
|
| 2570 |
+
"learning_rate": 1.7769261156265447e-07,
|
| 2571 |
+
"loss": 0.4095,
|
| 2572 |
+
"step": 3660
|
| 2573 |
+
},
|
| 2574 |
+
{
|
| 2575 |
+
"epoch": 1.5059499384489126,
|
| 2576 |
+
"grad_norm": 93.49218422504592,
|
| 2577 |
+
"learning_rate": 1.7496290561273657e-07,
|
| 2578 |
+
"loss": 0.449,
|
| 2579 |
+
"step": 3670
|
| 2580 |
+
},
|
| 2581 |
+
{
|
| 2582 |
+
"epoch": 1.5100533442757489,
|
| 2583 |
+
"grad_norm": 73.91814945964236,
|
| 2584 |
+
"learning_rate": 1.722498757271153e-07,
|
| 2585 |
+
"loss": 0.4377,
|
| 2586 |
+
"step": 3680
|
| 2587 |
+
},
|
| 2588 |
+
{
|
| 2589 |
+
"epoch": 1.5141567501025852,
|
| 2590 |
+
"grad_norm": 79.2674062038899,
|
| 2591 |
+
"learning_rate": 1.695536610980912e-07,
|
| 2592 |
+
"loss": 0.3979,
|
| 2593 |
+
"step": 3690
|
| 2594 |
+
},
|
| 2595 |
+
{
|
| 2596 |
+
"epoch": 1.5182601559294215,
|
| 2597 |
+
"grad_norm": 89.27621775926617,
|
| 2598 |
+
"learning_rate": 1.668744000552555e-07,
|
| 2599 |
+
"loss": 0.4164,
|
| 2600 |
+
"step": 3700
|
| 2601 |
+
},
|
| 2602 |
+
{
|
| 2603 |
+
"epoch": 1.5223635617562576,
|
| 2604 |
+
"grad_norm": 16.16512901582305,
|
| 2605 |
+
"learning_rate": 1.6421223005839424e-07,
|
| 2606 |
+
"loss": 0.4391,
|
| 2607 |
+
"step": 3710
|
| 2608 |
+
},
|
| 2609 |
+
{
|
| 2610 |
+
"epoch": 1.526466967583094,
|
| 2611 |
+
"grad_norm": 29.93448294879551,
|
| 2612 |
+
"learning_rate": 1.6156728769043566e-07,
|
| 2613 |
+
"loss": 0.4098,
|
| 2614 |
+
"step": 3720
|
| 2615 |
+
},
|
| 2616 |
+
{
|
| 2617 |
+
"epoch": 1.5305703734099303,
|
| 2618 |
+
"grad_norm": 148.77168698498497,
|
| 2619 |
+
"learning_rate": 1.5893970865044175e-07,
|
| 2620 |
+
"loss": 0.425,
|
| 2621 |
+
"step": 3730
|
| 2622 |
+
},
|
| 2623 |
+
{
|
| 2624 |
+
"epoch": 1.5346737792367664,
|
| 2625 |
+
"grad_norm": 191.56354280097543,
|
| 2626 |
+
"learning_rate": 1.5632962774664805e-07,
|
| 2627 |
+
"loss": 0.3678,
|
| 2628 |
+
"step": 3740
|
| 2629 |
+
},
|
| 2630 |
+
{
|
| 2631 |
+
"epoch": 1.5387771850636027,
|
| 2632 |
+
"grad_norm": 23.78551256099493,
|
| 2633 |
+
"learning_rate": 1.537371788895455e-07,
|
| 2634 |
+
"loss": 0.4172,
|
| 2635 |
+
"step": 3750
|
| 2636 |
+
},
|
| 2637 |
+
{
|
| 2638 |
+
"epoch": 1.542880590890439,
|
| 2639 |
+
"grad_norm": 21.800948941339687,
|
| 2640 |
+
"learning_rate": 1.5116249508501112e-07,
|
| 2641 |
+
"loss": 0.3844,
|
| 2642 |
+
"step": 3760
|
| 2643 |
+
},
|
| 2644 |
+
{
|
| 2645 |
+
"epoch": 1.5469839967172754,
|
| 2646 |
+
"grad_norm": 52.93516332064744,
|
| 2647 |
+
"learning_rate": 1.486057084274841e-07,
|
| 2648 |
+
"loss": 0.4355,
|
| 2649 |
+
"step": 3770
|
| 2650 |
+
},
|
| 2651 |
+
{
|
| 2652 |
+
"epoch": 1.5510874025441117,
|
| 2653 |
+
"grad_norm": 725.3391524642909,
|
| 2654 |
+
"learning_rate": 1.4606695009318854e-07,
|
| 2655 |
+
"loss": 0.4262,
|
| 2656 |
+
"step": 3780
|
| 2657 |
+
},
|
| 2658 |
+
{
|
| 2659 |
+
"epoch": 1.555190808370948,
|
| 2660 |
+
"grad_norm": 204.09252403482603,
|
| 2661 |
+
"learning_rate": 1.4354635033340305e-07,
|
| 2662 |
+
"loss": 0.3866,
|
| 2663 |
+
"step": 3790
|
| 2664 |
+
},
|
| 2665 |
+
{
|
| 2666 |
+
"epoch": 1.5592942141977841,
|
| 2667 |
+
"grad_norm": 271.45861751317847,
|
| 2668 |
+
"learning_rate": 1.4104403846777906e-07,
|
| 2669 |
+
"loss": 0.3842,
|
| 2670 |
+
"step": 3800
|
| 2671 |
+
},
|
| 2672 |
+
{
|
| 2673 |
+
"epoch": 1.5633976200246205,
|
| 2674 |
+
"grad_norm": 23.623240438639296,
|
| 2675 |
+
"learning_rate": 1.3856014287770502e-07,
|
| 2676 |
+
"loss": 0.3904,
|
| 2677 |
+
"step": 3810
|
| 2678 |
+
},
|
| 2679 |
+
{
|
| 2680 |
+
"epoch": 1.5675010258514566,
|
| 2681 |
+
"grad_norm": 25.80532779059473,
|
| 2682 |
+
"learning_rate": 1.360947909997209e-07,
|
| 2683 |
+
"loss": 0.4254,
|
| 2684 |
+
"step": 3820
|
| 2685 |
+
},
|
| 2686 |
+
{
|
| 2687 |
+
"epoch": 1.571604431678293,
|
| 2688 |
+
"grad_norm": 134.65973742152963,
|
| 2689 |
+
"learning_rate": 1.3364810931897885e-07,
|
| 2690 |
+
"loss": 0.4283,
|
| 2691 |
+
"step": 3830
|
| 2692 |
+
},
|
| 2693 |
+
{
|
| 2694 |
+
"epoch": 1.5757078375051292,
|
| 2695 |
+
"grad_norm": 52.13065478917333,
|
| 2696 |
+
"learning_rate": 1.3122022336275475e-07,
|
| 2697 |
+
"loss": 0.4295,
|
| 2698 |
+
"step": 3840
|
| 2699 |
+
},
|
| 2700 |
+
{
|
| 2701 |
+
"epoch": 1.5798112433319655,
|
| 2702 |
+
"grad_norm": 3311.2383704845356,
|
| 2703 |
+
"learning_rate": 1.2881125769400785e-07,
|
| 2704 |
+
"loss": 0.4312,
|
| 2705 |
+
"step": 3850
|
| 2706 |
+
},
|
| 2707 |
+
{
|
| 2708 |
+
"epoch": 1.5839146491588019,
|
| 2709 |
+
"grad_norm": 101.8145706410286,
|
| 2710 |
+
"learning_rate": 1.2642133590499005e-07,
|
| 2711 |
+
"loss": 0.3986,
|
| 2712 |
+
"step": 3860
|
| 2713 |
+
},
|
| 2714 |
+
{
|
| 2715 |
+
"epoch": 1.5880180549856382,
|
| 2716 |
+
"grad_norm": 97.75068168003979,
|
| 2717 |
+
"learning_rate": 1.240505806109043e-07,
|
| 2718 |
+
"loss": 0.3693,
|
| 2719 |
+
"step": 3870
|
| 2720 |
+
},
|
| 2721 |
+
{
|
| 2722 |
+
"epoch": 1.5921214608124743,
|
| 2723 |
+
"grad_norm": 25.390546622624026,
|
| 2724 |
+
"learning_rate": 1.216991134436151e-07,
|
| 2725 |
+
"loss": 0.4272,
|
| 2726 |
+
"step": 3880
|
| 2727 |
+
},
|
| 2728 |
+
{
|
| 2729 |
+
"epoch": 1.5962248666393106,
|
| 2730 |
+
"grad_norm": 145.43042649972676,
|
| 2731 |
+
"learning_rate": 1.1936705504540684e-07,
|
| 2732 |
+
"loss": 0.4347,
|
| 2733 |
+
"step": 3890
|
| 2734 |
+
},
|
| 2735 |
+
{
|
| 2736 |
+
"epoch": 1.600328272466147,
|
| 2737 |
+
"grad_norm": 205.82119199822708,
|
| 2738 |
+
"learning_rate": 1.1705452506279545e-07,
|
| 2739 |
+
"loss": 0.4363,
|
| 2740 |
+
"step": 3900
|
| 2741 |
+
},
|
| 2742 |
+
{
|
| 2743 |
+
"epoch": 1.604431678292983,
|
| 2744 |
+
"grad_norm": 658.4322824736582,
|
| 2745 |
+
"learning_rate": 1.1476164214038891e-07,
|
| 2746 |
+
"loss": 0.4067,
|
| 2747 |
+
"step": 3910
|
| 2748 |
+
},
|
| 2749 |
+
{
|
| 2750 |
+
"epoch": 1.6085350841198194,
|
| 2751 |
+
"grad_norm": 68.71455402259994,
|
| 2752 |
+
"learning_rate": 1.1248852391480047e-07,
|
| 2753 |
+
"loss": 0.4286,
|
| 2754 |
+
"step": 3920
|
| 2755 |
+
},
|
| 2756 |
+
{
|
| 2757 |
+
"epoch": 1.6126384899466557,
|
| 2758 |
+
"grad_norm": 46.60551181159029,
|
| 2759 |
+
"learning_rate": 1.1023528700861384e-07,
|
| 2760 |
+
"loss": 0.4155,
|
| 2761 |
+
"step": 3930
|
| 2762 |
+
},
|
| 2763 |
+
{
|
| 2764 |
+
"epoch": 1.616741895773492,
|
| 2765 |
+
"grad_norm": 23.87320693964512,
|
| 2766 |
+
"learning_rate": 1.0800204702439935e-07,
|
| 2767 |
+
"loss": 0.485,
|
| 2768 |
+
"step": 3940
|
| 2769 |
+
},
|
| 2770 |
+
{
|
| 2771 |
+
"epoch": 1.6208453016003284,
|
| 2772 |
+
"grad_norm": 2181.811952398405,
|
| 2773 |
+
"learning_rate": 1.0578891853878264e-07,
|
| 2774 |
+
"loss": 0.4327,
|
| 2775 |
+
"step": 3950
|
| 2776 |
+
},
|
| 2777 |
+
{
|
| 2778 |
+
"epoch": 1.6249487074271647,
|
| 2779 |
+
"grad_norm": 119.3349899078856,
|
| 2780 |
+
"learning_rate": 1.0359601509656723e-07,
|
| 2781 |
+
"loss": 0.4414,
|
| 2782 |
+
"step": 3960
|
| 2783 |
+
},
|
| 2784 |
+
{
|
| 2785 |
+
"epoch": 1.6290521132540008,
|
| 2786 |
+
"grad_norm": 62.68332703029713,
|
| 2787 |
+
"learning_rate": 1.0142344920490787e-07,
|
| 2788 |
+
"loss": 0.4014,
|
| 2789 |
+
"step": 3970
|
| 2790 |
+
},
|
| 2791 |
+
{
|
| 2792 |
+
"epoch": 1.6331555190808371,
|
| 2793 |
+
"grad_norm": 59.02524302449068,
|
| 2794 |
+
"learning_rate": 9.927133232753976e-08,
|
| 2795 |
+
"loss": 0.4124,
|
| 2796 |
+
"step": 3980
|
| 2797 |
+
},
|
| 2798 |
+
{
|
| 2799 |
+
"epoch": 1.6372589249076732,
|
| 2800 |
+
"grad_norm": 25.960859515979315,
|
| 2801 |
+
"learning_rate": 9.71397748790585e-08,
|
| 2802 |
+
"loss": 0.4061,
|
| 2803 |
+
"step": 3990
|
| 2804 |
+
},
|
| 2805 |
+
{
|
| 2806 |
+
"epoch": 1.6413623307345095,
|
| 2807 |
+
"grad_norm": 32.993050437393975,
|
| 2808 |
+
"learning_rate": 9.502888621925626e-08,
|
| 2809 |
+
"loss": 0.4102,
|
| 2810 |
+
"step": 4000
|
| 2811 |
+
},
|
| 2812 |
+
{
|
| 2813 |
+
"epoch": 1.6454657365613459,
|
| 2814 |
+
"grad_norm": 41.219724370237884,
|
| 2815 |
+
"learning_rate": 9.293877464751076e-08,
|
| 2816 |
+
"loss": 0.3875,
|
| 2817 |
+
"step": 4010
|
| 2818 |
+
},
|
| 2819 |
+
{
|
| 2820 |
+
"epoch": 1.6495691423881822,
|
| 2821 |
+
"grad_norm": 154.4264486533407,
|
| 2822 |
+
"learning_rate": 9.086954739722869e-08,
|
| 2823 |
+
"loss": 0.4093,
|
| 2824 |
+
"step": 4020
|
| 2825 |
+
},
|
| 2826 |
+
{
|
| 2827 |
+
"epoch": 1.6536725482150185,
|
| 2828 |
+
"grad_norm": 49.253526126815316,
|
| 2829 |
+
"learning_rate": 8.882131063034426e-08,
|
| 2830 |
+
"loss": 0.4095,
|
| 2831 |
+
"step": 4030
|
| 2832 |
+
},
|
| 2833 |
+
{
|
| 2834 |
+
"epoch": 1.6577759540418548,
|
| 2835 |
+
"grad_norm": 200.09521521423284,
|
| 2836 |
+
"learning_rate": 8.67941694318729e-08,
|
| 2837 |
+
"loss": 0.3839,
|
| 2838 |
+
"step": 4040
|
| 2839 |
+
},
|
| 2840 |
+
{
|
| 2841 |
+
"epoch": 1.6618793598686912,
|
| 2842 |
+
"grad_norm": 48.81181085938524,
|
| 2843 |
+
"learning_rate": 8.478822780451917e-08,
|
| 2844 |
+
"loss": 0.42,
|
| 2845 |
+
"step": 4050
|
| 2846 |
+
},
|
| 2847 |
+
{
|
| 2848 |
+
"epoch": 1.6659827656955273,
|
| 2849 |
+
"grad_norm": 135.1003218864024,
|
| 2850 |
+
"learning_rate": 8.28035886633417e-08,
|
| 2851 |
+
"loss": 0.4056,
|
| 2852 |
+
"step": 4060
|
| 2853 |
+
},
|
| 2854 |
+
{
|
| 2855 |
+
"epoch": 1.6700861715223636,
|
| 2856 |
+
"grad_norm": 28.822642214800094,
|
| 2857 |
+
"learning_rate": 8.084035383047222e-08,
|
| 2858 |
+
"loss": 0.4272,
|
| 2859 |
+
"step": 4070
|
| 2860 |
+
},
|
| 2861 |
+
{
|
| 2862 |
+
"epoch": 1.6741895773491997,
|
| 2863 |
+
"grad_norm": 66.49049754656362,
|
| 2864 |
+
"learning_rate": 7.88986240298925e-08,
|
| 2865 |
+
"loss": 0.4611,
|
| 2866 |
+
"step": 4080
|
| 2867 |
+
},
|
| 2868 |
+
{
|
| 2869 |
+
"epoch": 1.678292983176036,
|
| 2870 |
+
"grad_norm": 30.83410050587949,
|
| 2871 |
+
"learning_rate": 7.697849888226605e-08,
|
| 2872 |
+
"loss": 0.4232,
|
| 2873 |
+
"step": 4090
|
| 2874 |
+
},
|
| 2875 |
+
{
|
| 2876 |
+
"epoch": 1.6823963890028724,
|
| 2877 |
+
"grad_norm": 32.817857754522564,
|
| 2878 |
+
"learning_rate": 7.508007689982715e-08,
|
| 2879 |
+
"loss": 0.4206,
|
| 2880 |
+
"step": 4100
|
| 2881 |
+
},
|
| 2882 |
+
{
|
| 2883 |
+
"epoch": 1.6864997948297087,
|
| 2884 |
+
"grad_norm": 27.12124911007929,
|
| 2885 |
+
"learning_rate": 7.320345548132678e-08,
|
| 2886 |
+
"loss": 0.4029,
|
| 2887 |
+
"step": 4110
|
| 2888 |
+
},
|
| 2889 |
+
{
|
| 2890 |
+
"epoch": 1.690603200656545,
|
| 2891 |
+
"grad_norm": 82.03619305744303,
|
| 2892 |
+
"learning_rate": 7.134873090703586e-08,
|
| 2893 |
+
"loss": 0.3938,
|
| 2894 |
+
"step": 4120
|
| 2895 |
+
},
|
| 2896 |
+
{
|
| 2897 |
+
"epoch": 1.6947066064833813,
|
| 2898 |
+
"grad_norm": 65.49997065587876,
|
| 2899 |
+
"learning_rate": 6.951599833380478e-08,
|
| 2900 |
+
"loss": 0.4319,
|
| 2901 |
+
"step": 4130
|
| 2902 |
+
},
|
| 2903 |
+
{
|
| 2904 |
+
"epoch": 1.6988100123102174,
|
| 2905 |
+
"grad_norm": 64.34526390593851,
|
| 2906 |
+
"learning_rate": 6.770535179018228e-08,
|
| 2907 |
+
"loss": 0.4568,
|
| 2908 |
+
"step": 4140
|
| 2909 |
+
},
|
| 2910 |
+
{
|
| 2911 |
+
"epoch": 1.7029134181370538,
|
| 2912 |
+
"grad_norm": 47.847369355772585,
|
| 2913 |
+
"learning_rate": 6.591688417159091e-08,
|
| 2914 |
+
"loss": 0.418,
|
| 2915 |
+
"step": 4150
|
| 2916 |
+
},
|
| 2917 |
+
{
|
| 2918 |
+
"epoch": 1.7070168239638899,
|
| 2919 |
+
"grad_norm": 28.145036659103386,
|
| 2920 |
+
"learning_rate": 6.415068723556066e-08,
|
| 2921 |
+
"loss": 0.4234,
|
| 2922 |
+
"step": 4160
|
| 2923 |
+
},
|
| 2924 |
+
{
|
| 2925 |
+
"epoch": 1.7111202297907262,
|
| 2926 |
+
"grad_norm": 46.156398368899836,
|
| 2927 |
+
"learning_rate": 6.240685159702203e-08,
|
| 2928 |
+
"loss": 0.4304,
|
| 2929 |
+
"step": 4170
|
| 2930 |
+
},
|
| 2931 |
+
{
|
| 2932 |
+
"epoch": 1.7152236356175625,
|
| 2933 |
+
"grad_norm": 43.54941616036072,
|
| 2934 |
+
"learning_rate": 6.068546672365643e-08,
|
| 2935 |
+
"loss": 0.4771,
|
| 2936 |
+
"step": 4180
|
| 2937 |
+
},
|
| 2938 |
+
{
|
| 2939 |
+
"epoch": 1.7193270414443989,
|
| 2940 |
+
"grad_norm": 331.71722434486634,
|
| 2941 |
+
"learning_rate": 5.898662093130618e-08,
|
| 2942 |
+
"loss": 0.4194,
|
| 2943 |
+
"step": 4190
|
| 2944 |
+
},
|
| 2945 |
+
{
|
| 2946 |
+
"epoch": 1.7234304472712352,
|
| 2947 |
+
"grad_norm": 227.63390384255737,
|
| 2948 |
+
"learning_rate": 5.7310401379443987e-08,
|
| 2949 |
+
"loss": 0.4224,
|
| 2950 |
+
"step": 4200
|
| 2951 |
+
},
|
| 2952 |
+
{
|
| 2953 |
+
"epoch": 1.7275338530980715,
|
| 2954 |
+
"grad_norm": 58.81437969033111,
|
| 2955 |
+
"learning_rate": 5.5656894066700264e-08,
|
| 2956 |
+
"loss": 0.4054,
|
| 2957 |
+
"step": 4210
|
| 2958 |
+
},
|
| 2959 |
+
{
|
| 2960 |
+
"epoch": 1.7316372589249078,
|
| 2961 |
+
"grad_norm": 23.061728142357317,
|
| 2962 |
+
"learning_rate": 5.4026183826451746e-08,
|
| 2963 |
+
"loss": 0.4129,
|
| 2964 |
+
"step": 4220
|
| 2965 |
+
},
|
| 2966 |
+
{
|
| 2967 |
+
"epoch": 1.735740664751744,
|
| 2968 |
+
"grad_norm": 23.823483507906158,
|
| 2969 |
+
"learning_rate": 5.2418354322468884e-08,
|
| 2970 |
+
"loss": 0.4162,
|
| 2971 |
+
"step": 4230
|
| 2972 |
+
},
|
| 2973 |
+
{
|
| 2974 |
+
"epoch": 1.7398440705785803,
|
| 2975 |
+
"grad_norm": 632.7990421227291,
|
| 2976 |
+
"learning_rate": 5.083348804462312e-08,
|
| 2977 |
+
"loss": 0.4289,
|
| 2978 |
+
"step": 4240
|
| 2979 |
+
},
|
| 2980 |
+
{
|
| 2981 |
+
"epoch": 1.7439474764054164,
|
| 2982 |
+
"grad_norm": 28.767078917022214,
|
| 2983 |
+
"learning_rate": 4.927166630465534e-08,
|
| 2984 |
+
"loss": 0.3888,
|
| 2985 |
+
"step": 4250
|
| 2986 |
+
},
|
| 2987 |
+
{
|
| 2988 |
+
"epoch": 1.7480508822322527,
|
| 2989 |
+
"grad_norm": 122.53585282970855,
|
| 2990 |
+
"learning_rate": 4.773296923200371e-08,
|
| 2991 |
+
"loss": 0.3999,
|
| 2992 |
+
"step": 4260
|
| 2993 |
+
},
|
| 2994 |
+
{
|
| 2995 |
+
"epoch": 1.752154288059089,
|
| 2996 |
+
"grad_norm": 24.40357397389423,
|
| 2997 |
+
"learning_rate": 4.621747576969259e-08,
|
| 2998 |
+
"loss": 0.4038,
|
| 2999 |
+
"step": 4270
|
| 3000 |
+
},
|
| 3001 |
+
{
|
| 3002 |
+
"epoch": 1.7562576938859253,
|
| 3003 |
+
"grad_norm": 46.933111305330726,
|
| 3004 |
+
"learning_rate": 4.47252636702829e-08,
|
| 3005 |
+
"loss": 0.3873,
|
| 3006 |
+
"step": 4280
|
| 3007 |
+
},
|
| 3008 |
+
{
|
| 3009 |
+
"epoch": 1.7603610997127617,
|
| 3010 |
+
"grad_norm": 344.92793874089887,
|
| 3011 |
+
"learning_rate": 4.325640949188225e-08,
|
| 3012 |
+
"loss": 0.4061,
|
| 3013 |
+
"step": 4290
|
| 3014 |
+
},
|
| 3015 |
+
{
|
| 3016 |
+
"epoch": 1.764464505539598,
|
| 3017 |
+
"grad_norm": 102.7628536857103,
|
| 3018 |
+
"learning_rate": 4.181098859421789e-08,
|
| 3019 |
+
"loss": 0.4481,
|
| 3020 |
+
"step": 4300
|
| 3021 |
+
},
|
| 3022 |
+
{
|
| 3023 |
+
"epoch": 1.768567911366434,
|
| 3024 |
+
"grad_norm": 27.284536483789637,
|
| 3025 |
+
"learning_rate": 4.0389075134769856e-08,
|
| 3026 |
+
"loss": 0.4194,
|
| 3027 |
+
"step": 4310
|
| 3028 |
+
},
|
| 3029 |
+
{
|
| 3030 |
+
"epoch": 1.7726713171932704,
|
| 3031 |
+
"grad_norm": 234.61139196852804,
|
| 3032 |
+
"learning_rate": 3.899074206496616e-08,
|
| 3033 |
+
"loss": 0.4082,
|
| 3034 |
+
"step": 4320
|
| 3035 |
+
},
|
| 3036 |
+
{
|
| 3037 |
+
"epoch": 1.7767747230201065,
|
| 3038 |
+
"grad_norm": 16.201147779957868,
|
| 3039 |
+
"learning_rate": 3.761606112644089e-08,
|
| 3040 |
+
"loss": 0.4287,
|
| 3041 |
+
"step": 4330
|
| 3042 |
+
},
|
| 3043 |
+
{
|
| 3044 |
+
"epoch": 1.7808781288469429,
|
| 3045 |
+
"grad_norm": 83.38729293634881,
|
| 3046 |
+
"learning_rate": 3.626510284735229e-08,
|
| 3047 |
+
"loss": 0.4471,
|
| 3048 |
+
"step": 4340
|
| 3049 |
+
},
|
| 3050 |
+
{
|
| 3051 |
+
"epoch": 1.7849815346737792,
|
| 3052 |
+
"grad_norm": 36.2553001909207,
|
| 3053 |
+
"learning_rate": 3.4937936538765256e-08,
|
| 3054 |
+
"loss": 0.4098,
|
| 3055 |
+
"step": 4350
|
| 3056 |
+
},
|
| 3057 |
+
{
|
| 3058 |
+
"epoch": 1.7890849405006155,
|
| 3059 |
+
"grad_norm": 54.74769955382213,
|
| 3060 |
+
"learning_rate": 3.363463029109498e-08,
|
| 3061 |
+
"loss": 0.4142,
|
| 3062 |
+
"step": 4360
|
| 3063 |
+
},
|
| 3064 |
+
{
|
| 3065 |
+
"epoch": 1.7931883463274518,
|
| 3066 |
+
"grad_norm": 32.328001657373804,
|
| 3067 |
+
"learning_rate": 3.2355250970613533e-08,
|
| 3068 |
+
"loss": 0.4015,
|
| 3069 |
+
"step": 4370
|
| 3070 |
+
},
|
| 3071 |
+
{
|
| 3072 |
+
"epoch": 1.7972917521542882,
|
| 3073 |
+
"grad_norm": 78.58271605188096,
|
| 3074 |
+
"learning_rate": 3.1099864216019345e-08,
|
| 3075 |
+
"loss": 0.4237,
|
| 3076 |
+
"step": 4380
|
| 3077 |
+
},
|
| 3078 |
+
{
|
| 3079 |
+
"epoch": 1.8013951579811245,
|
| 3080 |
+
"grad_norm": 80.46887552678375,
|
| 3081 |
+
"learning_rate": 2.986853443506954e-08,
|
| 3082 |
+
"loss": 0.3938,
|
| 3083 |
+
"step": 4390
|
| 3084 |
+
},
|
| 3085 |
+
{
|
| 3086 |
+
"epoch": 1.8054985638079606,
|
| 3087 |
+
"grad_norm": 29.45828646127695,
|
| 3088 |
+
"learning_rate": 2.8661324801275422e-08,
|
| 3089 |
+
"loss": 0.4114,
|
| 3090 |
+
"step": 4400
|
| 3091 |
+
},
|
| 3092 |
+
{
|
| 3093 |
+
"epoch": 1.809601969634797,
|
| 3094 |
+
"grad_norm": 103.69552161722473,
|
| 3095 |
+
"learning_rate": 2.747829725066181e-08,
|
| 3096 |
+
"loss": 0.4366,
|
| 3097 |
+
"step": 4410
|
| 3098 |
+
},
|
| 3099 |
+
{
|
| 3100 |
+
"epoch": 1.813705375461633,
|
| 3101 |
+
"grad_norm": 23.19149469811039,
|
| 3102 |
+
"learning_rate": 2.6319512478588657e-08,
|
| 3103 |
+
"loss": 0.4087,
|
| 3104 |
+
"step": 4420
|
| 3105 |
+
},
|
| 3106 |
+
{
|
| 3107 |
+
"epoch": 1.8178087812884693,
|
| 3108 |
+
"grad_norm": 75.73282971489475,
|
| 3109 |
+
"learning_rate": 2.518502993663768e-08,
|
| 3110 |
+
"loss": 0.4451,
|
| 3111 |
+
"step": 4430
|
| 3112 |
+
},
|
| 3113 |
+
{
|
| 3114 |
+
"epoch": 1.8219121871153057,
|
| 3115 |
+
"grad_norm": 27.441560179061018,
|
| 3116 |
+
"learning_rate": 2.4074907829561952e-08,
|
| 3117 |
+
"loss": 0.4727,
|
| 3118 |
+
"step": 4440
|
| 3119 |
+
},
|
| 3120 |
+
{
|
| 3121 |
+
"epoch": 1.826015592942142,
|
| 3122 |
+
"grad_norm": 35.59157075849216,
|
| 3123 |
+
"learning_rate": 2.298920311229968e-08,
|
| 3124 |
+
"loss": 0.4531,
|
| 3125 |
+
"step": 4450
|
| 3126 |
+
},
|
| 3127 |
+
{
|
| 3128 |
+
"epoch": 1.8301189987689783,
|
| 3129 |
+
"grad_norm": 55.57776840001702,
|
| 3130 |
+
"learning_rate": 2.1927971487052276e-08,
|
| 3131 |
+
"loss": 0.4263,
|
| 3132 |
+
"step": 4460
|
| 3133 |
+
},
|
| 3134 |
+
{
|
| 3135 |
+
"epoch": 1.8342224045958146,
|
| 3136 |
+
"grad_norm": 55.095543592811346,
|
| 3137 |
+
"learning_rate": 2.089126740042635e-08,
|
| 3138 |
+
"loss": 0.424,
|
| 3139 |
+
"step": 4470
|
| 3140 |
+
},
|
| 3141 |
+
{
|
| 3142 |
+
"epoch": 1.8383258104226508,
|
| 3143 |
+
"grad_norm": 239.0565166545146,
|
| 3144 |
+
"learning_rate": 1.9879144040640338e-08,
|
| 3145 |
+
"loss": 0.4132,
|
| 3146 |
+
"step": 4480
|
| 3147 |
+
},
|
| 3148 |
+
{
|
| 3149 |
+
"epoch": 1.842429216249487,
|
| 3150 |
+
"grad_norm": 26.08139671080947,
|
| 3151 |
+
"learning_rate": 1.889165333479592e-08,
|
| 3152 |
+
"loss": 0.4193,
|
| 3153 |
+
"step": 4490
|
| 3154 |
+
},
|
| 3155 |
+
{
|
| 3156 |
+
"epoch": 1.8465326220763232,
|
| 3157 |
+
"grad_norm": 81.74843526830144,
|
| 3158 |
+
"learning_rate": 1.792884594621358e-08,
|
| 3159 |
+
"loss": 0.4038,
|
| 3160 |
+
"step": 4500
|
| 3161 |
+
},
|
| 3162 |
+
{
|
| 3163 |
+
"epoch": 1.8506360279031595,
|
| 3164 |
+
"grad_norm": 30.745163227488213,
|
| 3165 |
+
"learning_rate": 1.6990771271833572e-08,
|
| 3166 |
+
"loss": 0.4046,
|
| 3167 |
+
"step": 4510
|
| 3168 |
+
},
|
| 3169 |
+
{
|
| 3170 |
+
"epoch": 1.8547394337299958,
|
| 3171 |
+
"grad_norm": 130.76376318478836,
|
| 3172 |
+
"learning_rate": 1.607747743968152e-08,
|
| 3173 |
+
"loss": 0.4446,
|
| 3174 |
+
"step": 4520
|
| 3175 |
+
},
|
| 3176 |
+
{
|
| 3177 |
+
"epoch": 1.8588428395568322,
|
| 3178 |
+
"grad_norm": 133.70184545748543,
|
| 3179 |
+
"learning_rate": 1.5189011306398937e-08,
|
| 3180 |
+
"loss": 0.4119,
|
| 3181 |
+
"step": 4530
|
| 3182 |
+
},
|
| 3183 |
+
{
|
| 3184 |
+
"epoch": 1.8629462453836685,
|
| 3185 |
+
"grad_norm": 80.80317367608659,
|
| 3186 |
+
"learning_rate": 1.4325418454839866e-08,
|
| 3187 |
+
"loss": 0.4687,
|
| 3188 |
+
"step": 4540
|
| 3189 |
+
},
|
| 3190 |
+
{
|
| 3191 |
+
"epoch": 1.8670496512105048,
|
| 3192 |
+
"grad_norm": 58.169165629481995,
|
| 3193 |
+
"learning_rate": 1.3486743191731487e-08,
|
| 3194 |
+
"loss": 0.4278,
|
| 3195 |
+
"step": 4550
|
| 3196 |
+
},
|
| 3197 |
+
{
|
| 3198 |
+
"epoch": 1.8711530570373411,
|
| 3199 |
+
"grad_norm": 68.37992928499462,
|
| 3200 |
+
"learning_rate": 1.2673028545401531e-08,
|
| 3201 |
+
"loss": 0.4389,
|
| 3202 |
+
"step": 4560
|
| 3203 |
+
},
|
| 3204 |
+
{
|
| 3205 |
+
"epoch": 1.8752564628641772,
|
| 3206 |
+
"grad_norm": 150.706270654969,
|
| 3207 |
+
"learning_rate": 1.188431626357056e-08,
|
| 3208 |
+
"loss": 0.4103,
|
| 3209 |
+
"step": 4570
|
| 3210 |
+
},
|
| 3211 |
+
{
|
| 3212 |
+
"epoch": 1.8793598686910136,
|
| 3213 |
+
"grad_norm": 146.58898264850197,
|
| 3214 |
+
"learning_rate": 1.1120646811209888e-08,
|
| 3215 |
+
"loss": 0.3892,
|
| 3216 |
+
"step": 4580
|
| 3217 |
+
},
|
| 3218 |
+
{
|
| 3219 |
+
"epoch": 1.8834632745178497,
|
| 3220 |
+
"grad_norm": 89.90932455038448,
|
| 3221 |
+
"learning_rate": 1.03820593684657e-08,
|
| 3222 |
+
"loss": 0.4355,
|
| 3223 |
+
"step": 4590
|
| 3224 |
+
},
|
| 3225 |
+
{
|
| 3226 |
+
"epoch": 1.887566680344686,
|
| 3227 |
+
"grad_norm": 52.70879679807137,
|
| 3228 |
+
"learning_rate": 9.668591828649097e-09,
|
| 3229 |
+
"loss": 0.4332,
|
| 3230 |
+
"step": 4600
|
| 3231 |
+
},
|
| 3232 |
+
{
|
| 3233 |
+
"epoch": 1.8916700861715223,
|
| 3234 |
+
"grad_norm": 29.817884507529747,
|
| 3235 |
+
"learning_rate": 8.980280796291429e-09,
|
| 3236 |
+
"loss": 0.3732,
|
| 3237 |
+
"step": 4610
|
| 3238 |
+
},
|
| 3239 |
+
{
|
| 3240 |
+
"epoch": 1.8957734919983587,
|
| 3241 |
+
"grad_norm": 27.83302594915626,
|
| 3242 |
+
"learning_rate": 8.317161585266963e-09,
|
| 3243 |
+
"loss": 0.403,
|
| 3244 |
+
"step": 4620
|
| 3245 |
+
},
|
| 3246 |
+
{
|
| 3247 |
+
"epoch": 1.899876897825195,
|
| 3248 |
+
"grad_norm": 32.86007896299627,
|
| 3249 |
+
"learning_rate": 7.679268216980506e-09,
|
| 3250 |
+
"loss": 0.4237,
|
| 3251 |
+
"step": 4630
|
| 3252 |
+
},
|
| 3253 |
+
{
|
| 3254 |
+
"epoch": 1.9039803036520313,
|
| 3255 |
+
"grad_norm": 275.7872919953887,
|
| 3256 |
+
"learning_rate": 7.066633418622236e-09,
|
| 3257 |
+
"loss": 0.4611,
|
| 3258 |
+
"step": 4640
|
| 3259 |
+
},
|
| 3260 |
+
{
|
| 3261 |
+
"epoch": 1.9080837094788674,
|
| 3262 |
+
"grad_norm": 135.82283043740722,
|
| 3263 |
+
"learning_rate": 6.479288621488832e-09,
|
| 3264 |
+
"loss": 0.4264,
|
| 3265 |
+
"step": 4650
|
| 3266 |
+
},
|
| 3267 |
+
{
|
| 3268 |
+
"epoch": 1.9121871153057037,
|
| 3269 |
+
"grad_norm": 18.942397243087314,
|
| 3270 |
+
"learning_rate": 5.917263959370311e-09,
|
| 3271 |
+
"loss": 0.4177,
|
| 3272 |
+
"step": 4660
|
| 3273 |
+
},
|
| 3274 |
+
{
|
| 3275 |
+
"epoch": 1.91629052113254,
|
| 3276 |
+
"grad_norm": 109.85687476445716,
|
| 3277 |
+
"learning_rate": 5.3805882670045485e-09,
|
| 3278 |
+
"loss": 0.4231,
|
| 3279 |
+
"step": 4670
|
| 3280 |
+
},
|
| 3281 |
+
{
|
| 3282 |
+
"epoch": 1.9203939269593762,
|
| 3283 |
+
"grad_norm": 26.51843922541481,
|
| 3284 |
+
"learning_rate": 4.8692890785977935e-09,
|
| 3285 |
+
"loss": 0.3936,
|
| 3286 |
+
"step": 4680
|
| 3287 |
+
},
|
| 3288 |
+
{
|
| 3289 |
+
"epoch": 1.9244973327862125,
|
| 3290 |
+
"grad_norm": 33.37116688585416,
|
| 3291 |
+
"learning_rate": 4.383392626411575e-09,
|
| 3292 |
+
"loss": 0.4007,
|
| 3293 |
+
"step": 4690
|
| 3294 |
+
},
|
| 3295 |
+
{
|
| 3296 |
+
"epoch": 1.9286007386130488,
|
| 3297 |
+
"grad_norm": 77.91638613348145,
|
| 3298 |
+
"learning_rate": 3.922923839417613e-09,
|
| 3299 |
+
"loss": 0.4008,
|
| 3300 |
+
"step": 4700
|
| 3301 |
+
},
|
| 3302 |
+
{
|
| 3303 |
+
"epoch": 1.9327041444398851,
|
| 3304 |
+
"grad_norm": 30.569873893290577,
|
| 3305 |
+
"learning_rate": 3.487906342018232e-09,
|
| 3306 |
+
"loss": 0.4236,
|
| 3307 |
+
"step": 4710
|
| 3308 |
+
},
|
| 3309 |
+
{
|
| 3310 |
+
"epoch": 1.9368075502667215,
|
| 3311 |
+
"grad_norm": 135.27292614439648,
|
| 3312 |
+
"learning_rate": 3.0783624528344933e-09,
|
| 3313 |
+
"loss": 0.4131,
|
| 3314 |
+
"step": 4720
|
| 3315 |
+
},
|
| 3316 |
+
{
|
| 3317 |
+
"epoch": 1.9409109560935578,
|
| 3318 |
+
"grad_norm": 32.46574855115365,
|
| 3319 |
+
"learning_rate": 2.694313183561225e-09,
|
| 3320 |
+
"loss": 0.4025,
|
| 3321 |
+
"step": 4730
|
| 3322 |
+
},
|
| 3323 |
+
{
|
| 3324 |
+
"epoch": 1.945014361920394,
|
| 3325 |
+
"grad_norm": 16.46489148883643,
|
| 3326 |
+
"learning_rate": 2.335778237888941e-09,
|
| 3327 |
+
"loss": 0.3995,
|
| 3328 |
+
"step": 4740
|
| 3329 |
+
},
|
| 3330 |
+
{
|
| 3331 |
+
"epoch": 1.9491177677472302,
|
| 3332 |
+
"grad_norm": 106.94138078015894,
|
| 3333 |
+
"learning_rate": 2.0027760104929237e-09,
|
| 3334 |
+
"loss": 0.4225,
|
| 3335 |
+
"step": 4750
|
| 3336 |
+
},
|
| 3337 |
+
{
|
| 3338 |
+
"epoch": 1.9532211735740663,
|
| 3339 |
+
"grad_norm": 49.77724139490777,
|
| 3340 |
+
"learning_rate": 1.695323586089481e-09,
|
| 3341 |
+
"loss": 0.4118,
|
| 3342 |
+
"step": 4760
|
| 3343 |
+
},
|
| 3344 |
+
{
|
| 3345 |
+
"epoch": 1.9573245794009027,
|
| 3346 |
+
"grad_norm": 18.78674139341447,
|
| 3347 |
+
"learning_rate": 1.4134367385594815e-09,
|
| 3348 |
+
"loss": 0.4224,
|
| 3349 |
+
"step": 4770
|
| 3350 |
+
},
|
| 3351 |
+
{
|
| 3352 |
+
"epoch": 1.961427985227739,
|
| 3353 |
+
"grad_norm": 42.0269831384406,
|
| 3354 |
+
"learning_rate": 1.157129930139056e-09,
|
| 3355 |
+
"loss": 0.3956,
|
| 3356 |
+
"step": 4780
|
| 3357 |
+
},
|
| 3358 |
+
{
|
| 3359 |
+
"epoch": 1.9655313910545753,
|
| 3360 |
+
"grad_norm": 75.70041724600561,
|
| 3361 |
+
"learning_rate": 9.264163106774137e-10,
|
| 3362 |
+
"loss": 0.4388,
|
| 3363 |
+
"step": 4790
|
| 3364 |
+
},
|
| 3365 |
+
{
|
| 3366 |
+
"epoch": 1.9696347968814116,
|
| 3367 |
+
"grad_norm": 244.540271976536,
|
| 3368 |
+
"learning_rate": 7.213077169625492e-10,
|
| 3369 |
+
"loss": 0.4074,
|
| 3370 |
+
"step": 4800
|
| 3371 |
+
},
|
| 3372 |
+
{
|
| 3373 |
+
"epoch": 1.973738202708248,
|
| 3374 |
+
"grad_norm": 34.37503463179345,
|
| 3375 |
+
"learning_rate": 5.418146721136163e-10,
|
| 3376 |
+
"loss": 0.4761,
|
| 3377 |
+
"step": 4810
|
| 3378 |
+
},
|
| 3379 |
+
{
|
| 3380 |
+
"epoch": 1.9778416085350843,
|
| 3381 |
+
"grad_norm": 39.23485406355468,
|
| 3382 |
+
"learning_rate": 3.8794638504136003e-10,
|
| 3383 |
+
"loss": 0.4033,
|
| 3384 |
+
"step": 4820
|
| 3385 |
+
},
|
| 3386 |
+
{
|
| 3387 |
+
"epoch": 1.9819450143619204,
|
| 3388 |
+
"grad_norm": 154.91161427299485,
|
| 3389 |
+
"learning_rate": 2.5971074997532816e-10,
|
| 3390 |
+
"loss": 0.3972,
|
| 3391 |
+
"step": 4830
|
| 3392 |
+
},
|
| 3393 |
+
{
|
| 3394 |
+
"epoch": 1.9860484201887567,
|
| 3395 |
+
"grad_norm": 128.94739515413087,
|
| 3396 |
+
"learning_rate": 1.5711434605908401e-10,
|
| 3397 |
+
"loss": 0.476,
|
| 3398 |
+
"step": 4840
|
| 3399 |
+
},
|
| 3400 |
+
{
|
| 3401 |
+
"epoch": 1.9901518260155928,
|
| 3402 |
+
"grad_norm": 179.55554660829202,
|
| 3403 |
+
"learning_rate": 8.016243701242099e-11,
|
| 3404 |
+
"loss": 0.4148,
|
| 3405 |
+
"step": 4850
|
| 3406 |
+
},
|
| 3407 |
+
{
|
| 3408 |
+
"epoch": 1.9942552318424291,
|
| 3409 |
+
"grad_norm": 34.39804027289527,
|
| 3410 |
+
"learning_rate": 2.8858970861744824e-11,
|
| 3411 |
+
"loss": 0.4305,
|
| 3412 |
+
"step": 4860
|
| 3413 |
+
},
|
| 3414 |
+
{
|
| 3415 |
+
"epoch": 1.9983586376692655,
|
| 3416 |
+
"grad_norm": 30.39425994820148,
|
| 3417 |
+
"learning_rate": 3.2065797370139923e-12,
|
| 3418 |
+
"loss": 0.3936,
|
| 3419 |
+
"step": 4870
|
| 3420 |
+
},
|
| 3421 |
+
{
|
| 3422 |
+
"epoch": 2.0,
|
| 3423 |
+
"step": 4874,
|
| 3424 |
+
"total_flos": 688709313101824.0,
|
| 3425 |
+
"train_loss": 0.4963684876922902,
|
| 3426 |
+
"train_runtime": 35426.998,
|
| 3427 |
+
"train_samples_per_second": 2.201,
|
| 3428 |
+
"train_steps_per_second": 0.138
|
| 3429 |
+
}
|
| 3430 |
+
],
|
| 3431 |
+
"logging_steps": 10,
|
| 3432 |
+
"max_steps": 4874,
|
| 3433 |
+
"num_input_tokens_seen": 0,
|
| 3434 |
+
"num_train_epochs": 2,
|
| 3435 |
+
"save_steps": 1000,
|
| 3436 |
+
"stateful_callbacks": {
|
| 3437 |
+
"TrainerControl": {
|
| 3438 |
+
"args": {
|
| 3439 |
+
"should_epoch_stop": false,
|
| 3440 |
+
"should_evaluate": false,
|
| 3441 |
+
"should_log": false,
|
| 3442 |
+
"should_save": true,
|
| 3443 |
+
"should_training_stop": true
|
| 3444 |
+
},
|
| 3445 |
+
"attributes": {}
|
| 3446 |
+
}
|
| 3447 |
+
},
|
| 3448 |
+
"total_flos": 688709313101824.0,
|
| 3449 |
+
"train_batch_size": 1,
|
| 3450 |
+
"trial_name": null,
|
| 3451 |
+
"trial_params": null
|
| 3452 |
+
}
|
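The `log_history` entries above record `step`, `loss`, `learning_rate`, and `grad_norm` every 10 optimizer steps, and the final record holds the aggregate train statistics. A minimal sketch for replaying that history into a loss curve similar to the bundled `training_loss.png` (it assumes `trainer_state.json` sits in the working directory and that `matplotlib` is installed; the output filename is illustrative):

```python
import json

import matplotlib.pyplot as plt

# Load the trainer state written by the HF Trainer.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step log entries; the last record carries aggregate
# stats ("train_loss", "train_runtime", ...) instead of a "loss" key.
logs = [e for e in state["log_history"] if "loss" in e and "step" in e]

steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title("Qwen2.5-VL-7B fine-tune (4874 steps, 2 epochs)")
plt.savefig("training_loss_replot.png", dpi=150)
```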
training_args.bin
ADDED
|
@@ -0,0 +1,3 @@
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:9d032f1c694a06bba5593e9ac3ba3fe93df68eaace866e251c28678ce114a4fd
|
| 3 |
+
size 8209
|
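`training_args.bin` is the pickled `TrainingArguments` object that the HF `Trainer` saves alongside its outputs. A hedged sketch for inspecting it (recent PyTorch needs `weights_only=False` here because the file is an arbitrary pickled object, so only load files you trust):

```python
import torch

# Deserialize the TrainingArguments saved by the Trainer.
args = torch.load("training_args.bin", weights_only=False)

# A few representative fields.
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```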
training_loss.png
ADDED
|
video_preprocessor_config.json
ADDED
|
@@ -0,0 +1,43 @@
|
|

| 1 |
+
{
|
| 2 |
+
"crop_size": null,
|
| 3 |
+
"data_format": "channels_first",
|
| 4 |
+
"default_to_square": true,
|
| 5 |
+
"device": null,
|
| 6 |
+
"do_center_crop": null,
|
| 7 |
+
"do_convert_rgb": true,
|
| 8 |
+
"do_normalize": true,
|
| 9 |
+
"do_pad": null,
|
| 10 |
+
"do_rescale": true,
|
| 11 |
+
"do_resize": true,
|
| 12 |
+
"do_sample_frames": false,
|
| 13 |
+
"fps": null,
|
| 14 |
+
"image_mean": [
|
| 15 |
+
0.48145466,
|
| 16 |
+
0.4578275,
|
| 17 |
+
0.40821073
|
| 18 |
+
],
|
| 19 |
+
"image_std": [
|
| 20 |
+
0.26862954,
|
| 21 |
+
0.26130258,
|
| 22 |
+
0.27577711
|
| 23 |
+
],
|
| 24 |
+
"input_data_format": null,
|
| 25 |
+
"max_frames": 768,
|
| 26 |
+
"max_pixels": 12845056,
|
| 27 |
+
"merge_size": 2,
|
| 28 |
+
"min_frames": 4,
|
| 29 |
+
"min_pixels": 3136,
|
| 30 |
+
"num_frames": null,
|
| 31 |
+
"patch_size": 14,
|
| 32 |
+
"processor_class": "Qwen2_5_VLProcessor",
|
| 33 |
+
"resample": 3,
|
| 34 |
+
"rescale_factor": 0.00392156862745098,
|
| 35 |
+
"size": {
|
| 36 |
+
"longest_edge": 12845056,
|
| 37 |
+
"shortest_edge": 3136
|
| 38 |
+
},
|
| 39 |
+
"size_divisor": null,
|
| 40 |
+
"temporal_patch_size": 2,
|
| 41 |
+
"video_metadata": null,
|
| 42 |
+
"video_processor_type": "Qwen2VLVideoProcessor"
|
| 43 |
+
}
|
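This video preprocessor config is picked up together with `preprocessor_config.json` and the tokenizer files when the repository is loaded through `AutoProcessor`. A minimal sketch (the repository id is a placeholder for wherever this model is hosted):

```python
from transformers import AutoProcessor

# Loads the tokenizer, image preprocessor, and video preprocessor for Qwen2.5-VL.
processor = AutoProcessor.from_pretrained("your-namespace/this-model")

# patch_size=14, merge_size=2, and the min/max pixel budget above determine how
# many vision tokens an image or video frame is turned into before the LLM sees it.
print(processor.image_processor.size)
```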
vocab.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|