yujianll committed
Commit 508146e · verified · 1 Parent(s): 623a217

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,28 @@
+ {
+ "</think>": 151668,
+ "</tool_call>": 151658,
+ "</tool_response>": 151666,
+ "<think>": 151667,
+ "<tool_call>": 151657,
+ "<tool_response>": 151665,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
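These entries extend the base vocabulary with chat, tool-use, and thinking delimiters. A minimal sketch of checking the mapping from Python; the model identifier below is a placeholder, not the actual repo id:

```python
from transformers import AutoTokenizer

MODEL_ID = "path/to/this/repo"  # hypothetical; substitute the real repo id or a local path

tok = AutoTokenizer.from_pretrained(MODEL_ID)
# Added tokens should resolve to the IDs listed in added_tokens.json.
assert tok.convert_tokens_to_ids("<think>") == 151667
assert tok.convert_tokens_to_ids("</think>") == 151668
assert tok.convert_tokens_to_ids("<|im_end|>") == 151645
```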
config.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "architectures": [
+ "Qwen3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 2560,
+ "initializer_range": 0.02,
+ "intermediate_size": 9728,
+ "max_position_embeddings": 40960,
+ "max_window_layers": 36,
+ "model_type": "qwen3",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 36,
+ "num_key_value_heads": 8,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000,
+ "sliding_window": null,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.51.3",
+ "use_cache": false,
+ "use_sliding_window": false,
+ "vocab_size": 151936
+ }
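The config describes a 36-layer Qwen3 causal LM with grouped-query attention (32 query heads, 8 key/value heads, explicit head_dim of 128) and tied input/output embeddings. A minimal loading sketch, assuming the placeholder id below points at this checkpoint:

```python
import torch
from transformers import AutoModelForCausalLM

MODEL_ID = "path/to/this/repo"  # hypothetical; substitute the real repo id

# torch_dtype matches the checkpoint's bfloat16 weights; device_map="auto"
# lets accelerate place the two safetensors shards on available devices.
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
print(model.config.num_hidden_layers)  # 36
```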
generation_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "temperature": 0.6,
+ "top_k": 20,
+ "top_p": 0.95,
+ "transformers_version": "4.51.3"
+ }
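These defaults mean plain generate() calls sample (temperature 0.6, top-k 20, top-p 0.95) rather than decode greedily, and stop on either <|im_end|> or <|endoftext|>. A hedged usage sketch, with the same placeholder id:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "path/to/this/repo"  # hypothetical placeholder

tok = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, device_map="auto")

inputs = tok("Hello, world", return_tensors="pt").to(model.device)
# Sampling settings come from generation_config.json unless overridden here.
out = model.generate(**inputs, max_new_tokens=32)
print(tok.decode(out[0], skip_special_tokens=True))
```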
merges.txt ADDED
The diff for this file is too large to render.
 
model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4cc8c146d474d7d98eec0071445bedeac8d9c45a697faa6d617d16720e6b0c15
+ size 4967215360
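The two shard files are stored as Git LFS pointers; only the spec version, SHA-256 object id, and byte size live in the repo. A small sketch for verifying a downloaded shard against the oid above, assuming it sits in the current directory:

```python
import hashlib

SHARD = "model-00001-of-00002.safetensors"  # assumed local download path
EXPECTED = "4cc8c146d474d7d98eec0071445bedeac8d9c45a697faa6d617d16720e6b0c15"

h = hashlib.sha256()
with open(SHARD, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
assert h.hexdigest() == EXPECTED, "shard does not match the LFS oid"
```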
model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe4a0dd5557f91ccc799fc03f1e1e12c90684fa91240a051b0ff939c70414034
+ size 3855679144
model.safetensors.index.json ADDED
@@ -0,0 +1,406 @@
+ {
+ "metadata": {
+ "total_size": 8822848512
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00002-of-00002.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.31.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.31.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.32.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.32.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.32.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.33.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.33.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.33.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.34.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.34.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.34.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.34.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.34.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.34.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.34.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.34.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.34.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.34.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.34.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.35.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.35.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.35.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.35.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.35.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.35.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.35.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.35.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.35.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.35.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.35.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.norm.weight": "model-00002-of-00002.safetensors"
+ }
+ }
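The index maps every tensor to one of the two shards: the embeddings and layers 0–19 sit mostly in shard 1, layers 21–35 plus the final norm and lm_head in shard 2, and layer 20 straddles the boundary (its layernorms and mlp.down_proj in shard 2, the rest in shard 1). A minimal sketch of reading the map, assuming a local checkout of this repo:

```python
import json

with open("model.safetensors.index.json") as f:  # assumed local checkout
    index = json.load(f)

# Loaders consult weight_map to open only the shard that holds a tensor.
print(index["weight_map"]["model.embed_tokens.weight"])  # model-00001-of-00002.safetensors
print(index["weight_map"]["model.norm.weight"])          # model-00002-of-00002.safetensors
print(index["metadata"]["total_size"])                   # 8822848512 (bytes)
```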
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
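The same mapping is reachable through the tokenizer's attributes once loaded; a quick check, again with a placeholder id:

```python
from transformers import AutoTokenizer

MODEL_ID = "path/to/this/repo"  # hypothetical placeholder

tok = AutoTokenizer.from_pretrained(MODEL_ID)
print(tok.eos_token, tok.eos_token_id)  # <|im_end|> 151645
print(tok.pad_token, tok.pad_token_id)  # <|endoftext|> 151643
```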
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
+ size 11422654
tokenizer_config.json ADDED
@@ -0,0 +1,241 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151665": {
+ "content": "<tool_response>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151666": {
+ "content": "</tool_response>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151667": {
+ "content": "<think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151668": {
+ "content": "</think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0].role == 'system' %}\n {{- messages[0].content + '\\n\\n' }}\n {%- endif %}\n {{- \"# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0].role == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0].content + '<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- set ns = namespace(multi_step_tool=true, last_query_index=messages|length - 1) %}\n{%- for message in messages[::-1] %}\n {%- set index = (messages|length - 1) - loop.index0 %}\n {%- if ns.multi_step_tool and message.role == \"user\" and message.content is string and not(message.content.startswith('<tool_response>') and message.content.endswith('</tool_response>')) %}\n {%- set ns.multi_step_tool = false %}\n {%- set ns.last_query_index = index %}\n {%- endif %}\n{%- endfor %}\n{%- for message in messages %}\n {%- if message.content is string %}\n {%- set content = message.content %}\n {%- else %}\n {%- set content = '' %}\n {%- endif %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) %}\n {{- '<|im_start|>' + message.role + '\\n' + content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {%- set reasoning_content = '' %}\n {%- if message.reasoning_content is string %}\n {%- set reasoning_content = message.reasoning_content %}\n {%- else %}\n {%- if '</think>' in content %}\n {%- set reasoning_content = content.split('</think>')[0].rstrip('\\n').split('<think>')[-1].lstrip('\\n') %}\n {%- set content = content.split('</think>')[-1].lstrip('\\n') %}\n {%- endif %}\n {%- endif %}\n {%- if loop.index0 > ns.last_query_index %}\n {%- if loop.last or (not loop.last and reasoning_content) %}\n {{- '<|im_start|>' + message.role + '\\n<think>\\n' + reasoning_content.strip('\\n') + '\\n</think>\\n\\n' + content.lstrip('\\n') }}\n {%- else %}\n {{- '<|im_start|>' + message.role + '\\n' + content }}\n {%- endif %}\n {%- else %}\n {{- '<|im_start|>' + message.role + '\\n' + content }}\n {%- endif %}\n {%- if message.tool_calls %}\n {%- for tool_call in message.tool_calls %}\n {%- if (loop.first and content) or (not loop.first) %}\n {{- '\\n' }}\n {%- endif %}\n {%- if tool_call.function %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {%- if tool_call.arguments is string %}\n {{- tool_call.arguments }}\n {%- else %}\n {{- tool_call.arguments | tojson }}\n {%- endif %}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {%- endif %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if loop.first or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n {%- if enable_thinking is defined and enable_thinking is false %}\n {{- '<think>\\n\\n</think>\\n\\n' }}\n {%- endif %}\n{%- endif %}",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "extra_special_tokens": {},
+ "model_max_length": 131072,
+ "pad_token": "<|endoftext|>",
+ "padding_side": "right",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
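The Jinja chat template above handles tool definitions, multi-step tool responses, and optional <think> reasoning blocks; when enable_thinking is false it emits an empty think block right after the generation prompt. A hedged rendering sketch (recent transformers versions forward extra keyword arguments such as enable_thinking into the template):

```python
from transformers import AutoTokenizer

MODEL_ID = "path/to/this/repo"  # hypothetical placeholder

tok = AutoTokenizer.from_pretrained(MODEL_ID)
messages = [{"role": "user", "content": "What is 2 + 2?"}]

prompt = tok.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,  # appends '<|im_start|>assistant\n'
    enable_thinking=False,       # template then emits '<think>\n\n</think>\n\n'
)
print(prompt)
```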
trainer_state.json ADDED
@@ -0,0 +1,2274 @@
+ {
+ "best_global_step": null,
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 9.817204301075268,
+ "eval_steps": 500,
+ "global_step": 1600,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.030721966205837174,
+ "grad_norm": 5.377892017364502,
+ "learning_rate": 1.6460905349794242e-07,
+ "loss": 0.6829,
+ "step": 5
+ },
+ {
+ "epoch": 0.06144393241167435,
+ "grad_norm": 4.802157878875732,
+ "learning_rate": 3.7037037037037036e-07,
+ "loss": 0.6782,
+ "step": 10
+ },
+ {
+ "epoch": 0.09216589861751152,
+ "grad_norm": 4.367337226867676,
+ "learning_rate": 5.761316872427984e-07,
+ "loss": 0.6663,
+ "step": 15
+ },
+ {
+ "epoch": 0.1228878648233487,
+ "grad_norm": 3.747973918914795,
+ "learning_rate": 7.818930041152265e-07,
+ "loss": 0.655,
+ "step": 20
+ },
+ {
+ "epoch": 0.15360983102918588,
+ "grad_norm": 2.209118127822876,
+ "learning_rate": 9.876543209876544e-07,
+ "loss": 0.6173,
+ "step": 25
+ },
+ {
+ "epoch": 0.18433179723502305,
+ "grad_norm": 1.1678818464279175,
+ "learning_rate": 1.1934156378600823e-06,
+ "loss": 0.6048,
+ "step": 30
+ },
+ {
+ "epoch": 0.21505376344086022,
+ "grad_norm": 0.8436072468757629,
+ "learning_rate": 1.3991769547325104e-06,
+ "loss": 0.5856,
+ "step": 35
+ },
+ {
+ "epoch": 0.2457757296466974,
+ "grad_norm": 0.9035472273826599,
+ "learning_rate": 1.6049382716049383e-06,
+ "loss": 0.5731,
+ "step": 40
+ },
+ {
+ "epoch": 0.2764976958525346,
+ "grad_norm": 0.6879045963287354,
+ "learning_rate": 1.8106995884773665e-06,
+ "loss": 0.5672,
+ "step": 45
+ },
+ {
+ "epoch": 0.30721966205837176,
+ "grad_norm": 0.5572217106819153,
+ "learning_rate": 2.0164609053497946e-06,
+ "loss": 0.5652,
+ "step": 50
+ },
+ {
+ "epoch": 0.3379416282642089,
+ "grad_norm": 0.475868821144104,
+ "learning_rate": 2.222222222222222e-06,
+ "loss": 0.5569,
+ "step": 55
+ },
+ {
+ "epoch": 0.3686635944700461,
+ "grad_norm": 0.4037950932979584,
+ "learning_rate": 2.4279835390946504e-06,
+ "loss": 0.552,
+ "step": 60
+ },
+ {
+ "epoch": 0.39938556067588327,
+ "grad_norm": 0.3646543323993683,
+ "learning_rate": 2.6337448559670788e-06,
+ "loss": 0.5486,
+ "step": 65
+ },
+ {
+ "epoch": 0.43010752688172044,
+ "grad_norm": 0.3199547231197357,
+ "learning_rate": 2.8395061728395062e-06,
+ "loss": 0.548,
+ "step": 70
+ },
+ {
+ "epoch": 0.4608294930875576,
+ "grad_norm": 0.29529860615730286,
+ "learning_rate": 3.0452674897119346e-06,
+ "loss": 0.5412,
+ "step": 75
+ },
+ {
+ "epoch": 0.4915514592933948,
+ "grad_norm": 0.2877354025840759,
+ "learning_rate": 3.2510288065843625e-06,
+ "loss": 0.5384,
+ "step": 80
+ },
+ {
+ "epoch": 0.522273425499232,
+ "grad_norm": 0.27960506081581116,
+ "learning_rate": 3.4567901234567904e-06,
+ "loss": 0.5408,
+ "step": 85
+ },
+ {
+ "epoch": 0.5529953917050692,
+ "grad_norm": 0.3203478455543518,
+ "learning_rate": 3.6625514403292183e-06,
+ "loss": 0.5385,
+ "step": 90
+ },
+ {
+ "epoch": 0.5837173579109063,
+ "grad_norm": 0.293573260307312,
+ "learning_rate": 3.868312757201647e-06,
+ "loss": 0.5367,
+ "step": 95
+ },
+ {
+ "epoch": 0.6144393241167435,
+ "grad_norm": 0.2991442084312439,
+ "learning_rate": 4.074074074074074e-06,
+ "loss": 0.5318,
+ "step": 100
+ },
+ {
+ "epoch": 0.6451612903225806,
+ "grad_norm": 0.28697723150253296,
+ "learning_rate": 4.2798353909465025e-06,
+ "loss": 0.5325,
+ "step": 105
+ },
+ {
+ "epoch": 0.6758832565284179,
+ "grad_norm": 0.29979708790779114,
+ "learning_rate": 4.485596707818931e-06,
+ "loss": 0.5329,
+ "step": 110
+ },
+ {
+ "epoch": 0.706605222734255,
+ "grad_norm": 0.28516969084739685,
+ "learning_rate": 4.691358024691358e-06,
+ "loss": 0.5303,
+ "step": 115
+ },
+ {
+ "epoch": 0.7373271889400922,
+ "grad_norm": 0.2979312837123871,
+ "learning_rate": 4.897119341563787e-06,
+ "loss": 0.532,
+ "step": 120
+ },
+ {
+ "epoch": 0.7680491551459293,
+ "grad_norm": 0.29658472537994385,
+ "learning_rate": 5.102880658436214e-06,
+ "loss": 0.5361,
+ "step": 125
+ },
+ {
+ "epoch": 0.7987711213517665,
+ "grad_norm": 0.3170669972896576,
+ "learning_rate": 5.3086419753086425e-06,
+ "loss": 0.5307,
+ "step": 130
+ },
+ {
+ "epoch": 0.8294930875576036,
+ "grad_norm": 0.3079938590526581,
+ "learning_rate": 5.514403292181071e-06,
+ "loss": 0.5277,
+ "step": 135
+ },
+ {
+ "epoch": 0.8602150537634409,
+ "grad_norm": 0.33612361550331116,
+ "learning_rate": 5.720164609053498e-06,
+ "loss": 0.5298,
+ "step": 140
+ },
+ {
+ "epoch": 0.890937019969278,
+ "grad_norm": 0.3119032382965088,
+ "learning_rate": 5.925925925925926e-06,
+ "loss": 0.5248,
+ "step": 145
+ },
+ {
+ "epoch": 0.9216589861751152,
+ "grad_norm": 0.33814293146133423,
+ "learning_rate": 6.131687242798354e-06,
+ "loss": 0.5284,
+ "step": 150
+ },
+ {
+ "epoch": 0.9523809523809523,
+ "grad_norm": 0.322230726480484,
+ "learning_rate": 6.3374485596707825e-06,
+ "loss": 0.5281,
+ "step": 155
+ },
+ {
+ "epoch": 0.9831029185867896,
+ "grad_norm": 0.3144535720348358,
+ "learning_rate": 6.543209876543211e-06,
+ "loss": 0.5242,
+ "step": 160
+ },
+ {
+ "epoch": 1.012288786482335,
+ "grad_norm": 0.29515042901039124,
+ "learning_rate": 6.748971193415639e-06,
+ "loss": 0.5163,
+ "step": 165
+ },
+ {
+ "epoch": 1.043010752688172,
+ "grad_norm": 0.3272690176963806,
+ "learning_rate": 6.954732510288067e-06,
+ "loss": 0.5168,
+ "step": 170
+ },
+ {
+ "epoch": 1.0737327188940091,
+ "grad_norm": 0.38112202286720276,
+ "learning_rate": 7.160493827160494e-06,
+ "loss": 0.5175,
+ "step": 175
+ },
+ {
+ "epoch": 1.1044546850998465,
+ "grad_norm": 0.3627144992351532,
+ "learning_rate": 7.3662551440329225e-06,
+ "loss": 0.5146,
+ "step": 180
+ },
+ {
+ "epoch": 1.1351766513056836,
+ "grad_norm": 0.34759828448295593,
+ "learning_rate": 7.57201646090535e-06,
+ "loss": 0.5091,
+ "step": 185
+ },
+ {
+ "epoch": 1.1658986175115207,
+ "grad_norm": 0.32023346424102783,
+ "learning_rate": 7.77777777777778e-06,
+ "loss": 0.5154,
+ "step": 190
+ },
+ {
+ "epoch": 1.1966205837173578,
+ "grad_norm": 0.34595441818237305,
+ "learning_rate": 7.983539094650207e-06,
+ "loss": 0.514,
+ "step": 195
+ },
+ {
+ "epoch": 1.2273425499231951,
+ "grad_norm": 0.35786375403404236,
+ "learning_rate": 8.189300411522634e-06,
+ "loss": 0.515,
+ "step": 200
+ },
+ {
+ "epoch": 1.2580645161290323,
+ "grad_norm": 0.3521522283554077,
+ "learning_rate": 8.395061728395062e-06,
+ "loss": 0.5096,
+ "step": 205
+ },
+ {
+ "epoch": 1.2887864823348694,
+ "grad_norm": 0.3382728397846222,
302
+ "learning_rate": 8.60082304526749e-06,
303
+ "loss": 0.5092,
304
+ "step": 210
305
+ },
306
+ {
307
+ "epoch": 1.3195084485407067,
308
+ "grad_norm": 0.3584599494934082,
309
+ "learning_rate": 8.806584362139918e-06,
310
+ "loss": 0.5088,
311
+ "step": 215
312
+ },
313
+ {
314
+ "epoch": 1.3502304147465438,
315
+ "grad_norm": 0.3886154294013977,
316
+ "learning_rate": 9.012345679012346e-06,
317
+ "loss": 0.5103,
318
+ "step": 220
319
+ },
320
+ {
321
+ "epoch": 1.380952380952381,
322
+ "grad_norm": 0.35392460227012634,
323
+ "learning_rate": 9.218106995884775e-06,
324
+ "loss": 0.5122,
325
+ "step": 225
326
+ },
327
+ {
328
+ "epoch": 1.411674347158218,
329
+ "grad_norm": 0.3449483811855316,
330
+ "learning_rate": 9.423868312757202e-06,
331
+ "loss": 0.5101,
332
+ "step": 230
333
+ },
334
+ {
335
+ "epoch": 1.4423963133640554,
336
+ "grad_norm": 0.40504640340805054,
337
+ "learning_rate": 9.62962962962963e-06,
338
+ "loss": 0.5079,
339
+ "step": 235
340
+ },
341
+ {
342
+ "epoch": 1.4731182795698925,
343
+ "grad_norm": 0.3839814066886902,
344
+ "learning_rate": 9.835390946502057e-06,
345
+ "loss": 0.5075,
346
+ "step": 240
347
+ },
348
+ {
349
+ "epoch": 1.5038402457757296,
350
+ "grad_norm": 0.3998360335826874,
351
+ "learning_rate": 9.999994841278135e-06,
352
+ "loss": 0.5117,
353
+ "step": 245
354
+ },
355
+ {
356
+ "epoch": 1.5345622119815667,
357
+ "grad_norm": 0.3241407573223114,
358
+ "learning_rate": 9.99981428713058e-06,
359
+ "loss": 0.5116,
360
+ "step": 250
361
+ },
362
+ {
363
+ "epoch": 1.565284178187404,
364
+ "grad_norm": 0.3408064544200897,
365
+ "learning_rate": 9.999375807534642e-06,
366
+ "loss": 0.5086,
367
+ "step": 255
368
+ },
369
+ {
370
+ "epoch": 1.5960061443932412,
371
+ "grad_norm": 0.3956799805164337,
372
+ "learning_rate": 9.998679425110168e-06,
373
+ "loss": 0.5057,
374
+ "step": 260
375
+ },
376
+ {
377
+ "epoch": 1.6267281105990783,
378
+ "grad_norm": 0.34674304723739624,
379
+ "learning_rate": 9.997725175781445e-06,
380
+ "loss": 0.5042,
381
+ "step": 265
382
+ },
383
+ {
384
+ "epoch": 1.6574500768049156,
385
+ "grad_norm": 0.33803871273994446,
386
+ "learning_rate": 9.996513108775338e-06,
387
+ "loss": 0.5094,
388
+ "step": 270
389
+ },
390
+ {
391
+ "epoch": 1.6881720430107527,
392
+ "grad_norm": 0.3286557197570801,
393
+ "learning_rate": 9.995043286618752e-06,
394
+ "loss": 0.5082,
395
+ "step": 275
396
+ },
397
+ {
398
+ "epoch": 1.7188940092165899,
399
+ "grad_norm": 0.4859721064567566,
400
+ "learning_rate": 9.993315785135417e-06,
401
+ "loss": 0.5062,
402
+ "step": 280
403
+ },
404
+ {
405
+ "epoch": 1.7496159754224272,
406
+ "grad_norm": 0.39187705516815186,
407
+ "learning_rate": 9.991330693441956e-06,
408
+ "loss": 0.5004,
409
+ "step": 285
410
+ },
411
+ {
412
+ "epoch": 1.780337941628264,
413
+ "grad_norm": 0.3706142008304596,
414
+ "learning_rate": 9.989088113943309e-06,
415
+ "loss": 0.5074,
416
+ "step": 290
417
+ },
418
+ {
419
+ "epoch": 1.8110599078341014,
420
+ "grad_norm": 0.36376601457595825,
421
+ "learning_rate": 9.986588162327436e-06,
422
+ "loss": 0.5043,
423
+ "step": 295
424
+ },
425
+ {
426
+ "epoch": 1.8417818740399385,
427
+ "grad_norm": 0.3372829854488373,
428
+ "learning_rate": 9.983830967559355e-06,
429
+ "loss": 0.505,
430
+ "step": 300
431
+ },
432
+ {
433
+ "epoch": 1.8725038402457757,
434
+ "grad_norm": 0.3605220913887024,
435
+ "learning_rate": 9.98081667187449e-06,
436
+ "loss": 0.506,
437
+ "step": 305
438
+ },
439
+ {
440
+ "epoch": 1.903225806451613,
441
+ "grad_norm": 0.37473252415657043,
442
+ "learning_rate": 9.977545430771332e-06,
443
+ "loss": 0.5065,
444
+ "step": 310
445
+ },
446
+ {
447
+ "epoch": 1.93394777265745,
448
+ "grad_norm": 0.3622889816761017,
449
+ "learning_rate": 9.974017413003407e-06,
450
+ "loss": 0.5049,
451
+ "step": 315
452
+ },
453
+ {
454
+ "epoch": 1.9646697388632872,
455
+ "grad_norm": 0.36003556847572327,
456
+ "learning_rate": 9.970232800570594e-06,
457
+ "loss": 0.5042,
458
+ "step": 320
459
+ },
460
+ {
461
+ "epoch": 1.9953917050691246,
462
+ "grad_norm": 0.35878923535346985,
463
+ "learning_rate": 9.966191788709716e-06,
464
+ "loss": 0.498,
465
+ "step": 325
466
+ },
467
+ {
468
+ "epoch": 2.02457757296467,
469
+ "grad_norm": 0.3277081847190857,
470
+ "learning_rate": 9.961894585884472e-06,
471
+ "loss": 0.4833,
472
+ "step": 330
473
+ },
474
+ {
475
+ "epoch": 2.055299539170507,
476
+ "grad_norm": 0.35245636105537415,
477
+ "learning_rate": 9.957341413774693e-06,
478
+ "loss": 0.4823,
479
+ "step": 335
480
+ },
481
+ {
482
+ "epoch": 2.086021505376344,
483
+ "grad_norm": 0.3628138601779938,
484
+ "learning_rate": 9.952532507264892e-06,
485
+ "loss": 0.4789,
486
+ "step": 340
487
+ },
488
+ {
489
+ "epoch": 2.1167434715821813,
490
+ "grad_norm": 0.36662936210632324,
491
+ "learning_rate": 9.947468114432156e-06,
492
+ "loss": 0.4876,
493
+ "step": 345
494
+ },
495
+ {
496
+ "epoch": 2.1474654377880182,
497
+ "grad_norm": 0.3806234896183014,
498
+ "learning_rate": 9.942148496533348e-06,
499
+ "loss": 0.4797,
500
+ "step": 350
501
+ },
502
+ {
503
+ "epoch": 2.1781874039938556,
504
+ "grad_norm": 0.3836243152618408,
505
+ "learning_rate": 9.936573927991631e-06,
506
+ "loss": 0.4823,
507
+ "step": 355
508
+ },
509
+ {
510
+ "epoch": 2.208909370199693,
511
+ "grad_norm": 0.3716926872730255,
512
+ "learning_rate": 9.930744696382298e-06,
513
+ "loss": 0.4846,
514
+ "step": 360
515
+ },
516
+ {
517
+ "epoch": 2.23963133640553,
518
+ "grad_norm": 0.3589572608470917,
519
+ "learning_rate": 9.924661102417959e-06,
520
+ "loss": 0.4794,
521
+ "step": 365
522
+ },
523
+ {
524
+ "epoch": 2.270353302611367,
525
+ "grad_norm": 0.44799497723579407,
526
+ "learning_rate": 9.918323459933006e-06,
527
+ "loss": 0.4849,
528
+ "step": 370
529
+ },
530
+ {
531
+ "epoch": 2.3010752688172045,
532
+ "grad_norm": 0.35237064957618713,
533
+ "learning_rate": 9.911732095867443e-06,
534
+ "loss": 0.4819,
535
+ "step": 375
536
+ },
537
+ {
538
+ "epoch": 2.3317972350230414,
539
+ "grad_norm": 0.3844442665576935,
540
+ "learning_rate": 9.904887350250002e-06,
541
+ "loss": 0.4828,
542
+ "step": 380
543
+ },
544
+ {
545
+ "epoch": 2.3625192012288787,
546
+ "grad_norm": 0.34357205033302307,
547
+ "learning_rate": 9.897789576180617e-06,
548
+ "loss": 0.4795,
549
+ "step": 385
550
+ },
551
+ {
552
+ "epoch": 2.3932411674347156,
553
+ "grad_norm": 0.34739232063293457,
554
+ "learning_rate": 9.8904391398122e-06,
555
+ "loss": 0.4817,
556
+ "step": 390
557
+ },
558
+ {
559
+ "epoch": 2.423963133640553,
560
+ "grad_norm": 0.3262459337711334,
561
+ "learning_rate": 9.882836420331753e-06,
562
+ "loss": 0.4807,
563
+ "step": 395
564
+ },
565
+ {
566
+ "epoch": 2.4546850998463903,
567
+ "grad_norm": 0.32715994119644165,
568
+ "learning_rate": 9.87498180994081e-06,
569
+ "loss": 0.4825,
570
+ "step": 400
571
+ },
572
+ {
573
+ "epoch": 2.485407066052227,
574
+ "grad_norm": 0.3524874150753021,
575
+ "learning_rate": 9.8668757138352e-06,
576
+ "loss": 0.4832,
577
+ "step": 405
578
+ },
579
+ {
580
+ "epoch": 2.5161290322580645,
581
+ "grad_norm": 0.3556855618953705,
582
+ "learning_rate": 9.858518550184154e-06,
583
+ "loss": 0.4784,
584
+ "step": 410
585
+ },
586
+ {
587
+ "epoch": 2.546850998463902,
588
+ "grad_norm": 0.350763201713562,
589
+ "learning_rate": 9.849910750108718e-06,
590
+ "loss": 0.4796,
591
+ "step": 415
592
+ },
593
+ {
594
+ "epoch": 2.5775729646697387,
595
+ "grad_norm": 0.40554359555244446,
596
+ "learning_rate": 9.841052757659525e-06,
597
+ "loss": 0.4795,
598
+ "step": 420
599
+ },
600
+ {
601
+ "epoch": 2.608294930875576,
602
+ "grad_norm": 0.38155123591423035,
603
+ "learning_rate": 9.831945029793884e-06,
604
+ "loss": 0.4824,
605
+ "step": 425
606
+ },
607
+ {
608
+ "epoch": 2.6390168970814134,
609
+ "grad_norm": 0.34588319063186646,
610
+ "learning_rate": 9.822588036352201e-06,
611
+ "loss": 0.4812,
612
+ "step": 430
613
+ },
614
+ {
615
+ "epoch": 2.6697388632872503,
616
+ "grad_norm": 0.3738536536693573,
617
+ "learning_rate": 9.812982260033753e-06,
618
+ "loss": 0.4776,
619
+ "step": 435
620
+ },
621
+ {
622
+ "epoch": 2.7004608294930876,
623
+ "grad_norm": 0.34988853335380554,
624
+ "learning_rate": 9.803128196371778e-06,
625
+ "loss": 0.4827,
626
+ "step": 440
627
+ },
628
+ {
629
+ "epoch": 2.731182795698925,
630
+ "grad_norm": 0.3567947447299957,
631
+ "learning_rate": 9.793026353707915e-06,
632
+ "loss": 0.4824,
633
+ "step": 445
634
+ },
635
+ {
636
+ "epoch": 2.761904761904762,
637
+ "grad_norm": 0.3680736720561981,
638
+ "learning_rate": 9.782677253165979e-06,
639
+ "loss": 0.4817,
640
+ "step": 450
641
+ },
642
+ {
643
+ "epoch": 2.792626728110599,
644
+ "grad_norm": 0.3302510380744934,
645
+ "learning_rate": 9.77208142862508e-06,
646
+ "loss": 0.4799,
647
+ "step": 455
648
+ },
649
+ {
650
+ "epoch": 2.823348694316436,
651
+ "grad_norm": 0.3362921178340912,
652
+ "learning_rate": 9.761239426692077e-06,
653
+ "loss": 0.4792,
654
+ "step": 460
655
+ },
656
+ {
657
+ "epoch": 2.8540706605222734,
658
+ "grad_norm": 0.3084135949611664,
659
+ "learning_rate": 9.750151806673389e-06,
660
+ "loss": 0.4798,
661
+ "step": 465
662
+ },
663
+ {
664
+ "epoch": 2.8847926267281108,
665
+ "grad_norm": 0.3991258442401886,
666
+ "learning_rate": 9.738819140546135e-06,
667
+ "loss": 0.4776,
668
+ "step": 470
669
+ },
670
+ {
671
+ "epoch": 2.9155145929339477,
672
+ "grad_norm": 0.3840397298336029,
673
+ "learning_rate": 9.727242012928622e-06,
674
+ "loss": 0.4827,
675
+ "step": 475
676
+ },
677
+ {
678
+ "epoch": 2.946236559139785,
679
+ "grad_norm": 0.3366018235683441,
680
+ "learning_rate": 9.715421021050205e-06,
681
+ "loss": 0.478,
682
+ "step": 480
683
+ },
684
+ {
685
+ "epoch": 2.976958525345622,
686
+ "grad_norm": 0.3289054036140442,
687
+ "learning_rate": 9.703356774720454e-06,
688
+ "loss": 0.4806,
689
+ "step": 485
690
+ },
691
+ {
692
+ "epoch": 3.0061443932411676,
693
+ "grad_norm": 0.41949138045310974,
694
+ "learning_rate": 9.69104989629772e-06,
695
+ "loss": 0.474,
696
+ "step": 490
697
+ },
698
+ {
699
+ "epoch": 3.0368663594470044,
700
+ "grad_norm": 0.3739219605922699,
701
+ "learning_rate": 9.678501020657008e-06,
702
+ "loss": 0.4555,
703
+ "step": 495
704
+ },
705
+ {
706
+ "epoch": 3.067588325652842,
707
+ "grad_norm": 0.3918289244174957,
708
+ "learning_rate": 9.665710795157236e-06,
709
+ "loss": 0.4559,
710
+ "step": 500
711
+ },
712
+ {
713
+ "epoch": 3.098310291858679,
714
+ "grad_norm": 0.37078753113746643,
715
+ "learning_rate": 9.652679879607843e-06,
716
+ "loss": 0.4523,
717
+ "step": 505
718
+ },
719
+ {
720
+ "epoch": 3.129032258064516,
721
+ "grad_norm": 0.39428192377090454,
722
+ "learning_rate": 9.639408946234745e-06,
723
+ "loss": 0.455,
724
+ "step": 510
725
+ },
726
+ {
727
+ "epoch": 3.1597542242703534,
728
+ "grad_norm": 0.36103686690330505,
729
+ "learning_rate": 9.625898679645656e-06,
730
+ "loss": 0.4539,
731
+ "step": 515
732
+ },
733
+ {
734
+ "epoch": 3.1904761904761907,
735
+ "grad_norm": 0.3871241807937622,
736
+ "learning_rate": 9.612149776794776e-06,
737
+ "loss": 0.4585,
738
+ "step": 520
739
+ },
740
+ {
741
+ "epoch": 3.2211981566820276,
742
+ "grad_norm": 0.3608538508415222,
743
+ "learning_rate": 9.59816294694684e-06,
744
+ "loss": 0.4545,
745
+ "step": 525
746
+ },
747
+ {
748
+ "epoch": 3.251920122887865,
749
+ "grad_norm": 0.33820873498916626,
750
+ "learning_rate": 9.583938911640513e-06,
751
+ "loss": 0.4581,
752
+ "step": 530
753
+ },
754
+ {
755
+ "epoch": 3.282642089093702,
756
+ "grad_norm": 0.3311152160167694,
757
+ "learning_rate": 9.569478404651192e-06,
758
+ "loss": 0.4572,
759
+ "step": 535
760
+ },
761
+ {
762
+ "epoch": 3.313364055299539,
763
+ "grad_norm": 0.3974754512310028,
764
+ "learning_rate": 9.55478217195313e-06,
765
+ "loss": 0.4579,
766
+ "step": 540
767
+ },
768
+ {
769
+ "epoch": 3.3440860215053765,
770
+ "grad_norm": 0.36764049530029297,
771
+ "learning_rate": 9.53985097168097e-06,
772
+ "loss": 0.4548,
773
+ "step": 545
774
+ },
775
+ {
776
+ "epoch": 3.3748079877112134,
777
+ "grad_norm": 0.3310830295085907,
778
+ "learning_rate": 9.524685574090627e-06,
779
+ "loss": 0.4596,
780
+ "step": 550
781
+ },
782
+ {
783
+ "epoch": 3.4055299539170507,
784
+ "grad_norm": 0.35807356238365173,
785
+ "learning_rate": 9.50928676151955e-06,
786
+ "loss": 0.4561,
787
+ "step": 555
788
+ },
789
+ {
790
+ "epoch": 3.436251920122888,
791
+ "grad_norm": 0.3509482741355896,
792
+ "learning_rate": 9.493655328346378e-06,
793
+ "loss": 0.4601,
794
+ "step": 560
795
+ },
796
+ {
797
+ "epoch": 3.466973886328725,
798
+ "grad_norm": 0.32899123430252075,
799
+ "learning_rate": 9.477792080949938e-06,
800
+ "loss": 0.458,
801
+ "step": 565
802
+ },
803
+ {
804
+ "epoch": 3.4976958525345623,
805
+ "grad_norm": 0.3493783473968506,
806
+ "learning_rate": 9.461697837667668e-06,
807
+ "loss": 0.4578,
808
+ "step": 570
809
+ },
810
+ {
811
+ "epoch": 3.528417818740399,
812
+ "grad_norm": 0.42410966753959656,
813
+ "learning_rate": 9.445373428753386e-06,
814
+ "loss": 0.457,
815
+ "step": 575
816
+ },
817
+ {
818
+ "epoch": 3.5591397849462365,
819
+ "grad_norm": 0.39236894249916077,
820
+ "learning_rate": 9.42881969633447e-06,
821
+ "loss": 0.4621,
822
+ "step": 580
823
+ },
824
+ {
825
+ "epoch": 3.589861751152074,
826
+ "grad_norm": 0.3428690433502197,
827
+ "learning_rate": 9.412037494368412e-06,
828
+ "loss": 0.4613,
829
+ "step": 585
830
+ },
831
+ {
832
+ "epoch": 3.6205837173579107,
833
+ "grad_norm": 0.3500923216342926,
834
+ "learning_rate": 9.395027688598756e-06,
835
+ "loss": 0.4553,
836
+ "step": 590
837
+ },
838
+ {
839
+ "epoch": 3.651305683563748,
840
+ "grad_norm": 0.3521360456943512,
841
+ "learning_rate": 9.377791156510456e-06,
842
+ "loss": 0.4609,
843
+ "step": 595
844
+ },
845
+ {
846
+ "epoch": 3.6820276497695854,
847
+ "grad_norm": 0.3520371615886688,
848
+ "learning_rate": 9.360328787284587e-06,
849
+ "loss": 0.4561,
850
+ "step": 600
851
+ },
852
+ {
853
+ "epoch": 3.7127496159754223,
854
+ "grad_norm": 0.37317851185798645,
855
+ "learning_rate": 9.342641481752492e-06,
856
+ "loss": 0.4543,
857
+ "step": 605
858
+ },
859
+ {
860
+ "epoch": 3.7434715821812596,
861
+ "grad_norm": 0.3881990313529968,
862
+ "learning_rate": 9.324730152349305e-06,
863
+ "loss": 0.4573,
864
+ "step": 610
865
+ },
866
+ {
867
+ "epoch": 3.774193548387097,
868
+ "grad_norm": 0.34541720151901245,
869
+ "learning_rate": 9.306595723066878e-06,
870
+ "loss": 0.4588,
871
+ "step": 615
872
+ },
873
+ {
874
+ "epoch": 3.804915514592934,
875
+ "grad_norm": 0.6174806356430054,
876
+ "learning_rate": 9.28823912940612e-06,
877
+ "loss": 0.4615,
878
+ "step": 620
879
+ },
880
+ {
881
+ "epoch": 3.835637480798771,
882
+ "grad_norm": 0.37580618262290955,
883
+ "learning_rate": 9.26966131832873e-06,
884
+ "loss": 0.4603,
885
+ "step": 625
886
+ },
887
+ {
888
+ "epoch": 3.8663594470046085,
889
+ "grad_norm": 0.3373568058013916,
890
+ "learning_rate": 9.250863248208357e-06,
891
+ "loss": 0.4575,
892
+ "step": 630
893
+ },
894
+ {
895
+ "epoch": 3.8970814132104454,
896
+ "grad_norm": 0.3492389917373657,
897
+ "learning_rate": 9.231845888781153e-06,
898
+ "loss": 0.457,
899
+ "step": 635
900
+ },
901
+ {
902
+ "epoch": 3.9278033794162828,
903
+ "grad_norm": 0.353481262922287,
904
+ "learning_rate": 9.212610221095748e-06,
905
+ "loss": 0.4593,
906
+ "step": 640
907
+ },
908
+ {
909
+ "epoch": 3.9585253456221197,
910
+ "grad_norm": 0.339603066444397,
911
+ "learning_rate": 9.193157237462642e-06,
912
+ "loss": 0.4583,
913
+ "step": 645
914
+ },
915
+ {
916
+ "epoch": 3.989247311827957,
917
+ "grad_norm": 0.35986068844795227,
918
+ "learning_rate": 9.173487941403011e-06,
919
+ "loss": 0.4575,
920
+ "step": 650
921
+ },
922
+ {
923
+ "epoch": 4.018433179723503,
924
+ "grad_norm": 0.39629873633384705,
925
+ "learning_rate": 9.153603347596946e-06,
926
+ "loss": 0.4437,
927
+ "step": 655
928
+ },
929
+ {
930
+ "epoch": 4.04915514592934,
931
+ "grad_norm": 0.38085299730300903,
932
+ "learning_rate": 9.133504481831103e-06,
933
+ "loss": 0.4315,
934
+ "step": 660
935
+ },
936
+ {
937
+ "epoch": 4.0798771121351765,
938
+ "grad_norm": 0.375144898891449,
939
+ "learning_rate": 9.113192380945783e-06,
940
+ "loss": 0.4332,
941
+ "step": 665
942
+ },
943
+ {
944
+ "epoch": 4.110599078341014,
945
+ "grad_norm": 0.3690689206123352,
946
+ "learning_rate": 9.092668092781454e-06,
947
+ "loss": 0.4286,
948
+ "step": 670
949
+ },
950
+ {
951
+ "epoch": 4.141321044546851,
952
+ "grad_norm": 0.3713686764240265,
953
+ "learning_rate": 9.071932676124686e-06,
954
+ "loss": 0.4321,
955
+ "step": 675
956
+ },
957
+ {
958
+ "epoch": 4.172043010752688,
959
+ "grad_norm": 0.37255361676216125,
960
+ "learning_rate": 9.050987200653538e-06,
961
+ "loss": 0.4308,
962
+ "step": 680
963
+ },
964
+ {
965
+ "epoch": 4.202764976958525,
966
+ "grad_norm": 0.4153440296649933,
967
+ "learning_rate": 9.029832746882372e-06,
968
+ "loss": 0.434,
969
+ "step": 685
970
+ },
971
+ {
972
+ "epoch": 4.233486943164363,
973
+ "grad_norm": 0.3848015367984772,
974
+ "learning_rate": 9.008470406106118e-06,
975
+ "loss": 0.4321,
976
+ "step": 690
977
+ },
978
+ {
979
+ "epoch": 4.2642089093702,
980
+ "grad_norm": 0.38491949439048767,
981
+ "learning_rate": 8.986901280343973e-06,
982
+ "loss": 0.437,
983
+ "step": 695
984
+ },
985
+ {
986
+ "epoch": 4.2949308755760365,
987
+ "grad_norm": 0.40272125601768494,
988
+ "learning_rate": 8.96512648228255e-06,
989
+ "loss": 0.4327,
990
+ "step": 700
991
+ },
992
+ {
993
+ "epoch": 4.325652841781874,
994
+ "grad_norm": 0.40901532769203186,
995
+ "learning_rate": 8.943147135218482e-06,
996
+ "loss": 0.4355,
997
+ "step": 705
998
+ },
999
+ {
1000
+ "epoch": 4.356374807987711,
1001
+ "grad_norm": 0.37816864252090454,
1002
+ "learning_rate": 8.920964373000474e-06,
1003
+ "loss": 0.4309,
1004
+ "step": 710
1005
+ },
1006
+ {
1007
+ "epoch": 4.387096774193548,
1008
+ "grad_norm": 0.3686360716819763,
1009
+ "learning_rate": 8.898579339970806e-06,
1010
+ "loss": 0.4333,
1011
+ "step": 715
1012
+ },
1013
+ {
1014
+ "epoch": 4.417818740399386,
1015
+ "grad_norm": 0.3904341161251068,
1016
+ "learning_rate": 8.875993190906309e-06,
1017
+ "loss": 0.436,
1018
+ "step": 720
1019
+ },
1020
+ {
1021
+ "epoch": 4.448540706605223,
1022
+ "grad_norm": 0.369642049074173,
1023
+ "learning_rate": 8.85320709095878e-06,
1024
+ "loss": 0.4393,
1025
+ "step": 725
1026
+ },
1027
+ {
1028
+ "epoch": 4.47926267281106,
1029
+ "grad_norm": 0.39105841517448425,
1030
+ "learning_rate": 8.83022221559489e-06,
1031
+ "loss": 0.4335,
1032
+ "step": 730
1033
+ },
1034
+ {
1035
+ "epoch": 4.509984639016897,
1036
+ "grad_norm": 0.35647451877593994,
1037
+ "learning_rate": 8.80703975053554e-06,
1038
+ "loss": 0.4365,
1039
+ "step": 735
1040
+ },
1041
+ {
1042
+ "epoch": 4.540706605222734,
1043
+ "grad_norm": 0.37886905670166016,
1044
+ "learning_rate": 8.783660891694683e-06,
1045
+ "loss": 0.4358,
1046
+ "step": 740
1047
+ },
1048
+ {
1049
+ "epoch": 4.571428571428571,
1050
+ "grad_norm": 0.33613675832748413,
1051
+ "learning_rate": 8.760086845117648e-06,
1052
+ "loss": 0.4339,
1053
+ "step": 745
1054
+ },
1055
+ {
1056
+ "epoch": 4.602150537634409,
1057
+ "grad_norm": 0.3609409034252167,
1058
+ "learning_rate": 8.736318826918909e-06,
1059
+ "loss": 0.4367,
1060
+ "step": 750
1061
+ },
1062
+ {
1063
+ "epoch": 4.632872503840246,
1064
+ "grad_norm": 0.3324005603790283,
1065
+ "learning_rate": 8.71235806321936e-06,
1066
+ "loss": 0.4368,
1067
+ "step": 755
1068
+ },
1069
+ {
1070
+ "epoch": 4.663594470046083,
1071
+ "grad_norm": 0.34170496463775635,
1072
+ "learning_rate": 8.688205790083053e-06,
1073
+ "loss": 0.4364,
1074
+ "step": 760
1075
+ },
1076
+ {
1077
+ "epoch": 4.6943164362519205,
1078
+ "grad_norm": 0.3765306770801544,
1079
+ "learning_rate": 8.663863253453444e-06,
1080
+ "loss": 0.4381,
1081
+ "step": 765
1082
+ },
1083
+ {
1084
+ "epoch": 4.725038402457757,
1085
+ "grad_norm": 0.3638916611671448,
1086
+ "learning_rate": 8.639331709089107e-06,
1087
+ "loss": 0.438,
1088
+ "step": 770
1089
+ },
1090
+ {
1091
+ "epoch": 4.755760368663594,
1092
+ "grad_norm": 0.3378274738788605,
1093
+ "learning_rate": 8.614612422498965e-06,
1094
+ "loss": 0.4396,
1095
+ "step": 775
1096
+ },
1097
+ {
1098
+ "epoch": 4.786482334869431,
1099
+ "grad_norm": 0.3760294020175934,
1100
+ "learning_rate": 8.589706668876995e-06,
1101
+ "loss": 0.4387,
1102
+ "step": 780
1103
+ },
1104
+ {
1105
+ "epoch": 4.817204301075269,
1106
+ "grad_norm": 0.3364088535308838,
1107
+ "learning_rate": 8.564615733036457e-06,
1108
+ "loss": 0.4388,
1109
+ "step": 785
1110
+ },
1111
+ {
1112
+ "epoch": 4.847926267281106,
1113
+ "grad_norm": 0.3584051728248596,
1114
+ "learning_rate": 8.539340909343597e-06,
1115
+ "loss": 0.4355,
1116
+ "step": 790
1117
+ },
1118
+ {
1119
+ "epoch": 4.878648233486944,
1120
+ "grad_norm": 0.3589382469654083,
1121
+ "learning_rate": 8.513883501650892e-06,
1122
+ "loss": 0.4393,
1123
+ "step": 795
1124
+ },
1125
+ {
1126
+ "epoch": 4.9093701996927805,
1127
+ "grad_norm": 0.362913578748703,
1128
+ "learning_rate": 8.488244823229781e-06,
1129
+ "loss": 0.4391,
1130
+ "step": 800
1131
+ },
1132
+ {
1133
+ "epoch": 4.940092165898617,
1134
+ "grad_norm": 0.38569971919059753,
1135
+ "learning_rate": 8.462426196702912e-06,
1136
+ "loss": 0.44,
1137
+ "step": 805
1138
+ },
1139
+ {
1140
+ "epoch": 4.970814132104454,
1141
+ "grad_norm": 0.7799672484397888,
1142
+ "learning_rate": 8.436428953975921e-06,
1143
+ "loss": 0.4402,
1144
+ "step": 810
1145
+ },
1146
+ {
1147
+ "epoch": 5.0,
1148
+ "grad_norm": 0.35950392484664917,
1149
+ "learning_rate": 8.41025443616872e-06,
1150
+ "loss": 0.4385,
1151
+ "step": 815
1152
+ },
1153
+ {
1154
+ "epoch": 5.030721966205837,
1155
+ "grad_norm": 0.434950053691864,
1156
+ "learning_rate": 8.38390399354631e-06,
1157
+ "loss": 0.4124,
1158
+ "step": 820
1159
+ },
1160
+ {
1161
+ "epoch": 5.061443932411675,
1162
+ "grad_norm": 0.38890355825424194,
1163
+ "learning_rate": 8.357378985449124e-06,
1164
+ "loss": 0.4077,
1165
+ "step": 825
1166
+ },
1167
+ {
1168
+ "epoch": 5.092165898617512,
1169
+ "grad_norm": 0.3747502267360687,
1170
+ "learning_rate": 8.330680780222907e-06,
1171
+ "loss": 0.4116,
1172
+ "step": 830
1173
+ },
1174
+ {
1175
+ "epoch": 5.1228878648233485,
1176
+ "grad_norm": 0.4041999578475952,
1177
+ "learning_rate": 8.303810755148127e-06,
1178
+ "loss": 0.4125,
1179
+ "step": 835
1180
+ },
1181
+ {
1182
+ "epoch": 5.153609831029186,
1183
+ "grad_norm": 0.8506478667259216,
1184
+ "learning_rate": 8.276770296368922e-06,
1185
+ "loss": 0.4086,
1186
+ "step": 840
1187
+ },
1188
+ {
1189
+ "epoch": 5.184331797235023,
1190
+ "grad_norm": 0.43535116314888,
1191
+ "learning_rate": 8.249560798821592e-06,
1192
+ "loss": 0.4118,
1193
+ "step": 845
1194
+ },
1195
+ {
1196
+ "epoch": 5.21505376344086,
1197
+ "grad_norm": 0.4166457951068878,
1198
+ "learning_rate": 8.222183666162647e-06,
1199
+ "loss": 0.41,
1200
+ "step": 850
1201
+ },
1202
+ {
1203
+ "epoch": 5.245775729646698,
1204
+ "grad_norm": 0.3790026009082794,
1205
+ "learning_rate": 8.194640310696383e-06,
1206
+ "loss": 0.4131,
1207
+ "step": 855
1208
+ },
1209
+ {
1210
+ "epoch": 5.276497695852535,
1211
+ "grad_norm": 0.4068205654621124,
1212
+ "learning_rate": 8.16693215330204e-06,
1213
+ "loss": 0.4149,
1214
+ "step": 860
1215
+ },
1216
+ {
1217
+ "epoch": 5.307219662058372,
1218
+ "grad_norm": 0.40233853459358215,
1219
+ "learning_rate": 8.139060623360494e-06,
1220
+ "loss": 0.414,
1221
+ "step": 865
1222
+ },
1223
+ {
1224
+ "epoch": 5.337941628264209,
1225
+ "grad_norm": 0.4058436155319214,
1226
+ "learning_rate": 8.111027158680516e-06,
1227
+ "loss": 0.4128,
1228
+ "step": 870
1229
+ },
1230
+ {
1231
+ "epoch": 5.368663594470046,
1232
+ "grad_norm": 0.35581091046333313,
1233
+ "learning_rate": 8.082833205424614e-06,
1234
+ "loss": 0.412,
1235
+ "step": 875
1236
+ },
1237
+ {
1238
+ "epoch": 5.399385560675883,
1239
+ "grad_norm": 0.39174729585647583,
1240
+ "learning_rate": 8.054480218034415e-06,
1241
+ "loss": 0.4127,
1242
+ "step": 880
1243
+ },
1244
+ {
1245
+ "epoch": 5.43010752688172,
1246
+ "grad_norm": 0.4122447371482849,
1247
+ "learning_rate": 8.02596965915564e-06,
1248
+ "loss": 0.4143,
1249
+ "step": 885
1250
+ },
1251
+ {
1252
+ "epoch": 5.460829493087558,
1253
+ "grad_norm": 0.37394076585769653,
1254
+ "learning_rate": 7.997302999562657e-06,
1255
+ "loss": 0.4165,
1256
+ "step": 890
1257
+ },
1258
+ {
1259
+ "epoch": 5.491551459293395,
1260
+ "grad_norm": 0.38974493741989136,
1261
+ "learning_rate": 7.968481718082601e-06,
1262
+ "loss": 0.4158,
1263
+ "step": 895
1264
+ },
1265
+ {
1266
+ "epoch": 5.522273425499232,
1267
+ "grad_norm": 0.3667392134666443,
1268
+ "learning_rate": 7.93950730151908e-06,
1269
+ "loss": 0.4186,
1270
+ "step": 900
1271
+ },
1272
+ {
1273
+ "epoch": 5.552995391705069,
1274
+ "grad_norm": 0.3641802668571472,
1275
+ "learning_rate": 7.910381244575491e-06,
1276
+ "loss": 0.4146,
1277
+ "step": 905
1278
+ },
1279
+ {
1280
+ "epoch": 5.583717357910906,
1281
+ "grad_norm": 0.37418097257614136,
1282
+ "learning_rate": 7.881105049777902e-06,
1283
+ "loss": 0.4146,
1284
+ "step": 910
1285
+ },
1286
+ {
1287
+ "epoch": 5.614439324116743,
1288
+ "grad_norm": 0.3662942051887512,
1289
+ "learning_rate": 7.851680227397541e-06,
1290
+ "loss": 0.4181,
1291
+ "step": 915
1292
+ },
1293
+ {
1294
+ "epoch": 5.645161290322581,
1295
+ "grad_norm": 0.3564474284648895,
1296
+ "learning_rate": 7.82210829537289e-06,
1297
+ "loss": 0.4122,
1298
+ "step": 920
1299
+ },
1300
+ {
1301
+ "epoch": 5.675883256528418,
1302
+ "grad_norm": 0.3735935091972351,
1303
+ "learning_rate": 7.792390779231374e-06,
1304
+ "loss": 0.4152,
1305
+ "step": 925
1306
+ },
1307
+ {
1308
+ "epoch": 5.706605222734255,
1309
+ "grad_norm": 0.3896511197090149,
1310
+ "learning_rate": 7.762529212010675e-06,
1311
+ "loss": 0.4125,
1312
+ "step": 930
1313
+ },
1314
+ {
1315
+ "epoch": 5.7373271889400925,
1316
+ "grad_norm": 0.42632153630256653,
1317
+ "learning_rate": 7.732525134179625e-06,
1318
+ "loss": 0.4138,
1319
+ "step": 935
1320
+ },
1321
+ {
1322
+ "epoch": 5.768049155145929,
1323
+ "grad_norm": 0.3700067698955536,
1324
+ "learning_rate": 7.702380093558766e-06,
1325
+ "loss": 0.4128,
1326
+ "step": 940
1327
+ },
1328
+ {
1329
+ "epoch": 5.798771121351766,
1330
+ "grad_norm": 0.3713553547859192,
1331
+ "learning_rate": 7.672095645240479e-06,
1332
+ "loss": 0.4153,
1333
+ "step": 945
1334
+ },
1335
+ {
1336
+ "epoch": 5.829493087557603,
1337
+ "grad_norm": 0.49530503153800964,
1338
+ "learning_rate": 7.641673351508774e-06,
1339
+ "loss": 0.4159,
1340
+ "step": 950
1341
+ },
1342
+ {
1343
+ "epoch": 5.860215053763441,
1344
+ "grad_norm": 0.3351239562034607,
1345
+ "learning_rate": 7.6111147817586925e-06,
1346
+ "loss": 0.4181,
1347
+ "step": 955
1348
+ },
1349
+ {
1350
+ "epoch": 5.890937019969278,
1351
+ "grad_norm": 0.3583086133003235,
1352
+ "learning_rate": 7.580421512415349e-06,
1353
+ "loss": 0.4148,
1354
+ "step": 960
1355
+ },
1356
+ {
1357
+ "epoch": 5.921658986175116,
1358
+ "grad_norm": 0.3566780388355255,
1359
+ "learning_rate": 7.549595126852605e-06,
1360
+ "loss": 0.4133,
1361
+ "step": 965
1362
+ },
1363
+ {
1364
+ "epoch": 5.9523809523809526,
1365
+ "grad_norm": 0.3630661964416504,
1366
+ "learning_rate": 7.518637215311388e-06,
1367
+ "loss": 0.4151,
1368
+ "step": 970
1369
+ },
1370
+ {
1371
+ "epoch": 5.983102918586789,
1372
+ "grad_norm": 0.35128363966941833,
1373
+ "learning_rate": 7.487549374817662e-06,
1374
+ "loss": 0.4159,
1375
+ "step": 975
1376
+ },
1377
+ {
1378
+ "epoch": 6.012288786482335,
1379
+ "grad_norm": 0.442436158657074,
1380
+ "learning_rate": 7.456333209100032e-06,
1381
+ "loss": 0.4034,
1382
+ "step": 980
1383
+ },
1384
+ {
1385
+ "epoch": 6.043010752688172,
1386
+ "grad_norm": 0.41370487213134766,
1387
+ "learning_rate": 7.424990328507017e-06,
1388
+ "loss": 0.3851,
1389
+ "step": 985
1390
+ },
1391
+ {
1392
+ "epoch": 6.073732718894009,
1393
+ "grad_norm": 0.4333699941635132,
1394
+ "learning_rate": 7.393522349923981e-06,
1395
+ "loss": 0.3869,
1396
+ "step": 990
1397
+ },
1398
+ {
1399
+ "epoch": 6.104454685099847,
1400
+ "grad_norm": 0.4446549415588379,
1401
+ "learning_rate": 7.361930896689713e-06,
1402
+ "loss": 0.3836,
1403
+ "step": 995
1404
+ },
1405
+ {
1406
+ "epoch": 6.135176651305684,
1407
+ "grad_norm": 0.40873849391937256,
1408
+ "learning_rate": 7.330217598512696e-06,
1409
+ "loss": 0.3857,
1410
+ "step": 1000
1411
+ },
1412
+ {
1413
+ "epoch": 6.1658986175115205,
1414
+ "grad_norm": 0.4244365990161896,
1415
+ "learning_rate": 7.2983840913870215e-06,
1416
+ "loss": 0.3863,
1417
+ "step": 1005
1418
+ },
1419
+ {
1420
+ "epoch": 6.196620583717358,
1421
+ "grad_norm": 0.3845650851726532,
1422
+ "learning_rate": 7.266432017508008e-06,
1423
+ "loss": 0.3901,
1424
+ "step": 1010
1425
+ },
1426
+ {
1427
+ "epoch": 6.227342549923195,
1428
+ "grad_norm": 0.39868220686912537,
1429
+ "learning_rate": 7.234363025187474e-06,
1430
+ "loss": 0.3855,
1431
+ "step": 1015
1432
+ },
1433
+ {
1434
+ "epoch": 6.258064516129032,
1435
+ "grad_norm": 0.37892380356788635,
1436
+ "learning_rate": 7.202178768768711e-06,
1437
+ "loss": 0.3928,
1438
+ "step": 1020
1439
+ },
1440
+ {
1441
+ "epoch": 6.28878648233487,
1442
+ "grad_norm": 0.3923156261444092,
1443
+ "learning_rate": 7.169880908541136e-06,
1444
+ "loss": 0.3921,
1445
+ "step": 1025
1446
+ },
1447
+ {
1448
+ "epoch": 6.319508448540707,
1449
+ "grad_norm": 0.39880916476249695,
1450
+ "learning_rate": 7.137471110654656e-06,
1451
+ "loss": 0.3938,
1452
+ "step": 1030
1453
+ },
1454
+ {
1455
+ "epoch": 6.350230414746544,
1456
+ "grad_norm": 0.4066980481147766,
1457
+ "learning_rate": 7.104951047033697e-06,
1458
+ "loss": 0.3906,
1459
+ "step": 1035
1460
+ },
1461
+ {
1462
+ "epoch": 6.380952380952381,
1463
+ "grad_norm": 0.3751300573348999,
1464
+ "learning_rate": 7.0723223952909694e-06,
1465
+ "loss": 0.3909,
1466
+ "step": 1040
1467
+ },
1468
+ {
1469
+ "epoch": 6.411674347158218,
1470
+ "grad_norm": 0.3525156080722809,
1471
+ "learning_rate": 7.039586838640918e-06,
1472
+ "loss": 0.3894,
1473
+ "step": 1045
1474
+ },
1475
+ {
1476
+ "epoch": 6.442396313364055,
1477
+ "grad_norm": 0.36944258213043213,
1478
+ "learning_rate": 7.006746065812895e-06,
1479
+ "loss": 0.3909,
1480
+ "step": 1050
1481
+ },
1482
+ {
1483
+ "epoch": 6.473118279569892,
1484
+ "grad_norm": 0.3836762011051178,
1485
+ "learning_rate": 6.973801770964031e-06,
1486
+ "loss": 0.3896,
1487
+ "step": 1055
1488
+ },
1489
+ {
1490
+ "epoch": 6.50384024577573,
1491
+ "grad_norm": 0.41395968198776245,
1492
+ "learning_rate": 6.940755653591859e-06,
1493
+ "loss": 0.3889,
1494
+ "step": 1060
1495
+ },
1496
+ {
1497
+ "epoch": 6.534562211981567,
1498
+ "grad_norm": 0.42266151309013367,
1499
+ "learning_rate": 6.907609418446623e-06,
1500
+ "loss": 0.3924,
1501
+ "step": 1065
1502
+ },
1503
+ {
1504
+ "epoch": 6.565284178187404,
1505
+ "grad_norm": 0.38352274894714355,
1506
+ "learning_rate": 6.8743647754433485e-06,
1507
+ "loss": 0.3934,
1508
+ "step": 1070
1509
+ },
1510
+ {
1511
+ "epoch": 6.596006144393241,
1512
+ "grad_norm": 0.3761062026023865,
1513
+ "learning_rate": 6.841023439573623e-06,
1514
+ "loss": 0.3915,
1515
+ "step": 1075
1516
+ },
1517
+ {
1518
+ "epoch": 6.626728110599078,
1519
+ "grad_norm": 0.38670945167541504,
1520
+ "learning_rate": 6.807587130817134e-06,
1521
+ "loss": 0.3925,
1522
+ "step": 1080
1523
+ },
1524
+ {
1525
+ "epoch": 6.657450076804915,
1526
+ "grad_norm": 0.36626312136650085,
1527
+ "learning_rate": 6.774057574052932e-06,
1528
+ "loss": 0.3944,
1529
+ "step": 1085
1530
+ },
1531
+ {
1532
+ "epoch": 6.688172043010753,
1533
+ "grad_norm": 0.4045194685459137,
1534
+ "learning_rate": 6.740436498970453e-06,
1535
+ "loss": 0.3955,
1536
+ "step": 1090
1537
+ },
1538
+ {
1539
+ "epoch": 6.71889400921659,
1540
+ "grad_norm": 0.4138599932193756,
1541
+ "learning_rate": 6.706725639980294e-06,
1542
+ "loss": 0.3929,
1543
+ "step": 1095
1544
+ },
1545
+ {
1546
+ "epoch": 6.749615975422427,
1547
+ "grad_norm": 0.39506402611732483,
1548
+ "learning_rate": 6.6729267361247295e-06,
1549
+ "loss": 0.3883,
1550
+ "step": 1100
1551
+ },
1552
+ {
1553
+ "epoch": 6.7803379416282645,
1554
+ "grad_norm": 0.3903568387031555,
1555
+ "learning_rate": 6.639041530988009e-06,
1556
+ "loss": 0.3939,
1557
+ "step": 1105
1558
+ },
1559
+ {
1560
+ "epoch": 6.811059907834101,
1561
+ "grad_norm": 0.3678980767726898,
1562
+ "learning_rate": 6.605071772606404e-06,
1563
+ "loss": 0.394,
1564
+ "step": 1110
1565
+ },
1566
+ {
1567
+ "epoch": 6.841781874039938,
1568
+ "grad_norm": 0.35300132632255554,
1569
+ "learning_rate": 6.571019213378034e-06,
1570
+ "loss": 0.391,
1571
+ "step": 1115
1572
+ },
1573
+ {
1574
+ "epoch": 6.872503840245776,
1575
+ "grad_norm": 0.3788436949253082,
1576
+ "learning_rate": 6.536885609972467e-06,
1577
+ "loss": 0.397,
1578
+ "step": 1120
1579
+ },
1580
+ {
1581
+ "epoch": 6.903225806451613,
1582
+ "grad_norm": 0.38878560066223145,
1583
+ "learning_rate": 6.502672723240103e-06,
1584
+ "loss": 0.3969,
1585
+ "step": 1125
1586
+ },
1587
+ {
1588
+ "epoch": 6.93394777265745,
1589
+ "grad_norm": 0.4072780907154083,
1590
+ "learning_rate": 6.4683823181213224e-06,
1591
+ "loss": 0.3969,
1592
+ "step": 1130
1593
+ },
1594
+ {
1595
+ "epoch": 6.964669738863288,
1596
+ "grad_norm": 0.40496107935905457,
1597
+ "learning_rate": 6.434016163555452e-06,
1598
+ "loss": 0.3957,
1599
+ "step": 1135
1600
+ },
1601
+ {
1602
+ "epoch": 6.9953917050691246,
1603
+ "grad_norm": 0.3747064173221588,
1604
+ "learning_rate": 6.399576032389505e-06,
1605
+ "loss": 0.3984,
1606
+ "step": 1140
1607
+ },
1608
+ {
1609
+ "epoch": 7.024577572964669,
1610
+ "grad_norm": 0.5090351104736328,
1611
+ "learning_rate": 6.365063701286728e-06,
1612
+ "loss": 0.3714,
1613
+ "step": 1145
1614
+ },
1615
+ {
1616
+ "epoch": 7.055299539170507,
1617
+ "grad_norm": 0.42551228404045105,
1618
+ "learning_rate": 6.330480950634942e-06,
1619
+ "loss": 0.3673,
1620
+ "step": 1150
1621
+ },
1622
+ {
1623
+ "epoch": 7.086021505376344,
1624
+ "grad_norm": 0.4707318842411041,
1625
+ "learning_rate": 6.2958295644547026e-06,
1626
+ "loss": 0.3641,
1627
+ "step": 1155
1628
+ },
1629
+ {
1630
+ "epoch": 7.116743471582181,
1631
+ "grad_norm": 0.40848663449287415,
1632
+ "learning_rate": 6.261111330307272e-06,
1633
+ "loss": 0.3628,
1634
+ "step": 1160
1635
+ },
1636
+ {
1637
+ "epoch": 7.147465437788019,
1638
+ "grad_norm": 0.4382622539997101,
1639
+ "learning_rate": 6.22632803920239e-06,
1640
+ "loss": 0.3691,
1641
+ "step": 1165
1642
+ },
1643
+ {
1644
+ "epoch": 7.178187403993856,
1645
+ "grad_norm": 0.3866026699542999,
1646
+ "learning_rate": 6.191481485505898e-06,
1647
+ "loss": 0.3639,
1648
+ "step": 1170
1649
+ },
1650
+ {
1651
+ "epoch": 7.2089093701996925,
1652
+ "grad_norm": 0.4263141453266144,
1653
+ "learning_rate": 6.1565734668471614e-06,
1654
+ "loss": 0.3634,
1655
+ "step": 1175
1656
+ },
1657
+ {
1658
+ "epoch": 7.23963133640553,
1659
+ "grad_norm": 0.4050372242927551,
1660
+ "learning_rate": 6.121605784026339e-06,
1661
+ "loss": 0.3648,
1662
+ "step": 1180
1663
+ },
1664
+ {
1665
+ "epoch": 7.270353302611367,
1666
+ "grad_norm": 0.3879098892211914,
1667
+ "learning_rate": 6.086580240921486e-06,
1668
+ "loss": 0.3667,
1669
+ "step": 1185
1670
+ },
1671
+ {
1672
+ "epoch": 7.301075268817204,
1673
+ "grad_norm": 0.4055810868740082,
1674
+ "learning_rate": 6.051498644395496e-06,
1675
+ "loss": 0.3656,
1676
+ "step": 1190
1677
+ },
1678
+ {
1679
+ "epoch": 7.331797235023042,
1680
+ "grad_norm": 0.42201170325279236,
1681
+ "learning_rate": 6.01636280420289e-06,
1682
+ "loss": 0.3679,
1683
+ "step": 1195
1684
+ },
1685
+ {
1686
+ "epoch": 7.362519201228879,
1687
+ "grad_norm": 0.4164835214614868,
1688
+ "learning_rate": 5.981174532896459e-06,
1689
+ "loss": 0.367,
1690
+ "step": 1200
1691
+ },
1692
+ {
1693
+ "epoch": 7.393241167434716,
1694
+ "grad_norm": 0.39605438709259033,
1695
+ "learning_rate": 5.9459356457337556e-06,
1696
+ "loss": 0.3647,
1697
+ "step": 1205
1698
+ },
1699
+ {
1700
+ "epoch": 7.423963133640553,
1701
+ "grad_norm": 0.4393250644207001,
1702
+ "learning_rate": 5.910647960583458e-06,
1703
+ "loss": 0.3733,
1704
+ "step": 1210
1705
+ },
1706
+ {
1707
+ "epoch": 7.45468509984639,
1708
+ "grad_norm": 0.37553438544273376,
1709
+ "learning_rate": 5.875313297831579e-06,
1710
+ "loss": 0.37,
1711
+ "step": 1215
1712
+ },
1713
+ {
1714
+ "epoch": 7.485407066052227,
1715
+ "grad_norm": 0.3898600935935974,
1716
+ "learning_rate": 5.839933480287572e-06,
1717
+ "loss": 0.3678,
1718
+ "step": 1220
1719
+ },
1720
+ {
1721
+ "epoch": 7.516129032258064,
1722
+ "grad_norm": 0.4083476662635803,
1723
+ "learning_rate": 5.804510333090287e-06,
1724
+ "loss": 0.3665,
1725
+ "step": 1225
1726
+ },
1727
+ {
1728
+ "epoch": 7.546850998463902,
1729
+ "grad_norm": 0.40433645248413086,
1730
+ "learning_rate": 5.769045683613822e-06,
1731
+ "loss": 0.3715,
1732
+ "step": 1230
1733
+ },
1734
+ {
1735
+ "epoch": 7.577572964669739,
1736
+ "grad_norm": 0.4303235709667206,
1737
+ "learning_rate": 5.733541361373253e-06,
1738
+ "loss": 0.3711,
1739
+ "step": 1235
1740
+ },
1741
+ {
1742
+ "epoch": 7.6082949308755765,
1743
+ "grad_norm": 0.40306177735328674,
1744
+ "learning_rate": 5.697999197930259e-06,
1745
+ "loss": 0.3659,
1746
+ "step": 1240
1747
+ },
1748
+ {
1749
+ "epoch": 7.639016897081413,
1750
+ "grad_norm": 0.39787065982818604,
1751
+ "learning_rate": 5.662421026798624e-06,
1752
+ "loss": 0.3722,
1753
+ "step": 1245
1754
+ },
1755
+ {
1756
+ "epoch": 7.66973886328725,
1757
+ "grad_norm": 0.401962012052536,
1758
+ "learning_rate": 5.626808683349672e-06,
1759
+ "loss": 0.3691,
1760
+ "step": 1250
1761
+ },
1762
+ {
1763
+ "epoch": 7.700460829493087,
1764
+ "grad_norm": 0.38256722688674927,
1765
+ "learning_rate": 5.591164004717567e-06,
1766
+ "loss": 0.3694,
1767
+ "step": 1255
1768
+ },
1769
+ {
1770
+ "epoch": 7.731182795698925,
1771
+ "grad_norm": 0.4020300507545471,
1772
+ "learning_rate": 5.55548882970455e-06,
1773
+ "loss": 0.3728,
1774
+ "step": 1260
1775
+ },
1776
+ {
1777
+ "epoch": 7.761904761904762,
1778
+ "grad_norm": 0.41450026631355286,
1779
+ "learning_rate": 5.519784998686081e-06,
1780
+ "loss": 0.3673,
1781
+ "step": 1265
1782
+ },
1783
+ {
1784
+ "epoch": 7.792626728110599,
1785
+ "grad_norm": 0.36544522643089294,
1786
+ "learning_rate": 5.484054353515896e-06,
1787
+ "loss": 0.3729,
1788
+ "step": 1270
1789
+ },
1790
+ {
1791
+ "epoch": 7.8233486943164365,
1792
+ "grad_norm": 0.38962146639823914,
1793
+ "learning_rate": 5.448298737430992e-06,
1794
+ "loss": 0.3697,
1795
+ "step": 1275
1796
+ },
1797
+ {
1798
+ "epoch": 7.854070660522273,
1799
+ "grad_norm": 0.425886869430542,
1800
+ "learning_rate": 5.412519994956543e-06,
1801
+ "loss": 0.3733,
1802
+ "step": 1280
1803
+ },
1804
+ {
1805
+ "epoch": 7.88479262672811,
1806
+ "grad_norm": 0.3979520797729492,
1807
+ "learning_rate": 5.376719971810741e-06,
1808
+ "loss": 0.3734,
1809
+ "step": 1285
1810
+ },
1811
+ {
1812
+ "epoch": 7.915514592933948,
1813
+ "grad_norm": 0.38723668456077576,
1814
+ "learning_rate": 5.340900514809587e-06,
1815
+ "loss": 0.3726,
1816
+ "step": 1290
1817
+ },
1818
+ {
1819
+ "epoch": 7.946236559139785,
1820
+ "grad_norm": 0.37770572304725647,
1821
+ "learning_rate": 5.305063471771614e-06,
1822
+ "loss": 0.3699,
1823
+ "step": 1295
1824
+ },
1825
+ {
1826
+ "epoch": 7.976958525345622,
1827
+ "grad_norm": 0.398049533367157,
1828
+ "learning_rate": 5.26921069142257e-06,
1829
+ "loss": 0.3717,
1830
+ "step": 1300
1831
+ },
1832
+ {
1833
+ "epoch": 8.006144393241167,
1834
+ "grad_norm": 0.5838120579719543,
1835
+ "learning_rate": 5.233344023300037e-06,
1836
+ "loss": 0.3649,
1837
+ "step": 1305
1838
+ },
1839
+ {
1840
+ "epoch": 8.036866359447005,
1841
+ "grad_norm": 0.4888751208782196,
1842
+ "learning_rate": 5.197465317658036e-06,
1843
+ "loss": 0.3417,
1844
+ "step": 1310
1845
+ },
1846
+ {
1847
+ "epoch": 8.067588325652842,
1848
+ "grad_norm": 0.4426686465740204,
1849
+ "learning_rate": 5.161576425371554e-06,
1850
+ "loss": 0.3448,
1851
+ "step": 1315
1852
+ },
1853
+ {
1854
+ "epoch": 8.09831029185868,
1855
+ "grad_norm": 0.4328514635562897,
1856
+ "learning_rate": 5.125679197841088e-06,
1857
+ "loss": 0.3427,
1858
+ "step": 1320
1859
+ },
1860
+ {
1861
+ "epoch": 8.129032258064516,
1862
+ "grad_norm": 0.461224764585495,
1863
+ "learning_rate": 5.089775486897121e-06,
1864
+ "loss": 0.3411,
1865
+ "step": 1325
1866
+ },
1867
+ {
1868
+ "epoch": 8.159754224270353,
1869
+ "grad_norm": 0.41059058904647827,
1870
+ "learning_rate": 5.053867144704594e-06,
1871
+ "loss": 0.3432,
1872
+ "step": 1330
1873
+ },
1874
+ {
1875
+ "epoch": 8.19047619047619,
1876
+ "grad_norm": 0.4233262538909912,
1877
+ "learning_rate": 5.017956023667363e-06,
1878
+ "loss": 0.3428,
1879
+ "step": 1335
1880
+ },
1881
+ {
1882
+ "epoch": 8.221198156682028,
1883
+ "grad_norm": 0.44398781657218933,
1884
+ "learning_rate": 4.982043976332638e-06,
1885
+ "loss": 0.3396,
1886
+ "step": 1340
1887
+ },
1888
+ {
1889
+ "epoch": 8.251920122887865,
1890
+ "grad_norm": 0.43628108501434326,
1891
+ "learning_rate": 4.946132855295407e-06,
1892
+ "loss": 0.3432,
1893
+ "step": 1345
1894
+ },
1895
+ {
1896
+ "epoch": 8.282642089093702,
1897
+ "grad_norm": 0.45262426137924194,
1898
+ "learning_rate": 4.910224513102881e-06,
1899
+ "loss": 0.34,
1900
+ "step": 1350
1901
+ },
1902
+ {
1903
+ "epoch": 8.31336405529954,
1904
+ "grad_norm": 0.46370255947113037,
1905
+ "learning_rate": 4.8743208021589135e-06,
1906
+ "loss": 0.3404,
1907
+ "step": 1355
1908
+ },
1909
+ {
1910
+ "epoch": 8.344086021505376,
1911
+ "grad_norm": 0.40948814153671265,
1912
+ "learning_rate": 4.838423574628447e-06,
1913
+ "loss": 0.3431,
1914
+ "step": 1360
1915
+ },
1916
+ {
1917
+ "epoch": 8.374807987711213,
1918
+ "grad_norm": 0.4436282813549042,
1919
+ "learning_rate": 4.802534682341966e-06,
1920
+ "loss": 0.3446,
1921
+ "step": 1365
1922
+ },
1923
+ {
1924
+ "epoch": 8.40552995391705,
1925
+ "grad_norm": 0.4203520119190216,
1926
+ "learning_rate": 4.7666559766999635e-06,
1927
+ "loss": 0.3478,
1928
+ "step": 1370
1929
+ },
1930
+ {
1931
+ "epoch": 8.436251920122888,
1932
+ "grad_norm": 0.4091641306877136,
1933
+ "learning_rate": 4.730789308577432e-06,
1934
+ "loss": 0.3461,
1935
+ "step": 1375
1936
+ },
1937
+ {
1938
+ "epoch": 8.466973886328725,
1939
+ "grad_norm": 0.4320433735847473,
1940
+ "learning_rate": 4.694936528228387e-06,
1941
+ "loss": 0.3504,
1942
+ "step": 1380
1943
+ },
1944
+ {
1945
+ "epoch": 8.497695852534562,
1946
+ "grad_norm": 0.4243397116661072,
1947
+ "learning_rate": 4.659099485190414e-06,
1948
+ "loss": 0.3444,
1949
+ "step": 1385
1950
+ },
1951
+ {
1952
+ "epoch": 8.5284178187404,
1953
+ "grad_norm": 0.42783576250076294,
1954
+ "learning_rate": 4.6232800281892604e-06,
1955
+ "loss": 0.3398,
1956
+ "step": 1390
1957
+ },
1958
+ {
1959
+ "epoch": 8.559139784946236,
1960
+ "grad_norm": 0.395312637090683,
1961
+ "learning_rate": 4.587480005043458e-06,
1962
+ "loss": 0.3472,
1963
+ "step": 1395
1964
+ },
1965
+ {
1966
+ "epoch": 8.589861751152073,
1967
+ "grad_norm": 0.41875869035720825,
1968
+ "learning_rate": 4.551701262569009e-06,
1969
+ "loss": 0.3475,
1970
+ "step": 1400
1971
+ },
1972
+ {
1973
+ "epoch": 8.620583717357912,
1974
+ "grad_norm": 0.4307910203933716,
1975
+ "learning_rate": 4.515945646484105e-06,
1976
+ "loss": 0.3465,
1977
+ "step": 1405
1978
+ },
1979
+ {
1980
+ "epoch": 8.651305683563749,
1981
+ "grad_norm": 0.40852200984954834,
1982
+ "learning_rate": 4.480215001313919e-06,
1983
+ "loss": 0.3497,
1984
+ "step": 1410
1985
+ },
1986
+ {
1987
+ "epoch": 8.682027649769585,
1988
+ "grad_norm": 0.4345207214355469,
1989
+ "learning_rate": 4.444511170295451e-06,
1990
+ "loss": 0.3474,
1991
+ "step": 1415
1992
+ },
1993
+ {
1994
+ "epoch": 8.712749615975422,
1995
+ "grad_norm": 0.4096705913543701,
1996
+ "learning_rate": 4.408835995282434e-06,
1997
+ "loss": 0.3472,
1998
+ "step": 1420
1999
+ },
2000
+ {
2001
+ "epoch": 8.74347158218126,
2002
+ "grad_norm": 0.4314156770706177,
2003
+ "learning_rate": 4.373191316650328e-06,
2004
+ "loss": 0.3518,
2005
+ "step": 1425
2006
+ },
2007
+ {
2008
+ "epoch": 8.774193548387096,
2009
+ "grad_norm": 0.41832414269447327,
2010
+ "learning_rate": 4.3375789732013775e-06,
2011
+ "loss": 0.3498,
2012
+ "step": 1430
2013
+ },
2014
+ {
2015
+ "epoch": 8.804915514592935,
2016
+ "grad_norm": 0.42618289589881897,
2017
+ "learning_rate": 4.302000802069744e-06,
2018
+ "loss": 0.3486,
2019
+ "step": 1435
2020
+ },
2021
+ {
2022
+ "epoch": 8.835637480798772,
2023
+ "grad_norm": 0.43849977850914,
2024
+ "learning_rate": 4.2664586386267474e-06,
2025
+ "loss": 0.346,
2026
+ "step": 1440
2027
+ },
2028
+ {
2029
+ "epoch": 8.866359447004609,
2030
+ "grad_norm": 0.42157772183418274,
2031
+ "learning_rate": 4.230954316386179e-06,
2032
+ "loss": 0.3475,
2033
+ "step": 1445
2034
+ },
2035
+ {
2036
+ "epoch": 8.897081413210445,
2037
+ "grad_norm": 0.39600861072540283,
2038
+ "learning_rate": 4.195489666909714e-06,
2039
+ "loss": 0.3455,
2040
+ "step": 1450
2041
+ },
2042
+ {
2043
+ "epoch": 8.927803379416282,
2044
+ "grad_norm": 0.3980286419391632,
2045
+ "learning_rate": 4.160066519712428e-06,
2046
+ "loss": 0.3488,
2047
+ "step": 1455
2048
+ },
2049
+ {
2050
+ "epoch": 8.95852534562212,
2051
+ "grad_norm": 0.41449347138404846,
2052
+ "learning_rate": 4.1246867021684206e-06,
2053
+ "loss": 0.345,
2054
+ "step": 1460
2055
+ },
2056
+ {
2057
+ "epoch": 8.989247311827956,
2058
+ "grad_norm": 0.43595919013023376,
2059
+ "learning_rate": 4.089352039416543e-06,
2060
+ "loss": 0.3476,
2061
+ "step": 1465
2062
+ },
2063
+ {
2064
+ "epoch": 9.018433179723502,
2065
+ "grad_norm": 0.5251989364624023,
2066
+ "learning_rate": 4.054064354266244e-06,
2067
+ "loss": 0.3327,
2068
+ "step": 1470
2069
+ },
2070
+ {
2071
+ "epoch": 9.049155145929339,
2072
+ "grad_norm": 0.4793786108493805,
2073
+ "learning_rate": 4.018825467103542e-06,
2074
+ "loss": 0.318,
2075
+ "step": 1475
2076
+ },
2077
+ {
2078
+ "epoch": 9.079877112135177,
2079
+ "grad_norm": 0.4722115993499756,
2080
+ "learning_rate": 3.983637195797111e-06,
2081
+ "loss": 0.3217,
2082
+ "step": 1480
2083
+ },
2084
+ {
2085
+ "epoch": 9.110599078341014,
2086
+ "grad_norm": 0.4697054326534271,
2087
+ "learning_rate": 3.948501355604507e-06,
2088
+ "loss": 0.3184,
2089
+ "step": 1485
2090
+ },
2091
+ {
2092
+ "epoch": 9.141321044546851,
2093
+ "grad_norm": 0.4698534607887268,
2094
+ "learning_rate": 3.9134197590785164e-06,
2095
+ "loss": 0.3193,
2096
+ "step": 1490
2097
+ },
2098
+ {
2099
+ "epoch": 9.172043010752688,
2100
+ "grad_norm": 0.4833962917327881,
2101
+ "learning_rate": 3.878394215973663e-06,
2102
+ "loss": 0.3243,
2103
+ "step": 1495
2104
+ },
2105
+ {
2106
+ "epoch": 9.202764976958525,
2107
+ "grad_norm": 0.47097915410995483,
2108
+ "learning_rate": 3.843426533152841e-06,
2109
+ "loss": 0.3218,
2110
+ "step": 1500
2111
+ },
2112
+ {
2113
+ "epoch": 9.233486943164362,
2114
+ "grad_norm": 0.4613553285598755,
2115
+ "learning_rate": 3.808518514494105e-06,
2116
+ "loss": 0.3191,
2117
+ "step": 1505
2118
+ },
2119
+ {
2120
+ "epoch": 9.2642089093702,
2121
+ "grad_norm": 0.4618718922138214,
2122
+ "learning_rate": 3.773671960797613e-06,
2123
+ "loss": 0.3219,
2124
+ "step": 1510
2125
+ },
2126
+ {
2127
+ "epoch": 9.294930875576037,
2128
+ "grad_norm": 0.45279550552368164,
2129
+ "learning_rate": 3.7388886696927317e-06,
2130
+ "loss": 0.3235,
2131
+ "step": 1515
2132
+ },
2133
+ {
2134
+ "epoch": 9.325652841781874,
2135
+ "grad_norm": 0.4596066176891327,
2136
+ "learning_rate": 3.704170435545299e-06,
2137
+ "loss": 0.3183,
2138
+ "step": 1520
2139
+ },
2140
+ {
2141
+ "epoch": 9.356374807987711,
2142
+ "grad_norm": 0.4353365898132324,
2143
+ "learning_rate": 3.6695190493650608e-06,
2144
+ "loss": 0.3204,
2145
+ "step": 1525
2146
+ },
2147
+ {
2148
+ "epoch": 9.387096774193548,
2149
+ "grad_norm": 0.444594144821167,
2150
+ "learning_rate": 3.634936298713274e-06,
2151
+ "loss": 0.3225,
2152
+ "step": 1530
2153
+ },
2154
+ {
2155
+ "epoch": 9.417818740399385,
2156
+ "grad_norm": 0.43966248631477356,
2157
+ "learning_rate": 3.6004239676104957e-06,
2158
+ "loss": 0.3236,
2159
+ "step": 1535
2160
+ },
2161
+ {
2162
+ "epoch": 9.448540706605222,
2163
+ "grad_norm": 0.4758555591106415,
2164
+ "learning_rate": 3.5659838364445505e-06,
2165
+ "loss": 0.3219,
2166
+ "step": 1540
2167
+ },
2168
+ {
2169
+ "epoch": 9.47926267281106,
2170
+ "grad_norm": 0.4469148814678192,
2171
+ "learning_rate": 3.5316176818786797e-06,
2172
+ "loss": 0.324,
2173
+ "step": 1545
2174
+ },
2175
+ {
2176
+ "epoch": 9.509984639016897,
2177
+ "grad_norm": 0.4485386908054352,
2178
+ "learning_rate": 3.497327276759899e-06,
2179
+ "loss": 0.3238,
2180
+ "step": 1550
2181
+ },
2182
+ {
2183
+ "epoch": 9.540706605222734,
2184
+ "grad_norm": 0.44222620129585266,
2185
+ "learning_rate": 3.463114390027533e-06,
2186
+ "loss": 0.3205,
2187
+ "step": 1555
2188
+ },
2189
+ {
2190
+ "epoch": 9.571428571428571,
2191
+ "grad_norm": 0.45242762565612793,
2192
+ "learning_rate": 3.4289807866219683e-06,
2193
+ "loss": 0.3222,
2194
+ "step": 1560
2195
+ },
2196
+ {
2197
+ "epoch": 9.602150537634408,
2198
+ "grad_norm": 0.44047296047210693,
2199
+ "learning_rate": 3.394928227393598e-06,
2200
+ "loss": 0.3277,
2201
+ "step": 1565
2202
+ },
2203
+ {
2204
+ "epoch": 9.632872503840245,
2205
+ "grad_norm": 0.44352057576179504,
2206
+ "learning_rate": 3.3609584690119924e-06,
2207
+ "loss": 0.3231,
2208
+ "step": 1570
2209
+ },
2210
+ {
2211
+ "epoch": 9.663594470046084,
2212
+ "grad_norm": 0.44703468680381775,
2213
+ "learning_rate": 3.3270732638752713e-06,
2214
+ "loss": 0.3242,
2215
+ "step": 1575
2216
+ },
2217
+ {
2218
+ "epoch": 9.69431643625192,
2219
+ "grad_norm": 0.46220463514328003,
2220
+ "learning_rate": 3.293274360019707e-06,
2221
+ "loss": 0.3256,
2222
+ "step": 1580
2223
+ },
2224
+ {
2225
+ "epoch": 9.725038402457757,
2226
+ "grad_norm": 0.4843495786190033,
2227
+ "learning_rate": 3.259563501029548e-06,
2228
+ "loss": 0.3279,
2229
+ "step": 1585
2230
+ },
2231
+ {
2232
+ "epoch": 9.755760368663594,
2233
+ "grad_norm": 0.4653911292552948,
2234
+ "learning_rate": 3.2259424259470705e-06,
2235
+ "loss": 0.3233,
2236
+ "step": 1590
2237
+ },
2238
+ {
2239
+ "epoch": 9.786482334869431,
2240
+ "grad_norm": 0.45650723576545715,
2241
+ "learning_rate": 3.1924128691828678e-06,
2242
+ "loss": 0.324,
2243
+ "step": 1595
2244
+ },
2245
+ {
2246
+ "epoch": 9.817204301075268,
2247
+ "grad_norm": 0.5246464014053345,
2248
+ "learning_rate": 3.158976560426379e-06,
2249
+ "loss": 0.3286,
2250
+ "step": 1600
2251
+ }
2252
+ ],
2253
+ "logging_steps": 5,
2254
+ "max_steps": 2430,
2255
+ "num_input_tokens_seen": 0,
2256
+ "num_train_epochs": 15,
2257
+ "save_steps": 400,
2258
+ "stateful_callbacks": {
2259
+ "TrainerControl": {
2260
+ "args": {
2261
+ "should_epoch_stop": false,
2262
+ "should_evaluate": false,
2263
+ "should_log": false,
2264
+ "should_save": true,
2265
+ "should_training_stop": false
2266
+ },
2267
+ "attributes": {}
2268
+ }
2269
+ },
2270
+ "total_flos": 2.4548687991848042e+19,
2271
+ "train_batch_size": 1,
2272
+ "trial_name": null,
2273
+ "trial_params": null
2274
+ }
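Note on the trainer state above: the learning rate warms up linearly to its ~1e-5 peak around step 245 and decays cosine-style afterwards, while the training loss falls from ~0.68 at step 5 to ~0.33 at step 1600; with "logging_steps": 5, "save_steps": 400, and "max_steps": 2430, the last logged step (1600) lines up with the checkpoint-saving cadence of the planned 15-epoch run. A minimal sketch for inspecting these curves offline — the local filename trainer_state.json and the "log_history" key are assumptions based on what transformers.Trainer conventionally writes, since the file header sits above this excerpt:

```python
# Minimal sketch: plot the loss and learning-rate curves recorded above.
# Assumes the JSON is saved locally as trainer_state.json and that the
# entries shown in this diff live under a "log_history" key, as written
# by transformers.Trainer (both are assumptions, not part of the commit).
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a training loss (eval entries, if any, lack one).
logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logs]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, [e["loss"] for e in logs])
ax_loss.set_ylabel("loss")
ax_lr.plot(steps, [e["learning_rate"] for e in logs])
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
fig.savefig("training_curves.png")
```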
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7fc3a06490eacaa5c07f40008683352e150e12cb8622c53832fde4d1fc850a5c
3
+ size 7544
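training_args.bin is stored as a Git LFS pointer to a 7.5 kB object; by transformers.Trainer convention this file is the torch-pickled TrainingArguments used for the run. A sketch for inspecting it, assuming a local download and a transformers install compatible with the one used in training:

```python
# Minimal sketch: inspect the pickled TrainingArguments in training_args.bin.
# Assumes the file has been downloaded locally and that a compatible
# transformers version is installed. weights_only=False is needed on recent
# torch (where it defaults to True) because this is a full pickled object,
# not a plain tensor archive.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.lr_scheduler_type, args.warmup_ratio)
```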
vocab.json ADDED
The diff for this file is too large to render. See raw diff
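With vocab.json uploaded alongside the weights and tokenizer metadata in this commit, the repository can be consumed directly through the transformers auto classes. A usage sketch; the repository id below is a placeholder, not part of the commit:

```python
# Minimal usage sketch: load the uploaded checkpoint from the Hub.
# "<user>/<repo>" is a hypothetical placeholder for this repository's id.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "<user>/<repo>"
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype="auto")

inputs = tokenizer("Hello, world", return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```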